pax_global_header00006660000000000000000000000064141533677510014526gustar00rootroot0000000000000052 comment=0f31b7c3aeb8635a7505eaaeea1485676b06e355 check_pgbackrest-REL2_2/000077500000000000000000000000001415336775100152775ustar00rootroot00000000000000check_pgbackrest-REL2_2/.github/000077500000000000000000000000001415336775100166375ustar00rootroot00000000000000check_pgbackrest-REL2_2/.github/workflows/000077500000000000000000000000001415336775100206745ustar00rootroot00000000000000check_pgbackrest-REL2_2/.github/workflows/main.yml000066400000000000000000000044431415336775100223500ustar00rootroot00000000000000--- name: main on: push: branches: - main workflow_dispatch: jobs: use-case-1: runs-on: ubuntu-latest strategy: fail-fast: false matrix: include: - DOCKERI: centos:7 DBTYPE: PG DBVERSION: 14 CLNAME: c7pg EXTRA_VARS: "pgbackrest_excpected_release=2.36 check_pgbackrest_build=true" steps: - uses: actions/checkout@v2 with: path: check_pgbackrest - uses: shogo82148/actions-setup-perl@v1 - name: Initial step run: cd ${HOME?} && sh ${GITHUB_WORKSPACE?}/check_pgbackrest/tests/run.sh -i - name: Run CI script env: EDB_REPO_USERNAME: ${{ secrets.EDB_REPO_USERNAME }} EDB_REPO_PASSWORD: ${{ secrets.EDB_REPO_PASSWORD }} ARCH: use-case-1 CLPATH: /home/runner/clusters CLNAME: ${{ matrix.CLNAME }} DBTYPE: ${{ matrix.DBTYPE }} DBVERSION: ${{ matrix.DBVERSION }} DOCKERI: ${{ matrix.DOCKERI }} EXTRA_VARS: ${{ matrix.EXTRA_VARS }} RUN_ARGS: ACTIVITY: true run: cd ${HOME?} && sh ${GITHUB_WORKSPACE?}/check_pgbackrest/tests/ci.sh use-case-2: runs-on: ubuntu-latest strategy: fail-fast: false matrix: include: - DOCKERI: ubuntu:20.04 DBTYPE: PG DBVERSION: 14 CLNAME: u20pg EXTRA_VARS: "pgbackrest_excpected_release=2.36 check_pgbackrest_build=true" steps: - uses: actions/checkout@v2 with: path: check_pgbackrest - uses: shogo82148/actions-setup-perl@v1 - name: Initial step run: cd ${HOME?} && sh ${GITHUB_WORKSPACE?}/check_pgbackrest/tests/run.sh -i - name: Run CI script env: EDB_REPO_USERNAME: ${{ secrets.EDB_REPO_USERNAME }} EDB_REPO_PASSWORD: ${{ secrets.EDB_REPO_PASSWORD }} ARCH: use-case-2 CLPATH: /home/runner/clusters CLNAME: ${{ matrix.CLNAME }} DBTYPE: ${{ matrix.DBTYPE }} DBVERSION: ${{ matrix.DBVERSION }} DOCKERI: ${{ matrix.DOCKERI }} EXTRA_VARS: ${{ matrix.EXTRA_VARS }} RUN_ARGS: ACTIVITY: true run: cd ${HOME?} && sh ${GITHUB_WORKSPACE?}/check_pgbackrest/tests/ci.sh check_pgbackrest-REL2_2/.gitignore000066400000000000000000000001151415336775100172640ustar00rootroot00000000000000.vagrant/ vagrant.yml tests/validation.sh tests/validation.log tests/test.sh check_pgbackrest-REL2_2/CHANGELOG.md000066400000000000000000000115751415336775100171210ustar00rootroot00000000000000Changelog ========= 2021-12-06 v2.2: - The retention service will check if any error was detected during the backup (reported since pgBackRest 2.36). - Add nagios_strict output format to filter out unsupported types of values from performance data. (Reported by netphantm and Adrien Nayrat) - Support uncompressed files in the archives service. (Suggested by Jean-Philippe Guérard) - Add `retention-diff` and `retention-incr` options in the retention service. (Contributed by devopstales). - Add `retention-age-to-oldest` option in the retention service. (Suggested by Hendrik Schöffmann) 2021-09-21 v2.1: - Only support pgBackRest **2.33** and above in order to add support for the multi-repository feature. Introduce the `--repo` option to set the repository index to operate on. 
When multiple repositories are found and the `--repo` argument is not provided, the services will operate on all defined repositories, checking for inconsistencies across them. It is however recommended to also define checks using the `--repo` argument to verify the sanity of each repository separately. (Reviewed by Adrien Nayrat) - Add a new `max-archives-check-number` option for the archives service. This is intended to be used in case of a timeline switch, when the boundary WAL can't be detected properly, in order to prevent an infinite WAL archives check. - Add `prtg` output format (Hans-Peter Zahno). 2021-02-10 v2.0: - Only support pgBackRest **2.32** and above in order to only use its internal commands. This removes Perl dependencies no longer needed to reach repository hosts or S3 compatible object stores. This also brings support for Azure compatible object stores. The `repo-*` arguments have therefore been deprecated. - Support non-gz compressed files in the archives check (Magnus Hagander). - Fix the `ignore-archived-*` features when using pgBackRest internal commands (Magnus Hagander). - Improve `ignore-archived-*` features to skip the WAL consistency check for backups involving ignored archives. - Skip unneeded boundary WAL check on timeline switch (reported by sebastienruiz). - The retention service will now check that at least the backup directory exists, not only trusting the pgBackRest info command output (suggested by Michael Banck). 2020-07-28 v1.9: - The archives service will now only look at the archives listed between the oldest backup start archive and the max WAL returned by the pgBackRest info command. This should avoid unnecessary alerts. To extend the check to all the archives found, the new --extended-check argument has been implemented (suggested by blogh). - Remove the refresh of the pgBackRest info output after getting the archives list. That avoids a CRITICAL alert if an archive is generated between those two steps. Instead, a WARNING message "max WAL is not the latest archive" will be displayed (suggested by blogh). - Fix S3 archives detection (reported by khadijahvf). - New enable-internal-pgbr-cmds argument, for pgBackRest >= 2.28. Internal pgBackRest commands will then be used to list and get the content of files in the repository instead of Perl specific drivers. This is, for instance, needed to access encrypted repositories. This should become the default and only access method in the next release, removing some Perl dependencies. 2020-03-16 v1.8: - Change the output of missing archives. The complete list is now only shown in --debug mode (suggested by Guillaume Lelarge). - Add --list-archives argument to print the list of all the archived WAL segments. 2020-01-14 v1.7: - Rename --format argument to --output. - Add json output format. - Add timing debug information. - Improve performance of the needed WAL list check. 2019-11-14 v1.6: - Check for each backup its needed archived WALs based on the WAL start/stop information given by the pgBackRest "info" command. - Return WARNING instead of CRITICAL in case of missing archived WAL prior to the latest backup, regardless of its type. - Add ignore-archived-before argument to ignore the archived WALs before the provided interval. - Rename ignore-archived-since argument to ignore-archived-after. - Add --retention-age-to-full argument to check the latest full backup age. - Fix bad behavior on CIFS mount (reported by `renesepp`). - Add Amazon S3 support for the archives service (Andrew E. Bruno). 
- Avoid chdir when scanning a directory to avoid some problems with `sudo -u` (Christophe Courtois). - New check_pgb_version service (suggested by Christophe Courtois). 2019-03-18 v1.5: - Order archived WALs list by filename to validate if none is missing. - Add --debug option to print some debug messages. - Add ignore-archived-since argument to ignore the archived WALs since the provided interval. - Add --latest-archive-age-alert to define the max age of the latest archived WAL before raising a critical alert. check_pgbackrest-REL2_2/INSTALL.md000066400000000000000000000014121415336775100167250ustar00rootroot00000000000000## Manual installation To handle the json output format of the pgBackRest info command, you need to install the following module: - on RedHat-like: `perl-JSON` - on Debian-like: `libjson-perl` The Data::Dump perl module is also needed: - On RedHat-like: `perl-Data-Dumper` - On Debian-like: `libdata-dump-perl` ----- ## PGDG packages ### RPM To install check_pgbackrest using the PGDG repositories: ``` yum install -y epel-release yum install -y nagios-plugins-pgbackrest ``` The rpm will also require nagios-plugins to be installed and put the `check_pgbackrest` script there. That's why the epel-release package is needed too. ### DEB To install check_pgbackrest using the PGDG repositories (located in `/usr/bin`): ``` apt-get -y install check-pgbackrest ``` check_pgbackrest-REL2_2/LICENSE000066400000000000000000000017361415336775100163130ustar00rootroot00000000000000PostgreSQL License Copyright (c) 2018-2020, DALIBO Copyright (c) 2020-2021, Stefan Fercot Permission to use, copy, modify, and distribute this software and its documentation for any purpose, without fee, and without a written agreement is hereby granted, provided that the above copyright notice and this paragraph and the following two paragraphs appear in all copies. IN NO EVENT SHALL Stefan Fercot BE LIABLE TO ANY PARTY FOR DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES, INCLUDING LOST PROFITS, ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN IF Stefan Fercot HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. Stefan Fercot SPECIFICALLY DISCLAIMS ANY WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS ON AN "AS IS" BASIS, AND Stefan Fercot HAS NO OBLIGATIONS TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS. check_pgbackrest-REL2_2/README000066400000000000000000000122711415336775100161620ustar00rootroot00000000000000NAME check_pgbackrest - pgBackRest backup check plugin for Nagios SYNOPSIS check_pgbackrest [-s|--service SERVICE] [-S|--stanza NAME] check_pgbackrest [-l|--list] check_pgbackrest [--help] DESCRIPTION check_pgbackrest is designed to monitor pgBackRest (2.33 and above) backups from Nagios. -s, --service SERVICE The Nagios service to run. See section SERVICES for a description of available services or use "--list" for a short service and description list. -S, --stanza NAME Name of the stanza to check. --repo REPOSITORY Repository index to operate on. If no "--repo" argument is provided, the service will operate on all repositories defined, checking for inconsistencies across multiple repositories. When using multiple repositories, it is recommended to also define checks using the "--repo" argument to verify the sanity of each repository separately. -O, --output OUTPUT_FORMAT The output format. 
Supported outputs are: "human", "json", "nagios" (default), "nagios_strict" and "prtg". The "nagios_strict" output format will filter out unsupported types of values from the performance data. -C, --command FILE pgBackRest executable file (default: "pgbackrest"). -c, --config CONFIGURATION_FILE pgBackRest configuration file. -P, --prefix COMMAND Some prefix command to execute the pgBackRest info command (eg: "sudo -iu postgres"). -l, --list List available services. --debug Print some debug messages. -V, --version Print version and exit. -?, --help Show this help page. SERVICES Descriptions and parameters of available services. retention Fail when the number of full backups is less than the "--retention-full" argument. Fail when the number of differential backups is less than the "--retention-diff" argument. Fail when the number of incremental backups is less than the "--retention-incr" argument. Fail when the newest backup is older than the "--retention-age" argument. Fail when the newest full backup is older than the "--retention-age-to-full" argument. Fail when the oldest backup is newer than the "--retention-age-to-oldest" argument. The following units are accepted (not case sensitive): s (second), m (minute), h (hour), d (day). You can use more than one unit per given value. Arguments are not mandatory to only show some information. archives Check if all archived WALs exist between the oldest and the latest WAL needed for the recovery. Use the "--wal-segsize" argument to set the WAL segment size. The following units are accepted (not case sensitive): b (Byte), k (KB), m (MB), g (GB), t (TB), p (PB), e (EB) or Z (ZB). Only integers are accepted. Eg. "1.5MB" will be refused, use "1500kB". The factor between units is 1024 bytes. Eg. "1g = 1G = 1024*1024*1024." Use the "--ignore-archived-before" argument to ignore the archived WALs generated before the provided interval. Used to only check the latest archives. Use the "--ignore-archived-after" argument to ignore the archived WALs generated after the provided interval. The "--latest-archive-age-alert" argument defines the max age of the latest archived WAL as an interval before raising a critical alert. The following units are accepted as interval (not case sensitive): s (second), m (minute), h (hour), d (day). You can use more than one unit per given value. If not set, the last unit is in seconds. Eg. "1h 55m 6" = "1h55m6s". All the missing archives are only shown in the "--debug" mode. Use "--list-archives" in addition with "--debug" to print the list of all the archived WAL segments. By default, all the archives older than the oldest backup start archive or newer than the max_wal returned by the pgBackRest info command are ignored. Use the "--extended-check" argument to force a full check of the found archives and raise warnings in case of inconsistencies. When WAL archives on different timelines are found, .history files are parsed to find the switch point and define the boundary WAL. Use the "--max-archives-check-number" to prevent infinite WAL archives check when boundary WAL can't be defined properly. check_pgb_version Check if this script is running a given version. You must provide the expected version using "--target-version". CONTRIBUTING check_pgbackrest is an open project. Any contribution to improve it is welcome. VERSION check_pgbackrest version 2.2, released on Mon Dec 06 2021. LICENSING This program is open source, licensed under the PostgreSQL license. For license terms, see the LICENSE file. AUTHORS Author: Stefan Fercot. 
Logo: Damien Cazeils (www.damiencazeils.com). Copyright: (c) 2018-2020, Dalibo / 2020-2021, Stefan Fercot. check_pgbackrest-REL2_2/README.pod000066400000000000000000000114741415336775100167470ustar00rootroot00000000000000=head1 NAME check_pgbackrest - pgBackRest backup check plugin for Nagios =head1 SYNOPSIS check_pgbackrest [-s|--service SERVICE] [-S|--stanza NAME] check_pgbackrest [-l|--list] check_pgbackrest [--help] =head1 DESCRIPTION check_pgbackrest is designed to monitor pgBackRest (2.33 and above) backups from Nagios. =over =item B<-s>, B<--service> SERVICE The Nagios service to run. See section SERVICES for a description of available services or use C<--list> for a short service and description list. =item B<-S>, B<--stanza> NAME Name of the stanza to check. =item B<--repo> REPOSITORY Repository index to operate on. If no C<--repo> argument is provided, the service will operate on all repositories defined, checking for inconsistencies across multiple repositories. When using multiple repositories, it is recommended to also define checks using the C<--repo> argument to verify the sanity of each repository separately. =item B<-O>, B<--output> OUTPUT_FORMAT The output format. Supported outputs are: C<human>, C<json>, C<nagios> (default), C<nagios_strict> and C<prtg>. The C<nagios_strict> output format will filter out unsupported types of values from the performance data. =item B<-C>, B<--command> FILE pgBackRest executable file (default: "pgbackrest"). =item B<-c>, B<--config> CONFIGURATION_FILE pgBackRest configuration file. =item B<-P>, B<--prefix> COMMAND Some prefix command to execute the pgBackRest info command (eg: "sudo -iu postgres"). =item B<-l>, B<--list> List available services. =item B<--debug> Print some debug messages. =item B<-V>, B<--version> Print version and exit. =item B<-?>, B<--help> Show this help page. =back =head2 SERVICES Descriptions and parameters of available services. =over =item B<retention> Fail when the number of full backups is less than the C<--retention-full> argument. Fail when the number of differential backups is less than the C<--retention-diff> argument. Fail when the number of incremental backups is less than the C<--retention-incr> argument. Fail when the newest backup is older than the C<--retention-age> argument. Fail when the newest full backup is older than the C<--retention-age-to-full> argument. Fail when the oldest backup is newer than the C<--retention-age-to-oldest> argument. The following units are accepted (not case sensitive): s (second), m (minute), h (hour), d (day). You can use more than one unit per given value. Arguments are not mandatory to only show some information. =item B<archives> Check if all archived WALs exist between the oldest and the latest WAL needed for the recovery. Use the C<--wal-segsize> argument to set the WAL segment size. The following units are accepted (not case sensitive): b (Byte), k (KB), m (MB), g (GB), t (TB), p (PB), e (EB) or Z (ZB). Only integers are accepted. Eg. C<1.5MB> will be refused, use C<1500kB>. The factor between units is 1024 bytes. Eg. C<1g = 1G = 1024*1024*1024.> Use the C<--ignore-archived-before> argument to ignore the archived WALs generated before the provided interval. Used to only check the latest archives. Use the C<--ignore-archived-after> argument to ignore the archived WALs generated after the provided interval. The C<--latest-archive-age-alert> argument defines the max age of the latest archived WAL as an interval before raising a critical alert. 
The following units are accepted as interval (not case sensitive): s (second), m (minute), h (hour), d (day). You can use more than one unit per given value. If not set, the last unit is in seconds. Eg. "1h 55m 6" = "1h55m6s". All the missing archives are only shown in the C<--debug> mode. Use C<--list-archives> in addition with C<--debug> to print the list of all the archived WAL segments. By default, all the archives older than the oldest backup start archive or newer than the max_wal returned by the pgBackRest info command are ignored. Use the C<--extended-check> argument to force a full check of the found archives and raise warnings in case of inconsistencies. When WAL archives on different timelines are found, .history files are parsed to find the switch point and define the boundary WAL. Use the C<--max-archives-check-number> to prevent infinite WAL archives check when boundary WAL can't be defined properly. =item B<check_pgb_version> Check if this script is running a given version. You must provide the expected version using C<--target-version>. =back =head1 CONTRIBUTING check_pgbackrest is an open project. Any contribution to improve it is welcome. =head1 VERSION check_pgbackrest version 2.2, released on Mon Dec 06 2021. =head1 LICENSING This program is open source, licensed under the PostgreSQL license. For license terms, see the LICENSE file. =head1 AUTHORS Author: Stefan Fercot. Logo: Damien Cazeils (www.damiencazeils.com). Copyright: (c) 2018-2020, Dalibo / 2020-2021, Stefan Fercot. check_pgbackrest-REL2_2/RELEASING.md000066400000000000000000000037421415336775100171400ustar00rootroot00000000000000# Releasing ## Source code Edit variable `$VERSION` in `check_pgbackrest`, and update the version field at the end of the in-line documentation in this script. Use date format `LC_TIME=C date +"%a %b %d %Y"`. Update the `CHANGELOG.md` file too. ## Documentation Generate updated documentation: ```bash pod2text check_pgbackrest > README podselect check_pgbackrest > README.pod ``` ## Tagging and building tar file ```bash TAG=REL2_2 git tag -a $TAG -m "Release $TAG" git tag git push --tags git archive --prefix=check_pgbackrest-$TAG/ -o /tmp/check_pgbackrest-2.2.tar.gz $TAG ``` ## Release on GitHub - Go to https://github.com/pgstef/check_pgbackrest/releases - Edit the release notes for the new tag - Set "check_pgbackrest $VERSION" as title, eg. "check_pgbackrest 2.2" - Here is the format of the release note itself: YYYY-MM-DD - Version X.Y Changelog: * item 1 * item 2 * ... - Upload the tar file - Save - Check or update https://github.com/pgstef/check_pgbackrest/releases ## Community ### Submit a news item on postgresql.org * Title: "check_pgbackrest 2.2 has been released" * Content: ``` _Town, Country, Month xx, 2021_ `check_pgbackrest` is designed to monitor [pgBackRest](https://pgbackrest.org) backups from Nagios, relying on the status information given by the [info](https://pgbackrest.org/command.html#command-info) command. It allows you to monitor the backup retention and the consistency of the archived WAL segments. Changes in check_pgbackrest 2.2 ------------------------------------------------------------------------------ * ... * ... Links & Credits -------------------------------------------------------------------------------- This is an open project, licensed under the PostgreSQL license. Any contribution to improve it is welcome. 
Links: * Download: https://github.com/pgstef/check_pgbackrest/releases * Support: https://github.com/pgstef/check_pgbackrest/issues ``` * check "Related Open Source" check_pgbackrest-REL2_2/check_pgbackrest000077500000000000000000001300321415336775100205060ustar00rootroot00000000000000#!/usr/bin/env perl #----------------------------------------------------------------------------- # This program is open source, licensed under the PostgreSQL license. # For license terms, see the LICENSE file. # # Author: Stefan Fercot # Copyright: (c) 2018-2020, Dalibo. # Copyright: (c) 2020-2021, Stefan Fercot. #----------------------------------------------------------------------------- =head1 NAME check_pgbackrest - pgBackRest backup check plugin for Nagios =head1 SYNOPSIS check_pgbackrest [-s|--service SERVICE] [-S|--stanza NAME] check_pgbackrest [-l|--list] check_pgbackrest [--help] =head1 DESCRIPTION check_pgbackrest is designed to monitor pgBackRest (2.33 and above) backups from Nagios. =cut use vars qw($VERSION $PROGRAM $PGBR_SUPPORT $INIT_TIME); use strict; use warnings; use POSIX; use Data::Dumper; use File::Basename; use File::Spec; use File::Find; use Getopt::Long qw(:config bundling no_ignore_case_always); use Pod::Usage; use Config; use FindBin; # Display error message if some specific modules are not loaded BEGIN { my(@DBs, @missingDBs, $mod); @DBs = qw(JSON); for $mod (@DBs) { if (eval "require $mod") { $mod->import(); } else { push @missingDBs, $mod; } } die "@missingDBs module(s) not loaded.\n" if @missingDBs; } # Messing with PATH so pod2usage always finds this script my @path = split /$Config{'path_sep'}/ => $ENV{'PATH'}; push @path => $FindBin::Bin; $ENV{'PATH'} = join $Config{'path_sep'} => @path; undef @path; # Reference to the output sub my $output_fmt; $VERSION = '2.2'; $PROGRAM = 'check_pgbackrest'; $PGBR_SUPPORT = '2.33'; $INIT_TIME = time(); # Available services and descriptions. #----------------------------------------------------------------------------- my %services = ( 'retention' => { 'sub' => \&check_retention, 'desc' => 'Check the retention policy.', 'stanza-arg' => 1 }, 'archives' => { 'sub' => \&check_wal_archives, 'desc' => 'Check WAL archives.', 'stanza-arg' => 1 }, 'check_pgb_version' => { 'sub' => \&check_pgb_version, 'desc' => 'Check the version of this check_pgbackrest script.', 'stanza-arg' => 0 } ); =over =item B<-s>, B<--service> SERVICE The Nagios service to run. See section SERVICES for a description of available services or use C<--list> for a short service and description list. =item B<-S>, B<--stanza> NAME Name of the stanza to check. =item B<--repo> REPOSITORY Repository index to operate on. If no C<--repo> argument is provided, the service will operate on all repositories defined, checking for inconsistencies across multiple repositories. When using multiple repositories, it is recommended to also define checks using the C<--repo> argument to verify the sanity of each repository separately. =item B<-O>, B<--output> OUTPUT_FORMAT The output format. Supported outputs are: C<human>, C<json>, C<nagios> (default), C<nagios_strict> and C<prtg>. The C<nagios_strict> output format will filter out unsupported types of values from the performance data. =item B<-C>, B<--command> FILE pgBackRest executable file (default: "pgbackrest"). =item B<-c>, B<--config> CONFIGURATION_FILE pgBackRest configuration file. =item B<-P>, B<--prefix> COMMAND Some prefix command to execute the pgBackRest info command (eg: "sudo -iu postgres"). =item B<-l>, B<--list> List available services. =item B<--debug> Print some debug messages. 
=item B<-V>, B<--version> Print version and exit. =item B<-?>, B<--help> Show this help page. =back =cut my %args = ( 'command' => 'pgbackrest', 'output' => 'nagios', 'wal-segsize' => '16MB', 'default-pgbackrest-config-file' => '/etc/pgbackrest.conf', ); # Set name of the program without path* my $orig_name = $0; $0 = $PROGRAM; # Die on kill -1, -2, -3 or -15 $SIG{'HUP'} = $SIG{'INT'} = $SIG{'QUIT'} = $SIG{'TERM'} = \&terminate; # Handle SIG sub terminate { my ($signal) = @_; die ("SIG $signal caught."); } # Print the version and exit sub version { printf "%s version %s, Perl %vd\n", $PROGRAM, $VERSION, $^V; exit 0; } # List services that can be performed sub list_services { print "List of available services:\n\n"; foreach my $service ( sort keys %services ) { printf "\t%-17s\t%s\n", $service, $services{$service}{'desc'}; } exit 0; } # Handle output formats #----------------------------------------------------------------------------- # Define which @longmsg keys will use TimeSeconds or Count units. my @TimeKeys = ("latest_bck_age", "latest_full_age", "latest_archive_age", "oldest_bck_age"); my @CountKeys = ("full", "diff", "incr", "num_unique_archives", "num_missing_archives"); sub dprint { return unless $args{'debug'}; foreach (@_) { print "DEBUG: $_"; } } sub unknown($;$$$) { return $output_fmt->( 3, $_[0], $_[1], $_[2], $_[3] ); } sub critical($;$$$) { return $output_fmt->( 2, $_[0], $_[1], $_[2], $_[3] ); } sub warning($;$$$) { return $output_fmt->( 1, $_[0], $_[1], $_[2], $_[3] ); } sub ok($;$$$) { return $output_fmt->( 0, $_[0], $_[1], $_[2], $_[3] ); } sub human_output ($$;$$$) { my $rc = shift; my $service = shift; my $ret; my @msg; my @longmsg; my @human_only_longmsg; @msg = @{ $_[0] } if defined $_[0]; @longmsg = @{ $_[1] } if defined $_[1]; @human_only_longmsg = @{ $_[2] } if defined $_[2]; $ret = sprintf "%-15s: %s\n", 'Service', $service; $ret .= sprintf "%-15s: 0 (%s)\n", "Returns", "OK" if $rc == 0; $ret .= sprintf "%-15s: 1 (%s)\n", "Returns", "WARNING" if $rc == 1; $ret .= sprintf "%-15s: 2 (%s)\n", "Returns", "CRITICAL" if $rc == 2; $ret .= sprintf "%-15s: 3 (%s)\n", "Returns", "UNKNOWN" if $rc == 3; $ret .= sprintf "%-15s: %s\n", "Message", $_ foreach @msg; $ret .= sprintf "%-15s: %s\n", "Long message", $_ foreach @longmsg; $ret .= sprintf "%-15s: %s\n", "Long message", $_ foreach @human_only_longmsg; print $ret; return $rc; } sub json_output ($$;$$$) { my $rc = shift; my $service = shift; my @msg; my @longmsg; my @human_only_longmsg; @msg = @{ $_[0] } if defined $_[0]; @longmsg = @{ $_[1] } if defined $_[1]; @human_only_longmsg = @{ $_[2] } if defined $_[2]; my %json_hash = ('service' => $service); my @rc_long = ("OK", "WARNING", "CRITICAL", "UNKNOWN"); $json_hash{'status'}{'code'} = $rc; $json_hash{'status'}{'message'} = $rc_long[$rc]; $json_hash{'message'} = join( ', ', @msg ) if @msg; foreach my $msg_to_split (@longmsg, @human_only_longmsg) { my ($key, $value) = split(/=/, $msg_to_split); $json_hash{'long_message'}{$key} = $value; } my $json_text = encode_json \%json_hash; print "[$json_text]"; return $rc; } sub nagios_output ($$;$$) { my $rc = shift; my $ret = shift; my @msg; my @longmsg; $ret .= " OK" if $rc == 0; $ret .= " WARNING" if $rc == 1; $ret .= " CRITICAL" if $rc == 2; $ret .= " UNKNOWN" if $rc == 3; @msg = @{ $_[0] } if defined $_[0]; @longmsg = @{ $_[1] } if defined $_[1]; $ret .= " - ". join( ', ', @msg ) if @msg; $ret .= " | ". 
join( ' ', @longmsg ) if @longmsg; print $ret; return $rc; } sub nagios_strict_output ($$;$$) { my $rc = shift; my $service = shift; my @msg; my @longmsg; @msg = @{ $_[0] } if defined $_[0]; @longmsg = @{ $_[1] } if defined $_[1]; # Generate TEXT message my $text; $text .= $service . " OK" if $rc == 0; $text .= $service . " WARNING" if $rc == 1; $text .= $service . " CRITICAL" if $rc == 2; $text .= $service . " UNKNOWN" if $rc == 3; $text .= " - ". join( ', ', @msg ) if @msg; # Enforce Nagios strict specs, filter out some keys my @longmsg_strict; foreach my $msg_to_split (@longmsg) { my ($key, $value) = split(/=/, $msg_to_split); if ( grep /^$key$/, @TimeKeys or grep /^$key$/, @CountKeys ) { push @longmsg_strict, $msg_to_split; } } $text .= " | ". join( ' ', @longmsg_strict ) if @longmsg_strict; print $text; return $rc; } sub prtg_output ($$;$$) { my $rc = shift; my $service = shift; my @msg; my @longmsg; my @textmsg; @msg = @{ $_[0] } if defined $_[0]; @longmsg = @{ $_[1] } if defined $_[1]; # Generate TEXT message my $text = "<text>"; $text .= $service . " OK" if $rc == 0; $text .= $service . " WARNING" if $rc == 1; $text .= $service . " CRITICAL" if $rc == 2; $text .= $service . " UNKNOWN" if $rc == 3; $text .= " - ". join( ', ', @msg ) if @msg; # Generate service status result # NOTE: the XML element names below were reconstructed (the original markup was stripped); # they follow the PRTG custom sensor output structure <prtg><result>...</result><text>...</text>. my $results = "<result><channel>status</channel><value>$rc</value>"; $results .= "<limitmaxwarning>0</limitmaxwarning>"; $results .= "<limitmaxerror>1</limitmaxerror>"; $results .= "<limitmode>1</limitmode></result>"; foreach my $msg_to_split (@longmsg) { my ($key, $value) = split(/=/, $msg_to_split); if ( grep /^$key$/, @TimeKeys ) { chop($value); $results .= "<result><channel>$key</channel><value>$value</value><unit>TimeSeconds</unit></result>"; } elsif ( grep /^$key$/, @CountKeys ) { $results .= "<result><channel>$key</channel><value>$value</value><unit>Count</unit></result>"; } else { # Add extra keys to the text message push @textmsg, $msg_to_split; } } $text .= " - ". join( ', ', @textmsg ) if @textmsg; $text .= "</text>"; print "<prtg>" . $results . $text. "</prtg>"; return $rc; } # Handle time intervals #----------------------------------------------------------------------------- sub is_time($){ my $str_time = lc( shift() ); return 1 if ( $str_time =~ /^(\s*([0-9]\s*[smhd]?\s*))+$/ ); return 0; } # Return formatted time string with units. # Parameter: duration in seconds sub to_interval($) { my $val = shift; my $interval = ''; return $val if $val =~ /^-?inf/i; $val = int($val); if ( $val > 604800 ) { $interval = int( $val / 604800 ) . "w "; $val %= 604800; } if ( $val > 86400 ) { $interval .= int( $val / 86400 ) . "d "; $val %= 86400; } if ( $val > 3600 ) { $interval .= int( $val / 3600 ) . "h"; $val %= 3600; } if ( $val > 60 ) { $interval .= int( $val / 60 ) . "m"; $val %= 60; } $interval .= "${val}s" if $val > 0; return "${val}s" unless $interval; # Return a value if $val <= 0 return $interval; } sub to_interval_output_dependent($) { my $val = shift; my $interval = ''; return $val if $val =~ /^-?inf/i; $val = int($val); return to_interval($val) unless $args{'output'} =~ /^(nagios|nagios_strict|prtg)$/; return "${val}s"; } # Return a duration in seconds from an interval (with units). sub get_time($) { my $str_time = lc( shift() ); my $ts = 0; my @date; die( "Malformed interval: «$str_time»!\n" . "Authorized unit are: dD, hH, mM, sS.\n" ) unless is_time($str_time); # No bad units should exist after this line! 
@date = split( /([smhd])/, $str_time ); LOOP_TS: while ( my $val = shift @date ) { $val = int($val); die("Wrong value for an interval: «$val»!") unless defined $val; my $unit = shift(@date) || ''; if ( $unit eq 'm' ) { $ts += $val * 60; next LOOP_TS; } if ( $unit eq 'h' ) { $ts += $val * 3600; next LOOP_TS; } if ( $unit eq 'd' ) { $ts += $val * 86400; next LOOP_TS; } $ts += $val; } return $ts; } # Handle size units #----------------------------------------------------------------------------- # Return a size in bytes from a size with unit. # If unit is '%', use the second parameter to compute the size in bytes. sub get_size($;$) { my $str_size = shift; my $size = 0; my $unit = ''; die "Only integers are accepted as size. Adjust the unit to your need.\n" if $str_size =~ /[.,]/; $str_size =~ /^([0-9]+)(.*)$/; $size = int($1); $unit = lc($2); return $size unless $unit ne ''; if ( $unit eq '%' ) { my $ratio = shift; die("Can't compute a ratio without the factor!\n") unless defined $unit; return int( $size * $ratio / 100 ); } return $size if $unit eq 'b'; return $size * 1024 if $unit =~ '^k[bo]?$'; return $size * 1024**2 if $unit =~ '^m[bo]?$'; return $size * 1024**3 if $unit =~ '^g[bo]?$'; return $size * 1024**4 if $unit =~ '^t[bo]?$'; return $size * 1024**5 if $unit =~ '^p[bo]?$'; return $size * 1024**6 if $unit =~ '^e[bo]?$'; return $size * 1024**7 if $unit =~ '^z[bo]?$'; die("Unknown size unit: $unit\n"); } # Interact with pgBackRest #----------------------------------------------------------------------------- sub pgbackrest_info { my $infocmd = $args{'command'}." info"; $infocmd .= " --stanza=".$args{'stanza'}; $infocmd .= " --output=json --log-level-console=error"; if(defined $args{'config'}) { $infocmd .= " --config=".$args{'config'}; } if(defined $args{'repo'}) { $infocmd .= " --repo=".$args{'repo'}; } if(defined $args{'prefix'}) { $infocmd = $args{'prefix'}." $infocmd"; } dprint("pgBackRest info command was : '$infocmd'\n"); my $json_output = `$infocmd 2>&1 |grep -v ERROR`; die("Can't get pgBackRest info.\nCommand was '$infocmd'.\n") unless ($? eq 0); my $decoded_json = decode_json($json_output); foreach my $line (@{$decoded_json}) { return $line if($line->{'name'} eq $args{'stanza'}); } return; } sub pgbackrest_get { my $args_ref = shift; my %args = %{ $args_ref }; my $directory = shift; my $filename = shift; my $repo_key = shift; pod2usage( -message => 'FATAL: Unsupported pgBackRest version.', -exitval => 127 ) if ( pgbackrest_version(\%args) < $PGBR_SUPPORT ); my $getcmd = $args{'command'}." repo-get"; $getcmd .= " --stanza=".$args{'stanza'}; $getcmd .= " ".$directory."/".$filename; $getcmd .= " --log-level-console=error"; $getcmd .= " --repo=".$repo_key; if(defined $args{'config'}) { $getcmd .= " --config=".$args{'config'}; } if(defined $args{'prefix'}) { $getcmd = $args{'prefix'}." $getcmd"; } dprint("pgBackRest get command was : '$getcmd'\n"); my $history_content = `$getcmd 2>&1 |grep -v ERROR`; die("Can't get pgBackRest file content.\nCommand was '$getcmd'.\n") unless ($? eq 0); return $history_content; } sub pgbackrest_ls { my $args_ref = shift; my %args = %{ $args_ref }; my $directory = shift; my $repo_key = shift; my $recurse = shift; pod2usage( -message => 'FATAL: Unsupported pgBackRest version.', -exitval => 127 ) if ( pgbackrest_version(\%args) < $PGBR_SUPPORT ); my $lscmd = $args{'command'}." 
repo-ls"; $lscmd .= " --stanza=".$args{'stanza'}; $lscmd .= " ".$directory; $lscmd .= " --output=json --log-level-console=error"; $lscmd .= " --repo=".$repo_key; if($recurse) { $lscmd .= " --recurse"; } if(defined $args{'config'}) { $lscmd .= " --config=".$args{'config'}; } if(defined $args{'prefix'}) { $lscmd = $args{'prefix'}." $lscmd"; } dprint("pgBackRest ls command was : '$lscmd'\n"); my $json_output = `$lscmd 2>&1 |grep -v ERROR`; die("Can't get pgBackRest list.\nCommand was '$lscmd'.\n") unless ($? eq 0); return decode_json($json_output); } sub pgbackrest_version { my $args_ref = shift; my %args = %{ $args_ref }; my $version_cmd = $args{'command'}." version"; if(defined $args{'config'}) { $version_cmd .= " --config=".$args{'config'}; } if(defined $args{'prefix'}) { $version_cmd = $args{'prefix'}." $version_cmd"; } dprint("pgBackRest version command was : '$version_cmd'\n"); my $pgbackrest_version = `$version_cmd | sed -e s/pgBackRest\\ // | sed -e s/dev//`; die("Can't get pgBackRest version.\nCommand was '$version_cmd'.\n") unless ($? eq 0); return $pgbackrest_version; } # Services #----------------------------------------------------------------------------- =head2 SERVICES Descriptions and parameters of available services. =over =item B Fail when the number of full backups is less than the C<--retention-full> argument. Fail when the number of differential backups is less than the C<--retention-diff> argument. Fail when the number of incremental backups is less than the C<--retention-incr> argument. Fail when the newest backup is older than the C<--retention-age> argument. Fail when the newest full backup is older than the C<--retention-age-to-full> argument. Fail when the oldest backup is newer than the C<--retention-age-to-oldest> argument. The following units are accepted (not case sensitive): s (second), m (minute), h (hour), d (day). You can use more than one unit per given value. Arguments are not mandatory to only show some information. 
=cut sub check_retention { my $me = 'BACKUPS_RETENTION'; my %args = %{ $_[0] }; my @msg; my @warn_msg; my @crit_msg; my @longmsg; # When using the --repo option, pgBackRest info will apply the repository filter my $backups_info = pgbackrest_info(); die("Can't get pgBackRest info.\n") unless (defined $backups_info); if($backups_info->{'status'}->{'code'} == 0) { # List each repository content my @backups_dir_content; foreach my $repo (@{$backups_info->{'repo'}}) { my $backups_dir = "backup/".$args{'stanza'}; # Relative path inside repository dprint("repo".$repo->{'key'}.", backups_dir: $backups_dir\n"); $backups_dir_content[$repo->{'key'}] = pgbackrest_ls(\%args, $backups_dir, $repo->{'key'}, 0); } # List backups per type and check consistency between backup info and real repository content my @full_bck; my @diff_bck; my @incr_bck; foreach my $line (@{$backups_info->{'backup'}}) { push @full_bck, $line if($line->{'type'} eq "full"); push @diff_bck, $line if($line->{'type'} eq "diff"); push @incr_bck, $line if($line->{'type'} eq "incr"); my $backup_label = $line->{'label'}; my $repo_key = $line->{'database'}->{'repo-key'}; unless(defined $backups_dir_content[$repo_key]->{$backup_label} and $backups_dir_content[$repo_key]->{$backup_label}->{'type'} eq 'path') { push @crit_msg, "$backup_label directory missing in repo$repo_key"; } # Check if any error was detected during the backup (reported in the json output since pgBackRest 2.36) if(defined $line->{'error'} && $line->{'error'}){ push @crit_msg, "error(s) detected during backup $backup_label (repo$repo_key)"; } } push @longmsg, "full=".scalar(@full_bck); push @longmsg, "diff=".scalar(@diff_bck); push @longmsg, "incr=".scalar(@incr_bck); # Check retention-full if(defined $args{'retention-full'} and scalar(@full_bck) < $args{'retention-full'}) { push @crit_msg, "not enough full backups: ".$args{'retention-full'}." required"; } # Check retention-diff if(defined $args{'retention-diff'} and scalar(@diff_bck) < $args{'retention-diff'}) { push @crit_msg, "not enough differential backups: ".$args{'retention-diff'}." required"; } # Check retention-incr if(defined $args{'retention-incr'} and scalar(@incr_bck) < $args{'retention-incr'}) { push @crit_msg, "not enough incremental backups: ".$args{'retention-incr'}." 
required"; } # Check latest age # Backup age considered at pg_stop_backup my $latest_bck = @{$backups_info->{'backup'}}[-1]; my $latest_bck_age = time() - $latest_bck->{'timestamp'}->{'stop'}; push @longmsg, "latest_bck=".$latest_bck->{'label'}; push @longmsg, "latest_bck_type=".$latest_bck->{'type'}; push @longmsg, "latest_bck_age=".to_interval_output_dependent($latest_bck_age); if(defined $args{'retention-age'}){ my $bck_age_limit = get_time($args{'retention-age'} ); push @crit_msg, "backups are too old" if $latest_bck_age >= $bck_age_limit; } # Check latest full backup age if(defined $args{'retention-age-to-full'}){ my $latest_full_bck = $full_bck[-1]; my $latest_full_bck_age = time() - $latest_full_bck->{'timestamp'}->{'stop'}; push @longmsg, "latest_full=".$latest_full_bck->{'label'}; push @longmsg, "latest_full_age=".to_interval_output_dependent($latest_full_bck_age); my $bck_age_limit = get_time($args{'retention-age-to-full'} ); push @crit_msg, "full backups are too old" if $latest_full_bck_age >= $bck_age_limit; } # Check oldest age my $oldest_bck = @{$backups_info->{'backup'}}[0]; my $oldest_bck_age = time() - $oldest_bck->{'timestamp'}->{'stop'}; push @longmsg, "oldest_bck=".$oldest_bck->{'label'}; push @longmsg, "oldest_bck_age=".to_interval_output_dependent($oldest_bck_age); if(defined $args{'retention-age-to-oldest'}){ my $bck_age_limit = get_time($args{'retention-age-to-oldest'} ); push @crit_msg, "backups are too young" if $oldest_bck_age < $bck_age_limit; } }else{ # Get the exact status code per repository foreach my $repo (@{$backups_info->{'repo'}}) { push @crit_msg, "repo".$repo->{'key'}.": ".$repo->{'status'}->{'message'} if $repo->{'status'}->{'code'} gt 0; } } return critical($me, \@crit_msg, \@longmsg) if @crit_msg; return warning($me, \@warn_msg, \@longmsg) if @warn_msg; push @msg, "backups policy checks ok"; return ok( $me, \@msg, \@longmsg ); } =item B Check if all archived WALs exist between the oldest and the latest WAL needed for the recovery. Use the C<--wal-segsize> argument to set the WAL segment size. The following units are accepted (not case sensitive): b (Byte), k (KB), m (MB), g (GB), t (TB), p (PB), e (EB) or Z (ZB). Only integers are accepted. Eg. C<1.5MB> will be refused, use C<1500kB>. The factor between units is 1024 bytes. Eg. C<1g = 1G = 1024*1024*1024.> Use the C<--ignore-archived-before> argument to ignore the archived WALs generated before the provided interval. Used to only check the latest archives. Use the C<--ignore-archived-after> argument to ignore the archived WALs generated after the provided interval. The C<--latest-archive-age-alert> argument defines the max age of the latest archived WAL as an interval before raising a critical alert. The following units are accepted as interval (not case sensitive): s (second), m (minute), h (hour), d (day). You can use more than one unit per given value. If not set, the last unit is in seconds. Eg. "1h 55m 6" = "1h55m6s". All the missing archives are only shown in the C<--debug> mode. Use C<--list-archives> in addition with C<--debug> to print the list of all the archived WAL segments. By default, all the archives older than the oldest backup start archive or newer than the max_wal returned by the pgBackRest info command are ignored. Use the C<--extended-check> argument to force a full check of the found archives and raise warnings in case of inconsistencies. When WAL archives on different timelines are found, .history files are parsed to find the switch point and define the boundary WAL. 
Use the C<--max-archives-check-number> to prevent infinite WAL archives check when boundary WAL can't be defined properly. =cut sub get_archived_wal_list { my $min_wal = shift; my $max_wal = shift; my $args_ref = shift; my %args = %{ $args_ref }; my $archives_dir = shift; my $suffix = "(\.(gz|lz4|zst|xz|bz2))?"; my %filelist; my @branch_wals; my $filename_re_full = qr/[0-9A-F]{24}.*$suffix$/; my $start_tl = substr($min_wal, 0, 8); my $end_tl = substr($max_wal, 0, 8); my $history_re_full = qr/$end_tl.history$/; foreach my $repo_key (keys %{$archives_dir}) { dprint("repo$repo_key, archives_dir: ".$archives_dir->{$repo_key}."\n"); my $list = pgbackrest_ls(\%args, $archives_dir->{$repo_key}, $repo_key, 1); foreach my $key (keys %{$list}) { next unless $list->{$key}->{'type'} eq 'file'; my @split_tab = split('/', $key); my $filename = $split_tab[-1]; if($filename =~ /$filename_re_full/){ # Get stats of the archived WALs if ( $args{'ignore-archived-after'} or $args{'ignore-archived-before'} ) { my $diff_epoch = $INIT_TIME - $list->{$key}->{'time'}; if ( $args{'ignore-archived-after'} && $diff_epoch <= get_time($args{'ignore-archived-after'}) ){ dprint ("ignored file ".$filename." as interval since epoch : ".to_interval($diff_epoch)."\n"); next; } if ( $args{'ignore-archived-before'} && $diff_epoch >= get_time($args{'ignore-archived-before'}) ){ dprint ("ignored file ".$filename." as interval since epoch : ".to_interval($diff_epoch)."\n"); next; } } my $segname = substr($filename, 0, 24); if ( ! $args{'extended-check'} && $segname lt $min_wal ){ dprint ("ignored file ".$segname." older than ".$min_wal."\n"); next; } if ( ! $args{'extended-check'} && $segname gt $max_wal ){ dprint ("ignored file ".$segname." newer than ".$max_wal."\n"); next; } # Only add the file in the list if not already found in a previous loop/repository unless(defined $filelist{$segname}){ $filelist{$segname} = [$segname, $filename, $list->{$key}->{'time'}, $list->{$key}->{'size'}, $archives_dir->{$repo_key}."/$key"]; } }elsif($filename =~ /$history_re_full/ && $start_tl ne $end_tl){ # Look for the last history file if needed dprint("history file to open : ".$archives_dir->{$repo_key}."/$key\n"); my $history_content = pgbackrest_get(\%args, $archives_dir->{$repo_key}, $filename, $repo_key); my @history_lines = split /\n/, $history_content; foreach my $line ( @history_lines ){ my $line_re = qr/^\s*(\d)\t([0-9A-F]+)\/([0-9A-F]+)\t.*$/; $line =~ /$line_re/ || next; push @branch_wals => sprintf("%08d%08s%08X", $1, $2, hex($3)>>24); } } } } my @unique_branch_wals = do { my %seen; grep { !$seen{$_}++ } @branch_wals }; return(\%filelist, \@unique_branch_wals); } sub generate_needed_wal_archives_list { my $min_wal = shift; my $max_wal = shift; my $branch_wals_ref = shift; my @branch_wals = @{ $branch_wals_ref }; my $seg_per_wal = shift; my $start_tl = substr($min_wal, 0, 8); my $end_tl = substr($max_wal, 0, 8); my $timeline = hex($start_tl); my $wal = hex(substr($min_wal, 8, 8)); my $seg = hex(substr($min_wal, 16, 8)); my $args_ref = shift; my %args = %{ $args_ref }; # Generate list my $curr = $min_wal; my @needed_wal_archives_list; push @needed_wal_archives_list, $min_wal; for ( my $i=0, my $j=1; $curr lt $max_wal ; $i++, $j++ ) { $curr = sprintf('%08X%08X%08X', $timeline, $wal + int(($seg + $j)/$seg_per_wal), ($seg + $j)%$seg_per_wal ); if ( grep /$curr/, @branch_wals ) { dprint("found a boundary @ '$curr' !\n"); $timeline++; $j--; next; }else{ push @needed_wal_archives_list, $curr; } # Break the loop in case 
max-archives-check-number is defined # Infinite loop might happen when there's a timeline switch but boundary WAL isn't detected correctly die("max-archives-check-number limit exceeded.\n") if ( defined $args{'max-archives-check-number'} and scalar(@needed_wal_archives_list) > $args{'max-archives-check-number'} ); } my @unique_needed_wal_archives_list = do { my %seen; grep { !$seen{$_}++ } @needed_wal_archives_list }; return sort @unique_needed_wal_archives_list; } sub check_wal_archives { my $me = 'WAL_ARCHIVES'; my %args = %{ $_[0] }; my @msg; my @warn_msg; my @crit_msg; my @longmsg; my @human_only_longmsg; # When using the --repo option, pgBackRest info will apply the repository filter my $start_time = time(); my $backups_info = pgbackrest_info(); die("Can't get pgBackRest info.\n") unless (defined $backups_info); dprint("!> pgBackRest info took ".(time() - $start_time)."s\n"); if($backups_info->{'status'}->{'code'} == 0) { my %archives_dir; my $min_wal; my $max_wal; foreach my $line (@{$backups_info->{'archive'}}) { my $repo_key = $line->{'database'}->{'repo-key'}; $archives_dir{$repo_key} = "archive/".$args{'stanza'}."/".$line->{'id'}; # Relative path inside repository $min_wal = $line->{'min'} if(not $min_wal or $line->{'min'} lt $min_wal); $max_wal = $line->{'max'} if(not $max_wal or $line->{'max'} gt $max_wal); } # Get the oldest backup info my $oldest_bck = @{$backups_info->{'backup'}}[0]; my $oldest_bck_archive_start = $oldest_bck->{'archive'}->{'start'}; # Change min_wal to oldest_bck_archive_start if ( $min_wal lt $oldest_bck_archive_start ) { $min_wal = $oldest_bck_archive_start; dprint ("min_wal changed to ".$min_wal."\n"); } # Get all the WAL archives and history files $start_time = time(); dprint("Get all the WAL archives and history files...\n"); my ($filelist_ref, $branch_wals_ref) = &get_archived_wal_list($min_wal, $max_wal, \%args, \%archives_dir); my %filelist; %filelist = %{ $filelist_ref } if $filelist_ref; my @filelist_simplified = sort(keys %filelist); my $first_wal_in_list = $filelist_simplified[0]; # identify first elem of hash array my $last_wal_in_list = $filelist_simplified[-1]; # identify last elem of hash array my @branch_wals; @branch_wals = @{ $branch_wals_ref } if $branch_wals_ref; return unknown $me, ['no archived WAL found'] unless %filelist; dprint("!> Get all the WAL archives and history files took ".(time() - $start_time)."s\n"); # Change min_wal if some archives are ignored if ( $args{'ignore-archived-before'} && $min_wal ) { $min_wal = $first_wal_in_list; dprint ("min_wal changed to ".$min_wal."\n"); } # Change max_wal if some archives are ignored if ( $args{'ignore-archived-after'} && $max_wal ) { $max_wal = $last_wal_in_list; dprint ("max_wal changed to ".$max_wal."\n"); } # Check min/max exists, start = min, last = max ? return critical $me, ['min WAL not found: '.$min_wal] if($min_wal && ! grep( /^$min_wal$/, @filelist_simplified )); return critical $me, ['max WAL not found: '.$max_wal] if($max_wal && ! 
grep( /^$max_wal$/, @filelist_simplified )); push @warn_msg, "min WAL is not the oldest archive" if($min_wal && $filelist{$first_wal_in_list}[0] lt $min_wal); push @warn_msg, "max WAL is not the latest archive" if($max_wal && $filelist{$last_wal_in_list}[0] gt $max_wal); my $latest_archive_age = time() - $filelist{$last_wal_in_list}[2]; my $num_archives = scalar(@filelist_simplified); push @longmsg, "latest_archive_age=".to_interval_output_dependent($latest_archive_age); push @longmsg, "num_unique_archives=$num_archives"; # Is the latest archive too old ? if ( $args{'latest-archive-age-alert'} && $latest_archive_age > get_time($args{'latest-archive-age-alert'})){ push @crit_msg => "latest_archive_age (".to_interval($latest_archive_age).") exceeded"; } push @msg, "$num_archives unique WAL archived"; push @msg, "latest archived since ". to_interval($latest_archive_age); # Get all the needed WAL archives based on min/max pgBackRest info my $wal_segsize = $args{'wal-segsize'}; my $walsize = '4GB'; # 4 TB -> bytes my $seg_per_wal = get_size($walsize) / get_size($wal_segsize); #Only for PG >= 9.3 my $dbver=($backups_info->{'db'}[0]->{'version'}+0)*10; $seg_per_wal-- if $dbver <= 92; dprint("Get all the needed WAL archives...\n"); $start_time = time(); my @needed_wal_archives_list=&generate_needed_wal_archives_list($min_wal, $max_wal, \@branch_wals, $seg_per_wal, \%args); dprint("!> Get all the needed WAL archives took ".(time() - $start_time)."s\n"); # Get the latest backup info my $latest_bck = @{$backups_info->{'backup'}}[-1]; my $latest_bck_archive_start = $latest_bck->{'archive'}->{'start'}; # Print human_only_longmsg push @human_only_longmsg, "min_wal=$min_wal" if $min_wal; push @human_only_longmsg, "max_wal=$max_wal" if $max_wal; push @human_only_longmsg, "latest_archive=".$filelist{$last_wal_in_list}[0]; push @human_only_longmsg, "latest_bck_archive_start=".$latest_bck_archive_start; push @human_only_longmsg, "latest_bck=".$latest_bck->{'label'}; push @human_only_longmsg, "latest_bck_type=".$latest_bck->{'type'}; push @human_only_longmsg, "oldest_archive=".$filelist{$first_wal_in_list}[0]; push @human_only_longmsg, "oldest_bck_archive_start=".$oldest_bck_archive_start; push @human_only_longmsg, "oldest_bck_type=".$oldest_bck->{'type'}; my @warn_missing_files; my @crit_missing_files; # Go through needed WAL list and check if it exists in the file list $start_time = time(); foreach my $needed_wal (@needed_wal_archives_list) { unless ( $filelist{ $needed_wal } ) { if($needed_wal lt $latest_bck_archive_start) { push @warn_missing_files => $needed_wal; }else{ push @crit_missing_files => $needed_wal; } } } dprint("!> Go through needed WAL list and check took ".(time() - $start_time)."s\n"); # Go through each backup to check their needed WAL archives $start_time = time(); foreach my $line (@{$backups_info->{'backup'}}){ dprint("Get all the needed WAL archives for ".$line->{'label'}."...\n"); # Ignore backups if archives are ignored my $diff_epoch_stop = $INIT_TIME - $line->{'timestamp'}->{'stop'}; if ( $args{'ignore-archived-after'} && $diff_epoch_stop <= get_time($args{'ignore-archived-after'}) ){ dprint ("ignored backup ".$line->{'label'}." as interval since epoch : ".to_interval($diff_epoch_stop)."\n"); next; } my $diff_epoch_start = $INIT_TIME - $line->{'timestamp'}->{'start'}; if ( $args{'ignore-archived-before'} && $diff_epoch_start >= get_time($args{'ignore-archived-before'}) ){ dprint ("ignored backup ".$line->{'label'}." 
as interval since epoch : ".to_interval($diff_epoch_start)."\n"); next; } foreach my $needed_wal (&generate_needed_wal_archives_list($line->{'archive'}->{'start'}, $line->{'archive'}->{'stop'}, \@branch_wals, $seg_per_wal, \%args)) { unless ( $filelist{ $needed_wal } ) { push @crit_missing_files => $needed_wal; } } } dprint("!> Go through each backup, get the needed WAL and check took ".(time() - $start_time)."s\n"); # Generate @warn_msg and @crit_msg with missing files (sorted and unique) my @unique_warn_missing_files = do { my %seen; grep { !$seen{$_}++ } @warn_missing_files }; my @unique_warn_missing_files_sorted = sort @unique_warn_missing_files; my $num_missing_archives = scalar(@unique_warn_missing_files_sorted); my $oldest_missing_archive = $unique_warn_missing_files_sorted[0] || '000000000000000000000000'; my $latest_missing_archive = $unique_warn_missing_files_sorted[-1] || '000000000000000000000000'; push @warn_msg, "wrong sequence, $num_missing_archives missing file(s) ($oldest_missing_archive / $latest_missing_archive)" if @warn_missing_files; push @crit_missing_files, @warn_missing_files if @warn_missing_files and @crit_missing_files; my @unique_crit_missing_files = do { my %seen; grep { !$seen{$_}++ } @crit_missing_files }; my @unique_crit_missing_files_sorted = sort @unique_crit_missing_files; $num_missing_archives = scalar(@unique_crit_missing_files_sorted); $oldest_missing_archive = $unique_crit_missing_files_sorted[0] || $oldest_missing_archive || '000000000000000000000000'; $latest_missing_archive = $unique_crit_missing_files_sorted[-1] || $latest_missing_archive || '000000000000000000000000'; push @crit_msg, "wrong sequence, $num_missing_archives missing file(s) ($oldest_missing_archive / $latest_missing_archive)" if @crit_missing_files; push @longmsg, "num_missing_archives=$num_missing_archives" if $num_missing_archives; push @longmsg, "oldest_missing_archive=$oldest_missing_archive" if $num_missing_archives; push @longmsg, "latest_missing_archive=$latest_missing_archive" if $num_missing_archives; # DEBUG print all missing archives if(@warn_missing_files and not @crit_missing_files) { foreach (@unique_warn_missing_files_sorted) { dprint("missing $_\n"); } }elsif(@crit_missing_files) { foreach (@unique_crit_missing_files_sorted) { dprint("missing $_\n"); } } # DEBUG print all archives if($args{'list-archives'}) { foreach (@filelist_simplified) { dprint("found $_\n"); } } }else{ # Get the exact status code per repository foreach my $repo (@{$backups_info->{'repo'}}) { push @crit_msg, "repo".$repo->{'key'}.": ".$repo->{'status'}->{'message'} if $repo->{'status'}->{'code'} gt 0; } } return critical($me, \@crit_msg, \@longmsg, \@human_only_longmsg) if @crit_msg; return warning($me, \@warn_msg, \@longmsg, \@human_only_longmsg) if @warn_msg; return ok( $me, \@msg, \@longmsg, \@human_only_longmsg); } =item B Check if this script is running a given version. You must provide the expected version using C<--target-version>. 
=cut sub check_pgb_version { my $me = 'CHECK_PGBACKREST_VERSION'; my %args = %{ $_[0] }; my @msg; my @warn_msg; my @crit_msg; my @longmsg; pod2usage( -message => 'FATAL: you must provide --target-version.', -exitval => 127 ) if not defined $args{'target-version'}; pod2usage( -message => "FATAL: given version does not look like a $PROGRAM version!", -exitval => 127 ) if ( defined $args{'target-version'} and $args{'target-version'} !~ m/^\d\.\d+(?:_?(?:dev|beta|rc)\d*)?$/ ); if (defined $args{'target-version'} and $VERSION ne $args{'target-version'}){ push @crit_msg, sprintf("%s version should be %s", $PROGRAM, $args{'target-version'}); push @longmsg, sprintf("%s version %s, Perl %vd", $PROGRAM, $VERSION, $^V); } return critical($me, \@crit_msg, \@longmsg) if @crit_msg; return warning($me, \@warn_msg, \@longmsg) if @warn_msg; push @msg, sprintf("%s version %s, Perl %vd", $PROGRAM, $VERSION, $^V); return ok( $me, \@msg, \@longmsg ); } # End of SERVICE section in pod doc =pod =back =cut Getopt::Long::Configure('bundling'); GetOptions( \%args, 'command|C=s', 'config|c=s', 'debug!', 'extended-check!', 'help|?!', 'ignore-archived-after=s', 'ignore-archived-before=s', 'latest-archive-age-alert=s', 'list|l!', 'list-archives|L!', 'max-archives-check-number=s', 'output|O=s', 'prefix|P=s', 'repo=s', 'retention-age=s', 'retention-age-to-full=s', 'retention-age-to-oldest=s', 'retention-diff=i', 'retention-full=i', 'retention-incr=i', 'service|s=s', 'stanza|S=s', 'target-version=s', 'version|V!', 'wal-segsize=s' ) or pod2usage( -exitval => 127 ); list_services() if $args{'list'}; version() if $args{'version'}; pod2usage( -verbose => 2 ) if $args{'help'}; pod2usage( -verbose => 1 ) unless defined $args{'service'}; # Check that the given service exists. pod2usage( -message => "FATAL: service $args{'service'} does not exist.\n" . " Use --list to show the available services.", -exitval => 127 ) unless exists $services{ $args{'service'} }; # The stanza name must be given if a service is specified and 'stanza-arg' is required pod2usage( -message => "FATAL: you must specify a stanza name.\n" . " See -S or --stanza command line option.", -exitval => 127 ) if defined $args{'service'} and $services{$args{'service'}}{'stanza-arg'} and not defined $args{'stanza'}; # Check "retention" specific args my @specific_args = ('retention-age', 'retention-age-to-full', 'retention-age-to-oldest', 'retention-diff', 'retention-full', 'retention-incr'); foreach( @specific_args ){ pod2usage( -message => "FATAL: \"$_\" is only allowed with \"retention\" service.", -exitval => 127 ) if ( $args{$_} and $args{'service'} ne 'retention' ); } # Check "archives" specific args @specific_args = ('extended-check', 'ignore-archived-after', 'ignore-archived-before', 'latest-archive-age-alert', 'max-archives-check-number'); foreach( @specific_args ){ pod2usage( -message => "FATAL: \"$_\" is only allowed with \"archives\" service.", -exitval => 127 ) if ( $args{$_} and $args{'service'} ne 'archives' ); } # Check "archives" specific arg --list-archives pod2usage( -message => 'FATAL: "list-archives" is only allowed with "archives" service and "debug" option.', -exitval => 127 ) if $args{'list-archives'} and ( $args{'service'} ne 'archives' or ! 
$args{'debug'} );

# Check "check_pgb_version" specific arg --target-version
pod2usage( -message => 'FATAL: "target-version" is only allowed with "check_pgb_version" service.', -exitval => 127 ) if $args{'target-version'} and $args{'service'} ne 'check_pgb_version';

# Output format
for ( $args{'output'} ) {
    if    ( /^human$/ )         { $output_fmt = \&human_output }
    elsif ( /^json$/ )          { $output_fmt = \&json_output }
    elsif ( /^nagios$/ )        { $output_fmt = \&nagios_output }
    elsif ( /^nagios_strict$/ ) { $output_fmt = \&nagios_strict_output }
    elsif ( /^prtg$/ )          { $output_fmt = \&prtg_output }
    else {
        pod2usage( -message => "FATAL: unrecognized output format \"$_\" (see \"--output\")", -exitval => 127 );
    }
}

exit $services{ $args{'service'} }{'sub'}->( \%args );

__END__

=head1 CONTRIBUTING

check_pgbackrest is an open project. Any contribution to improve it is welcome.

=head1 VERSION

check_pgbackrest version 2.2, released on Mon Dec 06 2021.

=head1 LICENSING

This program is open source, licensed under the PostgreSQL license. For license terms, see the LICENSE file.

=head1 AUTHORS

Author: Stefan Fercot.
Logo: Damien Cazeils (www.damiencazeils.com).
Copyright: (c) 2018-2020, Dalibo / 2020-2021, Stefan Fercot.

=cut
check_pgbackrest-REL2_2/docs/
check_pgbackrest-REL2_2/docs/img/
check_pgbackrest-REL2_2/docs/img/logo-horizontal-white.png [binary PNG image data omitted]
jV<'ќ'ڹpYdF>m{׎ 5ZI^签1 GbSn[3e>5F*0np :8qJN,kxv-ĆϏ]S}1Qg+=}gs Rt{i}v`<<]os'%Ocʱ40 MhWϏ]b+;?R)V^&)5Zwg!;^Ƿ@x70[3%; ?c1ՙ *b!14Ȉ5/_G>:,{\q_*ѣ?Q<]KƭE՘ 'jW(?fI1([/F|>OhSs{#\>574]}׷l>DfYof%v$W[CJ*lY|HЮ p?W{zz=}gQ-]XQ?OD5F*x7Nb+vog+ܳBv-wBK6  "VD2|~ydиxԧ M=6i68jgR=f`^b Vۧ M\hYd6l,zT >+YoYE @f,ާ MO|8ӋQfɮUs jp?j),wu<̮%''"Sv,V2lo{#z]V[wVgrϒ #?{ӎrΐv˞^={`n59'?ʴ#N\A~Ew{lYpaŶ[wWgNDb+Fj7oh ^-x"2է NR^T9Wsϒ Fj75&)H惈RVӋSt64 ֜,V2ncG ];[];Jf{i} s=K. _8~s>VtE M_ͬ6M#Yy \Hc{7A!^rWyEy] Gn'rϒ X},V2}>ϳN{~ˊ!޺:c#7:mXt2!/ ,;ź]ǿGnbEx&.ӧ N:/+H VR< 9C7RaI;3NJjH/7wۻ ,Jɧ+(묤AS:rA ZV  Nm3ۥ{^?\ joV):ߑPQ-ݐXN)׿C#1ĵ,V2o~jQDD{zޙV'Xl`[}xakv,X 촒?set{I%_%9&"&|AX#ڳ 2r`N˺z%_D'/{,mY⾘}"5#=Zo. Q>8ɧHOo;cu:bw3`:yH6steגn]17ث ծp<{OMQ>3"P}x#ڵNDC8/(E_M(xuo|'X>m@=&=?T'$JNMKH~ExkgnK1 SlWn Nԓ#GԊnVzǷp/yc`&ěدMc{7A @f,P\A'N y"fVK|Ag]ͩpbQ oOO/v;UE^Z}`:q8Mf1#am^#Ni|Lc ^ zp=J,9C<`kޠ_;}n,V2f):`ۊAޢ' _=o~|}EA7vt?<[iy[rY1ݯ `;AR=piENIvabj3t숤$A7wP1];(D/F1~}urlchG64yO~=iD ;uac}V'Q4@ @WcGwޠ_sqt{mߟOgsn)ؚ?(hyufk1oǿG1L"F*`+"tkǷEp%n45&M\|0wo?mYrxoAs4Wv~ HP ôh0:ux${o_3bbsVl!bV[^݀.;鸗50aPsQE b8f򩹡 j^O&\AXx41r\c·N,e5w1DQ^ E+^Q}IcfVK q(å{}E8b%Uz[/ugu楾sݶZǎ}~E^"xnqNQ]+==uTأONQ|"K"u{N~yƮL\ c}p1[/>" 3|~14cgZg8fVK/_Ϯ%Ǯ5Ǯ=6)cSS}veD$FaWHjf~Ń ;8r:o]/cb%<{\ќ7ث 5`>.{0ۓ]KƷǷyfb8:0|vj]7|~ZDdj{i}6\O<]Ae{T>'x)&T;ɮ%{Єvu5ByBSn?c]IY,V2OyBgc78F.4ק M 5xiOYtQ` Qͬv~o^y}yMT>ƽ3uc{7.~Sb%U3\#XNq#UϒܳB9Wy..G ~pӋ[w~1 |Ǯ|b#hy?:wJ!TcGЩ=fYo݄ qp @xdQ;?`G633tK:ٵd|{i}EqyGUEY*;KgkCU㡦~9Eۻ;T>!` 5O7PfY-m]ɮ%40 GbN/Xg _ p v%F.4>Oh` 3JY:``:Gy_{l>>+zQ$8CcQOMN9hԹ,gGMc`1\Azޙ>mhrGs=&z5EjfTt=ʯ5Z* ]QZj% OXb+\1ƮծTw1¥{@T  X,V2GӼ:D@,V2;+d\Ccר6gyb8Ń}~Ss膚Y-m/ϦWychF.|jnOO/枽XܑDGcߦ|bYdk8q4"_ (Bܳ@D]KƷȴkOӆ&kf,ܹO0n`<<]x$~ "1Rq#ĆϏ]SF*غL9Wyj.30 Gb5Z2R͝n,V2xYJ68I8ʹp "Z2,MhWjWdI24,V2[w~#S.:)J=?bl&*f9gF*X^kNTh3p)~t׳kX" @V5ZIucIyB㞘Do\A7L%olJ4~^UKtwo~ԩ?k@&c[>BVH[ ǘJ@tGs`=I Z2>+ "wl޴iME^R}c,V2K볼p#O$at`<Ԝ;4T>,vtDyAdG5Z=8ɮ%{Єvu5hGͬdגqcm/cC&qpR/$z`zv-}|;>>] F*x`:q#X,x${4m+co~4ڻ85 !*2ڂЕȸN0x ^Tki] 459ut5ŧAXCTUSVAYg(겇guG^||oT΅nxWp?sd@&>;Xjj-\OF.;cfkgR(fͮM/jƝ雭-#C际ōJ \pU` fpw㋥ō^IMgB7qyU ޳ݸ H$=,}er8E_{N\vp?CT7>Gͮ$B7qy4JSV)_,8 pR(fAnnԽٕ=\lq}\ʹ-\oݸxYpýniYٙXqu*zZ~=q5JnRln8Mp$n_,̯ C7^P?Y`кqrP̆n8 pQѹř{8գhos{QC@P={*3;#B1[rSfW&|8] EJT:1;Cݸ||2zzkRÙM;kkQYsspò#hIDAT>?NMgn-~Yro,ts{NZPW-5JϮI?In狛%kW~ăp7<'{?q}XQ)lי1\ovV/'3oM/tNJ-U=*Q9XJfɛtˢR(f_Q9"0 wZ_,,̯&3iѓ;kݸ pUWk=}c_ Iλ0qUrͿվG9Ϩ1\`գZu.F;XrيJD8z5JQC\dpD=*Z~nd@&uos{Uaq\Owx(N}?2*ni׎f/Q ] ݸzZ)SfWFC7]Wݸ:x=} ?!ݸjj%d ?J${'o~ށtu2tkWq(w~~:LJjr8:ٽgp?ӱ|g!/5d@H*H$}éOnf8i}^lNnD"q\mD'o~ lx3(Okѹř]zZ~os{znt|Vʹ}w1%P) 7q7xJ螫&nU l3r.niYٙ.ni_FO\M1%7ۇ鯿KMgfGҡ.zTmn/; W1%(?թ=ށMq(^nj-Mx=ށ{+wӏC\Tݸ{zR(fC IZp&tE|g2zҍ;-0n',̯'B(K d@7ۇKR-8pW\Ow`ѹ-gwZ'WnΗ15?2Y_OMgBninΟ153:78uov%9744Jq( c w ${&I?I9fpos{s[᮱`Խٕѹ-owZovVC1pffa~d(T 󝵸> \,pѹř/թR-ZTˇn.&c8EOw`nУ8#8mF;xS#8mG#Í`Y|q(~]ܨG><.c8IP:5y338yWF|vm=:uNSk$Djf懾wZFGu\=Ϋ 8k fyrIENDB`check_pgbackrest-REL2_2/docs/img/logo-white.png000066400000000000000000001203701415336775100215720ustar00rootroot00000000000000PNG  IHDRGHsBIT|d IDATxy_ey @@H ! -ʮUTm+تO]h<> JZhY PT!(V"f ($$!L̒9swMIf&9^dfYZ>UU*TP8TPDe|*TfuRSGe|*TIYC*_TƧB^ 8 O[g;s,߷;OQ zxO_ASoXk;GTd0kBf?8}맨O GsW `R\%TJkG \2 U!ܽUgO um=m{wVش;뗨Iˉvo9ߔF-'/YinM_l|eiMq@LXz1,}%_|@{B@ZxQԿ $mtAٟpBR{d@B0jcUE$3:$jwU9Gfvu|?] |W ~q|z9YP2$} b*:MYߴiVZ|'d ΙSxei#8cH&y#3rv3z\B _(TieݮMx΃$fpDAŌM\. 
:e_~+ ?Q >,y>g,+6bm!A@32HO!qxHT/LGiSp\0f2WM|@pNSB~/m_Wl_:]{=z Ϝsc922ĹΦp|]ϱ_ǥ7QEx{#-/13,zy%{⤆n"{o %b"HobJ}ݘNGW0m m</M~!h'?JvTqdQ2<0; \ ᳋*<5F2>' ,+1^iiBœphFFsrrcFXvfxã$*TS>9iS..FkKهye ]Dr78c#"m(|ݗ2qxs)m'KNQh%|kjQ%&J=JBb͒[E QwfIb $R3C(BO>tq9G{9)T<+eƯ~Mk`xr㽇h!72QhdoߙEcF2Z9Imk>;Z@ϰ"0ϯ{ QJ]PZQxYyo³ >?6ށ =ݮ\2<*P~;]ljMc[UKJ}V!צNѰJ:8 Y#sn 9G&/L 'I@{^vd%,Xpe/"[;nCҾx+y[+fQp+8{(V̫V, 5s?.3PP7֨ˡPkl灥Њ4xx'h!lX_z1Q[mCD (?&b<9@47,cenJ<+ u FX8eB +']\ĕ]̐dh0d9SzO)8u7P,^`n=QUOfҚA-q^ `0b|G쿒 ܺg7мd"1 ]v¹x{&F&KƝo5Ϛc&7!]6Duv&C.} 4t2eF,[WEnf#P('8yt,q'#3xadCfVlyM[̫ ^Džp9`\dB^ BP]loiB|"lرQT2kzRIܴdXd %x-91?(t(dHv2'x?K.ő(>x73B2($"Bt.AR y)9d>7Hǂ j,msJ Y} 3}緯 Pa rF PK$o-41cyn" Y/>|u<y 4SjB0KXKAr,JfHrB1L*dNXvON-(H?T縡O{y*{9 G[W+OY. /Uj"$ݽU;$|^1OoF~S~ϱ;|Hx=֖蔯EE2> 11rsr6m|UX҇|-]mj$^ˍQ±@e|rcG;QgOLo\ɷ=;B^SW*T@qBD%Iqd](Լ,z֘[J]j֘ eeZ.SP-EC3kf,{=!2Y% M<%C@L*s X4ijbZ]p['JGő&!=3L nK 9 (߰u'VFv3s[XЃ)kh. +Yyvi4 Q/TxXnե451y8KM/]KkWa޻BN! M,/,-{ h-ovP@k  \g橗wo OӀŭPy>J9@q(2mk梇Xk#`Z1OQf7TDUӌV' ` uoO.C`R!6 %[x7|kяxf kT:!|g}biH;BC<0˃&ʭozםЮ{TƧ~G[^fкqe6^ywwIV8(rIXP >x)O7|#*MŕE8Uaޚ%X0;ښ@I$s}3jҀhfy Zu\o(N=b|?gw-6|\׼<˷6 {)::a\zpR+ 5U =W on/.6QL Duҹ2>G 5lnA@0:GB#T\1!k>z͟篵#/CQ(xђr^yGI:!$vKqj|>^@Tex* o=^q{7atIur 畳>Be|ʐ-;դI3 ٸo, Mmd:dH+gLǃu5 X)Mk'_ A8>yeqGy@NEQy>e1/rAlM;wrF\ާ^‘KZt 2%unIxa|-₱s/:)f8|mRx=Ӳޓa2>^sؗJfk/?X0W>AFFn*)(c'j YHޘAJMj1qz5 _]#_Fx D&A\B5/w‰h)z KWLyTbknE& B hl [v5%I}d{:2ܷq[0y C\ 狮b7Y"|H2[zy%aO<W%u_|[lص!$%5Noq^Q1|p/]- B@M]v Tg$!Kn:/\eKP00xAlvYZLB<[Ԇa|³_L"x \mhħf~ִŪ[!$AU|z*XO=nEp37]Ga5|㩇X״ )dQ".%d&G! *)5P<Ԉ}'ʕK@jhgl#e|F`qt!4usBөCKOf'mC]$0GBFȐ$!|[GyԜá%yx*>,QD D)8|{p!$y}4ETbsC9KBW?ýO P c|b́ͫ.E0;/dsOA"BHHQ8<:< ͧ^>Cϛ|mډLg3<7G2ϥ7?^v ϪZc96bax\p?Oy<ئ_Yh' @+3F1lr,vI K<8j#b9\k' Xgpx44u]2hvW}Z{vl' [k"BSLLxDžoW36 ũ둅YmLOo\I&LԊ$,_N=%g OI|뛷[v)JJ$ .q)("h^Ǒ ǹ}i92>t8|H8q0?1e!55۶Or`k~|={[Y=N[G|H5% oNx~1~|!{+THxdz:{2\II%n33}}up:WCg0?g9ߺquPbpQog{+!Q)>o8Pl2)?3Fr e}KiSk[w-IUppҷ2ri3[HHyю~joQ;17h^1/W 4,@B>$-ad3} /iyG'[:Nf\qp嘆 !$V '$rI_~^C`iq+8wY>kuF}8kTqr%dHC 5i)l'ȴ)׿CJ{F&$ ,nc1>1Y\P |*(@ԽhÏyY+++dZ!Gӝs 4F \褮]!.vtx>ocë[A=%H*P5Xә*.z)_P^PƏu&y0lmm|<Ғ@S눗=7BbR5R[3W<_Yf\OP4ls:UDt={0opT28ęvTUIpxxL1IZ  $i0L_vh*SQ.s/.&ŦD򡪒<S'dH{Vl_˷Mq`joRwBvdRI #I= φ]7:o[mZ5 wo/xjkM'J܂bCeM\1bQR| ͛cag+rO Ռ4V B[W>N<;ZS3yj/I򄱨%5$S22oɻJA>)~Yϥ] &NfOX诺w3SGoc„kΡV7~T33X-GYm j9\y suNtX{'y`O FVrxi8p SG4&Iy?1w(&OCU^m|Ə0qUo9JF򿖃y~3]O>˔ąG Uđ3qX,ŰoAu ! µ,e`07GCs(jܵ'gt7|>A%#EaC0 Kccczu2$(lk}>;(U1D2Z2񔃾?&.ȉ}=Ac0Kf}[{7&_a d܁2(pL1ϼC|ryEo/-rHaͅ.zm}EH^T/G)$"$Ɍc4̣r0&"Ҡ3~O{ Zq`p/&|<\sfa^ytiJi]gi3wW^*}I6h8wlO+"QWUWym8"B(!&Z=Ckq,4hCAXW|x7vaƞ+T> b}`;D4b`o9g CMRHHUB:'˟u>e (;4Z2 vqf>=cwW+Id TLO{G'aUYLg?(S! 6 [_BUiOM9A|\f,΄A&kټc3| pl>2[}=YGhV`U*g/-fݮmq- tJ݄3!k+y}Jҍ8h`"^F U__^!axD OqK83+P|[e IDATl$U-R'}K%S~|#o7gLR>dZm5o DcCfG_bMOCdGv㣕qyZmhɣL_P1/t hdr@zI;}:N|W/ N'd*y~@$1Bd-Yxr&6KC,J[&%!Wq8WZ{X64RYn&4xZR0<^4vь>|fkࢅyªdJ!IHĆ^^2ּ;jk5L;[wӹO D)u-W恎w%B(뛷e/g<5ܽhQ86wأҝg\Ow-|oy7L>E",K<@ {xҩ]p@!1rx,sdΓ͂HN{G!8K'_ACwV-tp1{T_ m'}. }\VШRYaOG;;Z8sب\)Js=adb|!_Vmr;`G˯KߚgM݋U{ަ`lk _z9l·@j,Pմ^!%CK}-HYѰ*P=@xbUZW0@[^ϳb#ڹ '^غ(BНHwݔx۳-}{GH! y|cݮMyxgSq9,&\uť{:jEܸ_|(g8W^8l;scuk 6_;"9½p +faTf[+L5 Ƈ|LqJ",H[ډxcWHS`P',x>=^Zvi[Y"7;>r9raJB}N)BFvy)91UR^8}Tw|1Dϰuͼ>m{g1vVY NJ-/PY-;.f"|]U@Yk?t7wmP_vyyv/tx8 Ab ܱCmv{wbK;ܐi~9+~^]hB#ۧ8Y;◗ +2 ,*a\hlcʛw6q M3"}T'ԼO;vճOUO?jdIve#9O `.w^_oXlu,~vwE?sJ[W;8ǗE=Ib Ԍo>c>ӯ˼(a$r4 'T_+ro:phr> !!gz. 
rds젉$Z:?,`e%&0Y|Vޏæ:^raGݗzӢ摖ܘ)yEBU^ܼO۵r/GJ8sqP<ܽf=J !|p|hjU ,]֮v 0!Ky(cqk\ʘg7 P`4RO0 njg' +*^kaHB`>|1)p8y-3/>]]~Mk~ [H| qwybЭpdGBƧf} M/+jXMEBA%go< %JFp*|=<%p86Jl4'unp5¡ [HXyroM5I% \xn}Q|lavsVwYR9N4;nN?ۨv ERQY+p\28xIgeuQ4Έ?ypŇ-Y-7rYCtSuiT8: o{Z;{z M}BWyN\4 ;#6k,~#T|m{s&nh֢_"q"sWښL DccX:YњbEߵ=iUEQL/{CחhAs5Ҿ1w`YH|lڟ35cT5|֮v,E} %FX e[;Z#eDq &PqV<@w镖/>\ыkpljΑd֝\MMz^{й~3! kk^ne|{xwD-(ً G0 Bbz(U,e.g~WbՃSQ*1j>$qhbr|!LIBdz5߇f}Mδڃ@e|)iޚ]uga8aq1&_CJ GشAZ/Y4WR(PAŦQ({`~Y!\N\-׽Xz@*|dML; b]KQBk\g>o tҳiR 3]1G}TJ w.NSTO'\%KV0{Aغ ^t{Q" vz&)k k̗gz>q9̊]\׾/Hy0/g w$EC})SWvOG9w-abU ϭQtduV6mbgg+ ZNgu Q9*t9?5$ lUgTU9lsXV˫i^Z#YBjd%kobߩkL@6ڂˉ),y{7(u\Jes1hM>w^p>O>pXdp(x Nj9 9G JFDR֛ƌLѴ+ G"ʗ**H!ۓS -n 5IrL,)/!Ί?Ofe 'zYۥY33o^Rփ20HS4 ٝ0'?Y&_< 򭫹mF Ŭh"@Hy /3'sidlܵu;ܽ#h*}].Cep/Toe҈9y}]4w/?^ua#t71mea! BېWfWl]s݆9~L,ء,bFgXé6̼cB7>e|g`Pj(]9[C#er9|a𳋓/+~/`gt{{"c!B"ʠA 2q49cmb+ɲ^:z<8 ^?v6b?>oS-Flڽ%[I'z&[59.6ڼ^ ۈS⯧'33+^ &,FW4'HrilصOͺ\#5x}'Vp8 `Ǡ /6<%6D41q aw >Q5Qgo>1.66 *u3^Sk,3".1tKطA1K;ǰFå_[L<Ğ(FY߼y/-i, ]AWqVC861o"Ӫο~RzKo8dYϛQP#fB-ۑCaҨ>А9pIqT7dU}7뚷E7uO4MN{/5i .(VX|T53ڈx >{nfnM~޴gǦ7ڛ v]ܶUs_ ?lRnh M8q'㼑gIC Jǰg`T-ˑ/ve|mpA>uR iJ~o@Dh[B`eqZ]];{hl&)$u=ش6Xdh'rŖ,|ɫBZ]wNXV lfG{sN BL} ڍ׎l~'C*[0ᯎν|z]lhT{Z c&7ІAyQ&Xes$q0A4 QQyVuZgmatZ:r9 Eyԃ5WR#DKvEZ;֜jb`skR]DAgԕ%gT|1Rn&&hZ{hG/hiKXU\B(0b_o0/@ &b[S& #s;Ga6DŽrڦmٝg+h5T36 }x>/7o˭u;80!T}.-mkg}_-'a8c(^7мЫPشg(]6<QP)!?r _~Qc!XB`FNœJI#nah?hD O{gYV$Cq$aAڌNCcQMJ(tb>Wy~뚜.>>(^rA1O@5pP=u*Z#[Io޺we+/>/o8w86>uOϽ t-bɃ]^F6Iƺ{N&ּRCIݨ*-C37bƲGx`O'5$/gDQE|>$ `Dj :? \2v?ԎMGD:6X]I< F!%eHPpL97 ?o7 ~ky+'kR/H`!"j7܇Ixjݮ%݃Ó:O[8W*9։#>os>f R+cCZ2J~c\`x8ẘ>圏д E{c@yJ.sE hHR-.ާ۽Bc:ݼzynJagîM}9YW~Ml<2o|ne#B\.?bƜ6 O#?qHPG;x@gzZuS37~a ÌիY$Γzr%pHKR1|4wLze<~p]O> zW Rb#Q9Y쭡n(✋zhVq Ywf7=۶ KL3=Nb yY?;l$T꧹gCVxu&(x-EHh:ǎWٰkUBQH! R)VVʏY"ڵf3/qn3 1s¤?SǙxug~ :rx-fNklMp$jy ;s$;Q:" =PzR [3$j?~D#kآEN4r,QIpѝ[%vwK~/2$@ kikko~:Ll|bC Θ#nݗ(`6O)r? $Kϑ`ޮ9a՛mJB~;m aɼna+ bSW43]Y =rjqw$$xup[w- Vgq̰5瞅@$[ϑ WQRo3 -,ް&^\9!#^ O`IĴoS$eµu7؅aɍC?ݗ+~V.9xo_ʘ=1Gz|o9Y}i o7:2K-c?2&*S(J(K6@#ݩqZPNDŽnWt.^ #rvqyJ) k{Q 3`O>g/.+ #n|7-m}`qf 1)6H?(Դ["p#Iie#4QB%X~}O?F6O-5oرkMLpd_6cs?5nv6n4BM%W@r;zB}(aU$S5KB!1qHjc2Z=6NٸuQB^k.z>N; }*0zǨԲ7- rp.x *YJ[Gʗ:W/Ky77߷k`^.-YJ8vN2>6d0޻Gk):0Qcܑ.SЀ2-˥$e|ux$vԅBQ'>VsV,Kſa%UxYtV'@$Θv*Vix$+z\ IDAThxhE MxRVa@C}ՋG_5m0aFG_>aԳMa{k UOS_2>ӂTkov,&LD͗<) Sd80] $/۵q+#1(33dYlρg^^'f͆]wN&T'HH\&&tgᒇaqQ+v藜ߜ(^MM5͛ҴsFi{!H4 UyxxBzMUҙ:WCx;|&툚{> [S )y|Ǖ]jf3YqyD:GJa9#1qXKfkXf eMJش 7[ 'mxr~#Ē/Q AZX鮙zEx?(UPVDe|zUe[[ֽ`F0L!Wz%,?E^%+Oa Œ'_~>prFb癠󙧎w&_lME U^P '*CHybt I4S~KpYK.4VƧɇf?.o)\!*t=k:ML9?+G1Fs!W9 ܧ~HjM8oy)<Gͽ)jX8T)4*?Tg0,Xyw~ڠ&z(~ϵQyP&f*$g$$jOҰ!\4vcN= ZLP2[]U0C$J.NђdJ.?=U.^ug͒ўlu<A- nb.?➽]lKxz/7<*d$2V8 Mbh5!1aĘ8ܘ|9Ŧphy^f,}Y+V]!2qTHąg=G[^:7neNd%g)p'ɳ?2>=CX%=L[1bĩ1nƏІx*u5u"z$ˏ8Ukoڍ*?Vn/gdKP FHdX{$YTÂ;2bMng^L.nne˘\iaɬRMl1=S*¹?[!oH V|a|ư-y̑{#޵+f. *0@N% 9* [מvuybfGҜj&T| iƲ΅HW^6l2s`ELkk/: BQExN4<>d%h$< \챣?$ ϐ9KŅϞSj`0>uáj<M/rEJ+N| wyx/'NJxZ=u:S.jgf^ʴ,z*Լ Ps bћ"bd/WJKM0%f0/C>OrtNs +_G XF3Y|ԞW⩓MD!e ܲ䆚iL t,OFJr!\#w^c }RP%z b oW>ϡB@40)>Lj8XmIG۬RD"aΟhz=5ܕ[?G/׼6>)r/Xb,D'`WZM/^Npyhu$1ma|żGd˜u0L8>#;~߲9XtHjETi̪Wf0! 
7H`dL!Si\5%=ha}:kx|sZi߉e^,LL7%t0a.'}`/oWyNN^Eʟ_RU)֎Rxi3緽ʪ9W,Qb;15IiZǼ\Ʌ&XZDF*#T0BJK%BE*&/a$P{&_ݷb'KQ IrfTfm^h f}hxJf\?oy/ڭrXhW8ʵ0>{]C=ozzO~zN[KaS@E UuC?8DsT`aM|fJնhY,û*2}Ylm`a| %u{@g \?we-a Ф>᭬-O *)ӽzO^ٴk;ol?'+ :/E2aW<# 絮L_bƧWϢ.̫𚴍gta| Ιr`[wXJeZS}}l=Sx{7d#9Bwϛ*gdT,ks S$lD)D-ܲd]:3# D<Fчk a| y/ūI6ݹ/Z55(z>&>!3 lh e!Sob6C>Ե_꾭[%!œ Rxk`ԡ$$Tu~$ qeW"yze] RŻjjT3iw!zb4G@Rr8p0סYkG[ gTa| /xn;TUBܹrD|,S1Bu0Q7iޭ@hExM׸G#0_ ' <š_dJA" uҼq5C@^;H*ёakQ]pEB5K;b^+6600> g$eMxrGBيP[/̗TȂGg%{  )qjgs)&Uظ<=UZX:{A q﫣³k` gHwG'9he(ܲ: :hؕU\Ib^?W1+JBÜ}<>UX9c16HQwŨnVRj9]P܍;'O8c7 w@L,\jf\ 64m DWJFMKߺdո ě&>Kպ,\deUd50*0>#^pbi4n5gޤ70j0>#gw-v8ۀJFk$>T2絮D@c+>c]hҶ= R# $Åwq!_~!_]Ea%IFc45PAH5 3ґ?F,r«X  ϦLk[6]h|3#ß޴.=f;r)ķD4L0Wɞ'NS_ݽw4@.G+f/@øF{D 3B ICa&znmZr_{tKepQ "Q&D9%8]LD#s᲎̘|){ aᔙL| `ؕN`[yp7=cry\UfD, ΅tGgae:v+2@k/oɪ6+J^y"ciJ G w9*{=*mT>|"@m_DDh.$4NN ն.~I 'O#*5[;p.G5grp[s /c`BX,<fSoIy!JPbvoP4Uqd# SfrYGgxmB(~\F0>#H/=EfHuarˉ6 cާll5+ޘ0jiL eI]Z,}7|?ԶV>=a"x;ͳ;sQYP%gϱ@FNte"UGQQa:PqJ{*d\1k:13Y6k!mDEH4(&4#-y=%SIy?gW>2%E٢a|F na96̭o@ Lp\= ޴ï/_@-cL[$M32u t2/IR%"x%!T0FӧL}|,0^p< FA[<|s7Looo@m0>#%'Q>ġ7^*E(y?Q\]ËMpTZ !t9ŋ9~ܩ,͂Kf1oZ'Sۦ pI <>kB}nH!Dh`ՊLJg N)aqZy'Vğ=Wb&$wUA"VNYBKllfY %5TSQW0͛VQ.[].L)t] vd"F1TW]#Ռq88 ;^e+!M[tTPr*5aƊI"{['hlS;e&;grWB4pVhF92y9\1Q˾߅a}Ne^\MNښZL5p^A/.z!-g'J5Xpf$ #{xpŎǍ̩I'ȲiǩMOlZ~N^P "&Q.FGH 3Jp *Tq̫9WX䧔ûakll3qJ8ު6E^\X)dI:-Nϲ{]'ٸw;rr3 3]h'H!C>]q3B"ZQ8uJV$CEMʶ.wQU#q|bM4L]Fg9GEqbG{s?ma\ cD ;77NVOg.Esjՠ(w_7n'FIѬȵDV^9Zc*S>6Ë%ș`,Kdȓ?ʯ_}OmZw9,3]~\KgvͶC{Sr@t,Ԋ12 g Wꕧ6cŤn$-ɄH5a8}=x}6u$.{wS6vmo3!#X)Sڦf{mC ?=ytsTu~rU2'aܲo(R4 (AZY ~~)CE8Z$):OBλBWMhlt QO_֋Im ,OHX׮ w3шF5J`qƼ͜cݶ|5dod8oal;ȼ9Wq2xR? "iGzӂ_iVV> k"8'^8`P I1+sO坋$.*6\ч%/xcgnώ.͉ӁoJu 5$$s4SRWAXU2)qCzɳF[tbbk%=bчy_XU.n( 3FfVk<#bE߀w9"? zJ X1kM£2aՌqr|wT!ظU,^Ar Mmva|~1k亜AC8R>w^y$8lu+'][;W%sOmNv/FF߬wMl\~3~GUkix*owqX(! A.l l*Q2B;G-L3>r m-5 \U3rEey'aw `DNUХt\?oybW;6'}GA72#\30Ǫ|Ɏ IQ71ֳcp);ֻ@쪏0:ϑaX{&)V)}odxoBo^~ ͓qR!SȫquxxcfœZzIwvoOU!G$ Osd]9Ťg\Č^| ~|!Qr;5BB|8Hh~O)Q2 ]"Qucz2&wa9Ѿc ޖQ~~. y89-9kS'{'f2\2*pҫҁ.|'.p`!dtxD!8 S#,L7Eg#8(dŹZ/Z?A̰3 ;z \{M*! D= qsbH+s˪iqG!D#iY15bo`BCrO{X(1-B#S\2:MbLgx7\4E3nGŬ0*끆3('RjuCU&ƨ1=#rXSAܛrc`Y9S9%/)R`<{Xb E, X2{_ȍgݣg90>cqgм-TZD<2IZ;#=\K?=QJV|m?حKV1V1\1sMHD)P|&,(Ыa|4,ξP!5~D`db໏M/!q+rlmukcrr qUge4겹zaN+˧a| $nML,khK6Fz7vgO0|兙IcXsTY&SE!k]I=e$j1v끲1MZ$ t RŜTH|՞ jaw._#3|<}cybd$!:RX>Vr-Y(wcH>pLn%|=Cߥ3{Қ,q\lbxq(\rQH1 \Xȴ`^< :| 'Mb>Ghx&Ĺ_:~ĺ6}gQ :GGE5ah^wm1OOٞD)ypA֠z+;rE@\Q*QY/`;}`?p}k*d}Xis uAݐn]&6R |<PUҏz(Ɗoe$V7EWKRb0Ո|2].r>(Om~1)ҹ^#jY+ŹC<{$LK5D8 $?lܷ{fZ'LΕ7%`!aZx9-y58~_:+if%}B$#]ѽNQ;,$|q`/t3֜Yb㹄X֣ϵ=E-o ZFs4!{!hYZ'L49UO(K28&j@;ry'4S\] m^j0r q^yL:Mڥ;O%#4'.`#Mu?M*]O(VQ/PQ%ziɶZ-·_x"4s,үZ_j Kkg[+ZLl`Hx{ IQ>"K4_•~ֲ _ЕdB/fzhG,J!WPW r;/F5[Y8?[) "mN8w`5zªМ) B%C4/rX=U8})s˒%8QZ&?L ·_~w#DUd?~ą:!=ʛjm}7P*;Y*G= WrB'%}6pi<oP1hX8<^z8Y^q [| 1%{|Eh";׫TnGpRA|NSU@yD5@rj`hdF_ίJF$\:&7ON\/*j3T S z= jS_rEE|Ps(ڦū1|ϿbnGgl먅PExîSޙ~%Y W[^<bPR{xF Vf+QUАJY쌺t뿆;$#'/ɉ#h6+W#˃L|kݣ50„~K?ɿ2#h:Gqu$}osaK!@ \RAf\sJ<RqǢl5)'TpGy^a?o 8|sȕ _)ZqZPmr3X#DIA0VO.} \{ =ΟlZˁiQ-2_iboknX /\bN]d%CiFtae`Sɋ̇ښs6ZС]XT,4>-醺 D[ױXwS˞jHv7HX#g9*wghiϤQj{c,Wx7~v[ЅK]db$p6B"/4c^{rH9&UJI3&G;r/f}ڐAuFvԩ޺cr:NȰŌ\ٹ茯IW̺q6O7CHHBl7xO7&alw= !\uHqSٷ_[i[O%bR9ZsgtY ¬Kr5TLjIN>fSkJbbBR0cTZysc:M (}_ Uxe6ߊהo4q. 
d8ZRŇ!3!\6>уJev |&dxR۽َu9Z~ ;pTА&p!B_*-h Qa+8(dΑM!@dܜR5E ķ[o5V"dޑp3W$Hbi7ԛ Mh/.I-Q?Z*ͤsg}|9{ QGDMŤ0e*vHKNB卖5K4gy>/ ZfYi|׭fzd ċ;_㺹+lh+ZhKiE&pX =r?K0.W Fה .G Zv'/wXߵƉx*TT 96Y}ޚ_g҄ɉp %S!6[{{=Hu{$ԟXѡ7( [͒wBe( yﳬs>-m%֭yA5~\sa]d['m&/9hCfka ̚U2(e"H &&8 _| )xZ۸ssښ[PK!)T%@uyrKHj>|į*GUT\r3\gb;P@<<)ŒʽFcf: IDATԃL{=8?J>?/=OL4-F4yKM gH ®(5\|- Y{DTMCJ%ˤ$jjԋ:ᅴ}Vi ąqK15: 8V0qAS^(̿s\9s)\TLHӋ´:|9wZ μ*wTO6ee&j{ghZ:pgXscþU >S[w'\sxnXPU|0P[CTV*H'ȼ+} \7gRk"e ]En^Np珿Ȓ0xG}s>yWЬKw&DО2F(T>n ?W(UD+O9FW^'wμK;A*C$sb'wv#8<2)ZfZ<d bMX{8\YÞ܊iT f\@&$+@az[XA>WK^bXE('2{2`2^߷9V=3|((9YVm=K0^Lkq?ybM wJQ,5}2|EA\"Ur'O~kZdB3AXڳzJV{뻶K?fþ-X"RO6#SEus^|.$"ejߜJVf҇ fܺzMߋ P,Q42ɊL9ĮHxK?NbAIh U.qm-hH=ӾncKa֭KC?}Qs387<` Y ;u¡#|%ښ&!7)Y;GƧxar]_s@][Xs$b"q9/cz[{V$fV8sRfJB2< QfCOzmGvQ!CJEacI_/VDr9-7e._?arskh3ήDϢqG=ʉ'(ϣ.ۺF)95ᨒkFfݥWm&5\GRa[_Q3B/vxIy)r1sCNkz%,YNZ\Z507= O<'jϬ͗D E&dJ75de<$>xw\}췙4(ki/ K=¦7w[xHk{wPu6ߪ9,nnnf9deu.@$cᥝLeS;o<@J^ppԛ|geWq1rySd%\5R"t˽t0Ǿ][or Dk2=!!%JUTi¿$vw>P$z@»9jC8l1~E~%=VC ea:?,y4k(}lz!ytִ;|yQeLY:r932~;a9F*(OמxoO<$Ӛ*pwpϵ *ˆs_]HN"dק&T<[;G yx_mxGN LG O>lyь`jUeP6Pu֧Noiv~Q"JBP-zͷѥ|> a^1/yNMM΢_j_S!YhDDM6Ч5A)r{-aZzGxxOyrZBeI}T5 7&y'7p[5/|-hw/ZVwֽUOUBR_JϾ6ZyKѯT"o pX{yT5T q)7][yl< x'Ab~Xq6()L8nUŻ/^?hL$*,:jpFg|JnĞ$<_{{<݃K ey'x #CmcZsܼZ1yC Ԕΐ-hHg66|={GY>sA 9VC\B<_Q|O?4Lt~<#.@(S[rue͒Uk\~HMyYfa۰mdpf^ٷ2?2}O1*Y Ug病F||?`ڙ = 2a4+B<ڸy*>\r5K)a<5x-eeՖwGɑ L 7:u_{ ;^h D4 Ӡq\% .PqT|;kSy[+[onfLyoJu`1 TXVzC~7_1{{˕,3 Ԅ 8Kc?d7J#JՇJuUW-fΔ6HIJ)eX$TJl4>dzkma-w h/aV*^Чi"ΥvimSj"VϽe4!%CD vK$J^qiR¿'^xƛ{e6znF׈bviyؔTJ9Ӥĉ-wܺx%3a{1C32#6gˆd_{.-Rj4OħZqhǪ'y usWa#{J4~\RS+WP )s[JN&>{4Bijqnd.B$+ś:^wn//x Q_m$L }PRO|^K$LIl`„I̝6 ̝2W$nĢ|[oW`Ӯ7<=orn{=TWPa]~_MޕJI/v[EL]1h^GxhqԂoEOB>1$'/MXg盔&÷Rކ.`X)FaA$ Zɿ2 g.ʗ>9VN_feRfIw|X' |Qh IS{~[^hbv!84 M]ЃparBy>"L9YQ#}\(H.nQűذ~91j2ܹr͠lu41-]1XSB.땮|@JVÏ((&2ovyORLuΕTʟ3kH`F*e#]R3NԊA rlū.DͯǼs(zO]1>/%dzNƧy蕧4 5c?7ħBG:+GF71sftx]\?oy8]0'b@X<k=?GERfjORrd%/ 1ЎBmb3«EO|1>I576@O|F . sP<oxozOp&0HH7P/T(/=V(#bw7oM'w82>q Us\qѸY1VhXڡg-Vr+Wmb+^}X~21Lk7'Ҕ[4hƔ5/̘C 7 xDuܽ>L0/=&ʘƍ)ڳ౷p P+WMR 4p&B#dYѹvӽLi5 4F~ aPE "I/-N48x5Dj T\)k~+f.6*0$#w5H|SSRQ+[IJNgy~<˵ 2R| 2⋎|o'L7qw #YaJK8zz ڞc|`Y*U}][?}[9c^sCJJNX|4Q%xi헦o1h}ƍn`it<{kO GUTO/7>I\: VWI?R.t#Qz{{u?!>PG٦fhZAQ)VCkD͐@DDZPE!Z?I"P8JcAԁE}x~/m X^.fhnOhTô]x3x[9&Oks_~^y3-YK7J9 -|$s5Phx40zQVKK"rl qSɝ=Ek>LEӇb ӻG B0j<w8{ܙ`YAEN gLcqèk$ OqAoܳ2.w.@Va(W0vc ꏆjJrNgl ~6<&FV&F=﹪⟽!H%!CMiӱj1@T2 #(}9>*8L:IM["شp"&#M&qM"7{nrw}ZOU(TD X* aުF$!wf>ɯNrlh*ТBG(M&1詭PbIvKDo^vJ"OCw9ۤU~~zҵKHdӳ[WR 5mD`9ȤKYi=[9wiIAQE$J7& 8|MnNJ3'SBFTJ{J(2nnC$lh'IqrDf)Fm6g|ڇGynE*h % CWr;]~y̑7|*WL?"| ?~U_kҵ#6b7W B``GFnKcC6=]8S)yÖ2Ńr}>9lʄ"x ,~pK?+1}!g{Z U:Q8Say?=#Y}'@"wS>ƭP"Ns>maÁUp^PV M"4k[:8yڟPS-p~:8Diʾ PH2݄ ӳX"$6IUOch\LHd¼ӥx^vt}I%9VQmS5鿶M㿡4vX>7Q)L#CV 22L1s_Nvwo6׹O"O 9H B^:ǡ3ݼ:%O5RHvA?s-@jk=P?ūѶ|.~L^/B$*){fooX*ο˝;dTI ,KרUHH$3jjYeN<+G*W"OߙOn)#"<9=͡ӿeImF#(ekW#Jq}ywC$zO}Cاhr/&qÍ{6u#,_%&8Gen{+P۝RS'V9Ñ?169HhCJ†TFe@$))R{ ʸD&)k:yeK?/:F" ;PT~o;wHtBE rZ1=)fZ'43B%3ښ[9ǞK.1ɧNcy319km\[gF-nmA D! 
%qi#/wog ~GHu˵H>um>ݬ5/ gHa܄Ft10UN*R2T xk{_i, A˜Xyj|]$AeVMD$/mF>$;Mc8v71F$rm=߷F1U UKˆlIENDB`check_pgbackrest-REL2_2/docs/img/logo.png000066400000000000000000000251161415336775100204560ustar00rootroot00000000000000PNG  IHDRFZ=;sBIT|d IDATx_lSY'Kp{vi)vpwZ\jI')'RmTQ3`GQ7Аa4 UMa^ czV347n6 vW-׻B(DžY# x9B# BB{^P@c{$!hj@&kw_gCz7:g8\S!Now΀۴ٔK-&5n6E]z1kw?}QI}l{9gXnxK;Xa&$?r_?ty[ kwuw6$ԥ}\Pm4C0 㚺ᆷ<ᆘ+)\ȥ2|j?7l]\D1'nY^aca?h}\[ʇD1cC cӃFvvWMr" 3ΎjQ l,]>.|4~qE+ Ӄ3I(ULgEac!D˻jbu'5uZ͓ܹT&9sY+տlP'Eɬְil$m":TA&Zcaݎ2z/ύunk=A9JMfMaݎ u"q2-˓sWa3dhYa c۹;J-kEFYLu1 Okoa cՠipH.I9N /@cݎ`K~ԭw[H vkR͵w:rP.3 ❗.{ZͶhbkw5N4l=w[ uѻ4vѢ/Ѻ71 56 ac Zl2P Jp3†r; [qaC)ҳ2`8^gw?el606`^ a#ZK?aK?a!1}i9шJ?-NN 8ym!lTf`l3>6~FvQK?qM ]ԻA(LRE(}ty!lܴQ^,{Pa#J?'Q/D(zM]ۧw;H l l#P.?mY f(4o}i=uH+NAج ӃSI 6z(65ph"g]Bә< @9+N-ha@+N΀xmϖ ~]qraú惭qh"wũ=ƻn,6(+\?KAS45`/OK @P.L ]gh|^i+N:lPVZYdJtwց#g]zYS J?j,B4_\H!p+N1]ؠ&$qxyO>/=Rs'04a#"Sy5VvÇ J?z.ϑ`1Xei=lXME(霤 Qnbf8?hƠaӴ;A!jzlqmT%gbf6 O::ClO45/gŕ*%jqIҶ3bNii{#Aiw¶:oBR%8Ou}R6(Jjl$'uE*8aTc{$A![PyOF_֞%~vXNдt5h°GnG]ܸꐺvwU&F74hk@S6ƻ:NJeJ^7SX+v+Nz@Ŝ X=]Ix׀)eyZ\΀Ĺ^46($|i^ܕ*)!}Sw X'9{;M+N%%w㗫 7۶lqM+{v 6'8^nng%aOXDyuk;hF˓yvΟc h@  ^B MN}܉zWݳƻhs`=cC?'KO>/=JNXͿzB5ԝO-ީg򹮞M>P.d_z\*K>GϠ>%*qhA#f =\FWʅ.vyr `uVTrϡOGn=|YGu =qj|WA0 hyxbz4vbxCz~{igۦ?p11C'YF$n4mf\#9.d)8 |iLA0 70_;@NmL ޢv|oufb:'߿5S0Lu,|6Ny&ky|iz)8r?R_Wn4 M0گFED#:X*F.hGQb(Cb6k0Lw0_WqL6IbϤ" s#Tl>Vm%&YmWп4އlf4[vY#v4GRU~i=Ifƿg_.U=un0B0LuD 0̶D]4H}(u^%7UrabfN%hD陰%~vvЇR0:=xC+U|sN \ao4>+9[A[JN.Iʙʥ2I%V2F A0 #\VE 5idY%zFFǃZ큱zY^ xi7obɒyM1v0 67 z7w (Qq7̎_*A0_׳il$x\fۖLnˏ3}ͿU]ĈU~)VxdyW_Ǧ&o..*Ohk@rzKMmѻ4 툓?;L{&X]45{w*q_ȗը T/FqMݤ3sclMO4 y 鞛A<͇X+v脙a6/ Ş+jl$bNwE=A4Bج oUjhH΀::aƠaͪHtsp\ZŠ30f{nDe6*X#HZrC:R7O䴚6k G?}P뙕Pguo CبLJ? f;GV M?lE:Q%pyAaҒ\ y,sWUj (aM3xyG7FHRVsCߦƽZ4ChDʞxi6Bhr{G*E>o6BhwSLgyU  %&4Bݤ%CcCV.@o/O>/=}X a! ]l}Rxa;g˅r}\xaƤѫ1-HmHt{oX/- l4&{n'Mz\x/շ(Uo_ +0:=ngٻT&)侘/etk[ #<_4m8q%~n,Tr!_/sԭUrGt}xh F?Fz?RzlzȈaS ,_&P1" 8qmbP7dd.I~,ȥ2\>m1:NB+惭j\*E}a_3"lCDdbatzP방_10H)b#ciQh/w|bfo?Z!Mff)ʇё uO 1u Tƻ6-FaJ@診?73u$ޛ?7LKeRʕBFgqYoJ{vw+i7~ + a3d(UgÙm7)vc{$+ l(@z|a" ߻J|Z C6N?.ȁ5l=I(9g xziwAP`zjLm3hLidJw cDKOg#5?Vm::V5uww6z5aΆRJF\f~av ?FGȗ˺GnY^eBq*aC ~D̼CN>$o>~V]Rn!l(hi 1TX#7Dp)e6݀6qK>=x8 Rry5[O c{7 E UR#܎I\?ލ5`52/%gh aC$kvػQE (TOzyc\#ef7J †BH6l.3NM1QX6r\ud%L]sacT>' %t={ RnrL}I:"9k2eFh/w'ލq l6r?޽b:ώfϡ0A T ^GrdCh=Tʅ[+&N]㚺kX z6@n*Bp{*Ϻ+ՅƤH4W:weݾ&,?u51bX+Q$n4:3Er\batzs8UލVbRǛ$^BsZn|^z aԒRz!lajOB6ĺAIUݐ٩RS acRQ΀7Rkݻr_=7AI2I۠{7#pmoGS4hl݅M'wR-ѫ AJU=U'S a Tջ һo#|&Y lL,QB{(ٻ$ .vm*s=gˑUw#H u}]3ya&ZwC|h.s7S?%>h弦٠g cZnfGsW`U aeFȗl2dfB4 <0mq6iٻKRR!_G;5Z_J~g eOCWjRz7$rL3Beޤs6{G= Ӄ8o}Ϳ|-M=/taImB%j\OSk3+2*B KKpc{$w F#1{7k>*Bp >6 IDATR&=a {lHVJ ӃC7{lHIջo/̬" b/aQ̛,N=2r Hu;$?jP;1$j/!= k#YP(Aۻ4΀\'1_<$]A%{F3J 9b:'߿46u)?kv\ِe(Y ,.3G0  :XkD_:Fu3+"}QJn*B |ǡ3> 'װ i"-/P5+-M# Ӭ-$WM_ P.n놗I`Mb%y5ލ΀ky똹v/UO}O]ۇ.<IRꆲZfW/79^#jXتatV^ȗn()B{j>k[̓kg2% Sfbilմ;"hTų&o / (TrWVF \*k/외2?;#/O WOjUY*m(!_x/xpmϖ~M}-4'<)O xmV٭ g˕2?/?~ 9>Y{zI>#VSO_}˿}Vyock X5b:VaaTN]ۇ.? Kr6YއF 9|i~nxS':v g#K>^ea0ߔM@H#N!*ۿ/Þ?|hiUY,|.LMizǡ38~P?) †a8rnG#I֓Ke3IqǍzX-Zv3Ԝ|7b:Kyn{ _bNhab:Qrr޷Æaһoih w~RbGd1ohuhHh*+lD-=jlb:O 󞧖z*BP[PX#ҳ5hTrabV?+l"a0ClӻYX~\Bnishu/ͧǦ}{9ǩfۖĦM̦\ +]7 P/O Ꝺ>~Vbݎ`&!GðĕƶH dRވ*a#@&7s},@[$/9iF,j؈#w;-CF<2kv ur&M.I\AJ4 5CLP7Ĥ}Y؈(XyU8^N zJfYh6",KWʅA ]?}g`R6)F$FF<K-tOy!7eåz*O˥{0L`iBGȗs3ɹR+jr΀s;pQ^E(.a9^gw?t(|1K^{m< m9^g=JS ̎fKY03KȊP.9aIbyb 0b{F.IN *&*FP5/0!_K;Æap`#56_D}؈Z@(0a#jl$\XY.IN]H 6 TOn!+=2lD8Vf:lD(ef0:=8s},mK٤L6"3ѪVL6 R`|41PFĺhbH\Ѵa#B) 0=K?ha#B)  ba}z刁,6"=VA+ JYh-K,@M~ kf#JBجR#~ f (eRV6K?haSu!l2K l$@) k%!l$-cZAȄR`ZA(, Q>(a0.+~ FE(ea,V-PʂnV/Fp'}p@[PA>PA?6PA z|i~Y1†"(e~†B(e! 
check_pgbackrest-REL2_2/tests/
check_pgbackrest-REL2_2/tests/Makefile
### Global variables
# disable running the activity script by default
ACTIVITY='false'
# define PG on CentOS 7 as default profile
PROFILE='c7pg'

### Make targets
init: clean_vm create_vm

uc1:
	ACTIVITY=$(ACTIVITY) ARCH='use-case-1' EXTRA="$(EXTRA) check_pgbackrest_build=true deploy_icinga2=true" PGBR_BUILD='false' PGBR_REPO_TYPE=$(PGBR_REPO_TYPE) PROFILE=$(PROFILE) vagrant up --provision-with=exec-ci

uc1_full:
	ACTIVITY=$(ACTIVITY) ARCH='use-case-1' EXTRA="$(EXTRA) check_pgbackrest_build=true deploy_icinga2=true" PGBR_BUILD='true' PGBR_REPO_TYPE=$(PGBR_REPO_TYPE) PROFILE=$(PROFILE) vagrant up --provision-with=exec-ci

uc1_light:
	ACTIVITY=$(ACTIVITY) ARCH='use-case-1' EXTRA="$(EXTRA)" PGBR_BUILD='false' PGBR_REPO_TYPE=$(PGBR_REPO_TYPE) PROFILE=$(PROFILE) vagrant up --provision-with=exec-ci

uc2:
	ACTIVITY=$(ACTIVITY) ARCH='use-case-2' EXTRA="$(EXTRA) check_pgbackrest_build=true deploy_icinga2=true" PGBR_BUILD='false' PGBR_REPO_TYPE=$(PGBR_REPO_TYPE) PROFILE=$(PROFILE) vagrant up --provision-with=exec-ci

uc2_full:
	ACTIVITY=$(ACTIVITY) ARCH='use-case-2' EXTRA="$(EXTRA) check_pgbackrest_build=true deploy_icinga2=true" PGBR_BUILD='true' PGBR_REPO_TYPE=$(PGBR_REPO_TYPE) PROFILE=$(PROFILE) vagrant up --provision-with=exec-ci

uc2_light:
	ACTIVITY=$(ACTIVITY) ARCH='use-case-2' EXTRA="$(EXTRA)" PGBR_BUILD='false' PGBR_REPO_TYPE=$(PGBR_REPO_TYPE) PROFILE=$(PROFILE) vagrant up --provision-with=exec-ci

### Setup
clean_ci:
	PROFILE=$(PROFILE) vagrant up --provision-with=clean-ci

clean_docker:
	vagrant ssh -c "docker rm -f $(docker ps -a -q)"

clean_git:
	git clean -f -dX --dry-run

clean_icinga2:
	vagrant ssh -c "docker stop $(PROFILE)-icinga2 && docker rm $(PROFILE)-icinga2"

clean_vm:
	vagrant destroy -f

create_vm:
	vagrant up
	vagrant ssh -c "sh /vagrant/run.sh -i"
check_pgbackrest-REL2_2/tests/README.md
# README
---
## Introduction

This _Test Suite_ is based on the [edb-ansible](https://github.com/EnterpriseDB/edb-ansible) Ansible Collection. It deploys docker containers and typical architectures.

It aims to support the following situations:

* Directly-attached storage - [Use Case 1](https://www.enterprisedb.com/docs/supported-open-source/pgbackrest/06-use_case_1/)
* Dedicated repository host - [Use Case 2](https://www.enterprisedb.com/docs/supported-open-source/pgbackrest/07-use_case_2)

---
## GitHub Actions

The [GitHub Actions](../.github/workflows/main.yml) workflow tests:

* Use-Case 1: PG 14, CentOS 7, using pgBackRest PGDG packages
* Use-Case 2: PG 14, Ubuntu 20.04, using pgBackRest PGDG packages

---
## Vagrant

To be able to run the tests manually, define your EDB repositories personal credentials in `vagrant.yml`. An example is provided in [vagrant.yml-dist](vagrant.yml-dist).

First of all, initialize the virtual machine with `make init`.

* Deploy Use-Case 1 and run the activity script: `make ACTIVITY=true uc1`
* Deploy Use-Case 2 and run the activity script: `make ACTIVITY=true uc2`

To build pgBackRest from sources, use the `uc1_full` or `uc2_full` make targets.

To install pgBackRest and **check_pgbackrest** using PGDG packages, without deploying Icinga2, use the `uc1_light` or `uc2_light` make targets.
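For example, a complete manual cycle simply chains the targets defined in the Makefile above (shown here with the default `c7pg` profile; the cleanup targets are described in the sections below):

```bash
# Create and prepare the docker-host virtual machine
make init

# Deploy Use-Case 1 and run the activity script
make ACTIVITY=true uc1

# Remove the deployed containers and the cluster directory
make PROFILE=c7pg clean_ci

# Destroy the virtual machine when done
make clean_vm
```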
### Change the test profile

Add `PROFILE=xxx` to the make command.

Available profiles: `c7epas`, `c7pg`, `d10epas`, `d10pg`, `u20epas`, `u20pg`.

### Change the pgBackRest repository type

Add `PGBR_REPO_TYPE=xxx` to the make command.

Available types: `azure`, `s3`, `multi`, `posix`.

When the `multi` repository type is set, both `s3` and `azure` are used.

When the `posix` repository type is set, the repository path is automatically adjusted to `/shared/repo1`, where */shared* is a shared volume between the docker containers.

### Icinga2

To interact with Icinga2, the easiest way is to use the API:

```bash
# Login to the vagrant box
$ vagrant ssh

# Reschedule check_pgbackrest checks
$ curl -k -s -u 'icinga2-director:anyPassWord' -H 'Accept: application/json' -X POST \
 'https://localhost:5665/v1/actions/reschedule-check' \
 -d '{ "type": "Service", "filter": "match(pattern,service.name)", "filter_vars": { "pattern": "pgbackrest*" }, "pretty": true }' |jq

# Get check_pgbackrest checks status
$ curl -k -s -u 'icinga2-director:anyPassWord' -H 'Accept: application/json' -X GET \
 'https://localhost:5665/v1/objects/services' \
 -d '{ "filter": "match(pattern,service.name)", "filter_vars": { "pattern": "pgbackrest*" } }' |jq
```

### Cleaning

Before changing the `PROFILE` to deploy a new architecture, remove the docker containers and the cluster directory using `make PROFILE=xxx clean_ci`.

To remove the vagrant virtual machine: `make clean_vm`.
check_pgbackrest-REL2_2/tests/VALIDATION.md
# Validation process

First of all, initialize the virtual machine:

```bash
time make init
```

## PostgreSQL

```bash
# Use case 1 - CentOS 7 - pgBackRest packages, multi-repositories
time make ACTIVITY=true PROFILE=c7pg PGBR_REPO_TYPE=multi uc1
make PROFILE=c7pg clean_ci

# Use case 2 - CentOS 7 - pgBackRest packages
time make ACTIVITY=true PROFILE=c7pg uc2
make PROFILE=c7pg clean_ci

# Use case 1 - Ubuntu 20.04 - pgBackRest packages
time make ACTIVITY=true PROFILE=u20pg uc1
make PROFILE=u20pg clean_ci

# Use case 2 - Ubuntu 20.04 - pgBackRest packages, multi-repositories
time make ACTIVITY=true PROFILE=u20pg PGBR_REPO_TYPE=multi uc2
make PROFILE=u20pg clean_ci
```

* To build pgBackRest from sources, use the `uc1_full` or `uc2_full` make targets.

## EDB Postgres Advanced Server

```bash
# Use case 1 - CentOS 7 - pgBackRest packages, multi-repositories
time make ACTIVITY=true PROFILE=c7epas PGBR_REPO_TYPE=multi uc1
make PROFILE=c7epas clean_ci

# Use case 2 - CentOS 7 - pgBackRest packages
time make ACTIVITY=true PROFILE=c7epas uc2
make PROFILE=c7epas clean_ci

# Use case 1 - Ubuntu 20.04 - pgBackRest packages
time make ACTIVITY=true PROFILE=u20epas uc1
make PROFILE=u20epas clean_ci

# Use case 2 - Ubuntu 20.04 - pgBackRest packages, multi-repositories
time make ACTIVITY=true PROFILE=u20epas PGBR_REPO_TYPE=multi uc2
make PROFILE=u20epas clean_ci
```

* To build pgBackRest from sources, use the `uc1_full` or `uc2_full` make targets.
check_pgbackrest-REL2_2/tests/Vagrantfile
require 'yaml'

if File.file?('vagrant.yml') and ( custom = YAML.load_file('vagrant.yml') )
    edb_repository_username = custom['edb_repository_username'] if custom.has_key?('edb_repository_username')
    edb_repository_password = custom['edb_repository_password'] if custom.has_key?('edb_repository_password')
    pgbackrest_git_url = custom['pgbackrest_git_url'] if custom.has_key?('pgbackrest_git_url')
    pgbackrest_git_branch = 
custom['pgbackrest_git_branch'] if custom.has_key?('pgbackrest_git_branch') end Vagrant.configure(2) do |config| config.vm.provider :virtualbox do |vb| vb.memory = 4096 vb.cpus = 4 vb.name = "check_pgbackrest-docker-host" end config.vm.box = "bento/ubuntu-20.04" # config.vm.box = "ubuntu/focal64" config.ssh.insert_key = false # mount check_pgbackrest path for development testing config.vm.synced_folder "..", "/check_pgbackrest" # mount edb-ansible local git clone if File.directory?(File.expand_path("../../edb-ansible")) config.vm.synced_folder "../../edb-ansible", "/edb-ansible" end config.vm.provision "shell", inline: <<-SHELL #----------------------------------------------------------------------------------------------------------------------- echo 'Install Docker' && date curl -fsSL https://get.docker.com | sh usermod -aG docker vagrant #----------------------------------------------------------------------------------------------------------------------- echo 'Install Perl modules' && date apt-get install -y libyaml-libyaml-perl jq SHELL config.vm.provision "shell", privileged: false, inline: <<-SHELL #----------------------------------------------------------------------------------------------------------------------- echo 'Install Ansible' && date sudo apt-get install -y python3-pip python3-venv python3 -m pip install --user pipx python3 -m pipx ensurepath python3 -m pipx install ansible-core SHELL # Execute CI script in Vagrant environment config.vm.provision "exec-ci", privileged: false, type: "shell", path: 'vagrant.sh', env: { "ACTIVITY" => ENV['ACTIVITY'], "ARCH" => ENV['ARCH'], "EXTRA" => ENV['EXTRA'], "PGBR_BUILD" => ENV['PGBR_BUILD'], "PGBR_REPO_TYPE" => ENV['PGBR_REPO_TYPE'], "PROFILE" => ENV['PROFILE'], "edb_repository_username" => edb_repository_username, "edb_repository_password" => edb_repository_password, "pgbackrest_git_url" => pgbackrest_git_url, "pgbackrest_git_branch" => pgbackrest_git_branch }, run: 'never' # Clean a specific cluster in Vagrant environment $clean_script = <<-SCRIPT cd /vagrant echo "PROFILE = '$PROFILE'" source profile.d/$PROFILE.profile source profile.d/vagrant.profile sh run.sh -C -c "$CLPATH/$CLNAME" SCRIPT config.vm.provision "clean-ci", privileged: false, type: 'shell', inline: $clean_script, env: { "PROFILE" => ENV['PROFILE'] }, run: 'never' end check_pgbackrest-REL2_2/tests/architectures/000077500000000000000000000000001415336775100213065ustar00rootroot00000000000000check_pgbackrest-REL2_2/tests/architectures/use-case-1/000077500000000000000000000000001415336775100231515ustar00rootroot00000000000000check_pgbackrest-REL2_2/tests/architectures/use-case-1/config.yml000066400000000000000000000007751415336775100251520ustar00rootroot00000000000000--- cluster_name: use-case-1 platform: docker docker: image_name: centos:7 exposed_ports: - '22' - '5432' - '5444' cluster_vars: pg_version: 14 pg_type: PG disable_logging: false pgbackrest_repo_type: s3 instances: - name: pg1 ansible_group: primary pgbackrest: true - name: pg2 ansible_group: standby upstream: pg1 replication_type: asynchronous pgbackrest: true - name: pg3 ansible_group: standby upstream: pg1 replication_type: asynchronous pgbackrest: true check_pgbackrest-REL2_2/tests/architectures/use-case-2/000077500000000000000000000000001415336775100231525ustar00rootroot00000000000000check_pgbackrest-REL2_2/tests/architectures/use-case-2/config.yml000066400000000000000000000011751415336775100251460ustar00rootroot00000000000000--- cluster_name: use-case-2 platform: docker docker: image_name: 
centos:7 exposed_ports: - '22' - '5432' - '5444' cluster_vars: pg_version: 14 pg_type: PG disable_logging: false instances: - name: bck-host ansible_group: pgbackrest_repo_host - name: pg1 ansible_group: primary pgbackrest: true pgbackrest_repo_host: bck-host - name: pg2 ansible_group: standby upstream: pg1 replication_type: asynchronous pgbackrest: true pgbackrest_repo_host: bck-host - name: pg3 ansible_group: standby upstream: pg1 replication_type: asynchronous pgbackrest: true pgbackrest_repo_host: bck-host check_pgbackrest-REL2_2/tests/ci.sh000066400000000000000000000012461415336775100173730ustar00rootroot00000000000000#!/usr/bin/env bash set -o errexit set -o nounset cd "$(dirname "$0")" perl config.pl --force --architecture "$ARCH" \ --cluster-path "$CLPATH" --cluster-name "$CLNAME" \ --db-type "$DBTYPE" --db-version "$DBVERSION" \ --docker-image "$DOCKERI" --extra-vars "$EXTRA_VARS" sed -i "s/pg1/$CLNAME-1/g" "$CLPATH/$CLNAME/config.yml" sed -i "s/pg2/$CLNAME-2/g" "$CLPATH/$CLNAME/config.yml" sed -i "s/pg3/$CLNAME-3/g" "$CLPATH/$CLNAME/config.yml" sed -i "s/bck-host/$CLNAME-bck/g" "$CLPATH/$CLNAME/config.yml" echo "ACTIVITY = '$ACTIVITY'" export ACTIVITY=$ACTIVITY echo "RUN_ARGS = '$RUN_ARGS'" sh run.sh -c "$CLPATH/$CLNAME" "$RUN_ARGS" check_pgbackrest-REL2_2/tests/config.pl000066400000000000000000000131541415336775100202470ustar00rootroot00000000000000#!/usr/bin/env perl #################################################################################################################################### # Perl includes #################################################################################################################################### use strict; use warnings; use File::Basename qw(dirname); use File::Path qw(make_path); use Getopt::Long qw(GetOptions); use List::Util qw(any); use Pod::Usage qw(pod2usage); use YAML::XS qw(LoadFile DumpFile); #################################################################################################################################### # Global vars #################################################################################################################################### my $dbTypes = { 'PG' => ['10', '11', '12', '13', '14'], 'EPAS' => ['10', '11', '12', '13', '14'] }; my @supportedDockerImages = ('debian:9', 'debian:10', 'ubuntu:18.04', 'ubuntu:20.04', 'centos:7', 'centos:8', 'rockylinux:8'); #################################################################################################################################### # Usage #################################################################################################################################### =head1 NAME config.pl - generate configuration file =head1 SYNOPSIS config.pl [options] Cluster Options: --cluster-name cluster name (a directory named after this name will be created in cluster path) --cluster-path cluster path --extra-vars additional cluster variables ('key=value key2=value2' format) Test Options: --architecture target architecture --db-type database type ('EPAS' or 'PG') --db-version version of database Docker Options: --docker-image docker base image name ('debian:9', 'debian:10', 'ubuntu:18.04', 'ubuntu:20.04', 'centos:7', 'centos:8', 'rockylinux:8') General Options: --help display usage and exit --force force configuration file update =cut #################################################################################################################################### # Command line parameters 
#################################################################################################################################### my $bHelp = 0; my $bForce = 0; my $strArchitecture; my $strClusterName; my $strClusterPath; my $strExtraVars; my $strDbType; my $strDbVersion; my $strDockerImage; GetOptions( 'architecture=s' => \$strArchitecture, 'cluster-name=s' => \$strClusterName, 'cluster-path=s' => \$strClusterPath, 'db-type=s' => \$strDbType, 'db-version=s' => \$strDbVersion, 'docker-image=s' => \$strDockerImage, 'extra-vars=s' => \$strExtraVars, 'force' => \$bForce, 'help' => \$bHelp, ) or pod2usage( -exitval => 127 ); pod2usage() if $bHelp; #################################################################################################################################### # Run in eval block to catch errors #################################################################################################################################### eval{ print("-------------------PROCESS START-------------------\n"); print("INFO: config begin\n"); die("cluster path must be provided") unless defined($strClusterPath); die("db type '$strDbType' not supported") if (defined($strDbType) and !defined($dbTypes->{$strDbType})); if(defined($strDbVersion)){ die("db type must be provided when db version is provided") unless defined($strDbType); die("db type '$strDbType', version '$strDbVersion' not supported") unless (any { $_ eq $strDbVersion } @{$dbTypes->{$strDbType}}); } die("docker image '$strDockerImage' not supported") unless !defined($strDockerImage) or (any { $_ eq $strDockerImage } @supportedDockerImages); # Validate architecture and load configuration file die("architecture must be provided") unless defined($strArchitecture); my $archConfFile = dirname($0)."/architectures/".$strArchitecture."/config.yml"; die("architecture '$strArchitecture' not found") unless (-f $archConfFile); print("INFO: load '$archConfFile'\n"); my $archConfig = LoadFile($archConfFile); # Modify cluster configuration $archConfig->{cluster_name} = $strClusterName if defined($strClusterName); $archConfig->{cluster_vars}->{pg_type} = $strDbType if defined($strDbType); $archConfig->{cluster_vars}->{pg_version} = $strDbVersion if defined($strDbVersion); $archConfig->{docker}->{image_name} = $strDockerImage if defined($strDockerImage); # Add extra cluster vars if(defined $strExtraVars and length $strExtraVars){ $strExtraVars =~ s/^\s+|\s+$//g; foreach(split(/\s+/, $strExtraVars)){ my ($key, $value) = split(/=/, $_); die("extra variables format must be 'key=value'") unless defined($key) and defined($value); $archConfig->{cluster_vars}->{$key} = $value; } } # Create cluster directory my $strClusterDir = $strClusterPath."/".$strClusterName; die("cluster directory already exists") if (-e $strClusterDir and !$bForce); if(! 
-e $strClusterDir){ print("INFO: create cluster directory '$strClusterDir'\n"); make_path($strClusterDir, { verbose => 1 }) or die("failed to create '$strClusterDir'"); } print("INFO: write cluster configuration file\n"); DumpFile($strClusterDir."/config.yml", $archConfig) or die("failed to write cluster configuration file"); # Exit with success exit 0; }; die("ERROR: test execution failed - $@\n") if $@; check_pgbackrest-REL2_2/tests/platforms/000077500000000000000000000000001415336775100204505ustar00rootroot00000000000000check_pgbackrest-REL2_2/tests/platforms/azure/000077500000000000000000000000001415336775100215765ustar00rootroot00000000000000check_pgbackrest-REL2_2/tests/platforms/azure/blob-create-container.py000077500000000000000000000020661415336775100263160ustar00rootroot00000000000000#!/usr/bin/python import argparse, os, urllib3 from azure.storage.blob import BlobServiceClient, __version__ try: print("Azure Blob Storage v" + __version__) # Parse arguments parser = argparse.ArgumentParser() parser.add_argument("--container_name", "-c", help="container name to create") args = parser.parse_args() # Get Connection String from environment connect_str = os.getenv('AZURE_STORAGE_CONNECTION_STRING') urllib3.disable_warnings() blob_service_client = BlobServiceClient.from_connection_string(connect_str, connection_verify=False) # Create the container if args.container_name: container_client = blob_service_client.get_container_client(args.container_name) if container_client.exists(): print("Container %s already exists..." % args.container_name) else: print("Container name to create: %s" % args.container_name) container_client = blob_service_client.create_container(args.container_name) except Exception as ex: print('Exception:') print(ex)check_pgbackrest-REL2_2/tests/platforms/common/000077500000000000000000000000001415336775100217405ustar00rootroot00000000000000check_pgbackrest-REL2_2/tests/platforms/common/inventory/000077500000000000000000000000001415336775100237755ustar00rootroot00000000000000check_pgbackrest-REL2_2/tests/platforms/common/inventory/docker.j2000066400000000000000000000002231415336775100254760ustar00rootroot00000000000000--- all: hosts: {% for h in instances %} {% if platform == 'docker' %} {{ h.name }}: ansible_connection: docker {% endif%} {% endfor %}check_pgbackrest-REL2_2/tests/platforms/common/inventory/inventory.j2000066400000000000000000000006611415336775100262720ustar00rootroot00000000000000--- all: children: {% for g in instances_groups %} {{ g }}: hosts: {% for h in instances %} {% if h.ansible_group == g %} {{ h.name }}: {% endif%} {% endfor %} {% endfor %} {% if cluster_vars.deploy_icinga2 is defined and cluster_vars.deploy_icinga2 | bool %} icinga2: hosts: {{ cluster_name }}-icinga2: ansible_connection: docker ansible_python_interpreter: auto {% endif%} check_pgbackrest-REL2_2/tests/platforms/common/inventory/write.yml000066400000000000000000000046501415336775100256570ustar00rootroot00000000000000--- - name: Ensure that the cluster's inventory directories exist file: path: "{{ cluster_dir }}/{{ item }}" state: directory loop: - inventory - inventory/host_vars - inventory/group_vars - name: Create host_vars subdirectories file: path: "{{ cluster_dir }}/inventory/host_vars/{{ item.name }}" state: directory loop: "{{ instances | flatten(levels=1) }}" loop_control: label: >- {{ item.name }} - name: Get ansible groups set_fact: instances_groups: "{{ instances_groups | default([]) | union([ item.ansible_group ]) }}" loop: "{{ instances | flatten(levels=1) }}" 
loop_control: label: >- {{ item.ansible_group }} - name: Write docker static inventory file template: src: docker.j2 dest: "{{ cluster_dir }}/inventory.docker.yml" mode: 0644 when: platform == 'docker' - name: Write inventory file template: src: inventory.j2 dest: "{{ cluster_dir }}/inventory/inventory.yml" mode: 0644 - name: Write group_vars copy: content: | {{ group_vars|to_nice_yaml(indent=2) }} dest: "{{ group_dir }}/{{ file_name }}" mode: 0644 force: yes vars: ansible_ssh_private_key_file: "{{ cluster_dir }}/{{ ssh_key_file }}" file_name: "all.yml" group_dir: "{{ cluster_dir }}/inventory/group_vars" group_vars: > {{ cluster_vars|combine({ 'cluster_name': cluster_name, 'ansible_ssh_private_key_file': ansible_ssh_private_key_file, }) }} - name: Write instance variables for hosts copy: content: | {{ host_vars|to_nice_yaml(indent=2) }} dest: "{{ host_dir }}/{{ file_name }}" mode: 0644 force: yes vars: file_name: "instance_vars.yml" host_dir: "{{ cluster_dir }}/inventory/host_vars/{{ item.name }}" host_vars: "{{ item.vars }}" loop: "{{ instance_vars|flatten(levels=1) }}" loop_control: label: >- {{ item.name }} when: item.vars | length > 0 - name: Transform upstream property to upstream_node_private_ip ansible.builtin.lineinfile: path: "{{ host_dir }}/{{ file_name }}" regexp: '^upstream: ' line: "upstream_node_private_ip: {{ private_ip_list[item.vars.upstream] }}" vars: file_name: "instance_vars.yml" host_dir: "{{ cluster_dir }}/inventory/host_vars/{{ item.name }}" when: item.vars.upstream is defined loop: "{{ instance_vars|flatten(levels=1) }}" loop_control: label: >- {{ item.name }} check_pgbackrest-REL2_2/tests/platforms/common/provision.yml000066400000000000000000000017031415336775100245140ustar00rootroot00000000000000--- - name: Run ssh-keygen command: ssh-keygen -P "" -f "{{ ssh_key_file }}" args: chdir: "{{ cluster_dir }}" creates: "{{ ssh_key_file }}" - name: Ensure that the cluster's certs directory exist file: path: "{{ cluster_dir }}/certs" state: directory - name: Generate an OpenSSL private key - 2048 bits community.crypto.openssl_privatekey: path: "{{ cluster_dir }}/certs/{{ cluster_name }}.key" size: 2048 - name: Generate an OpenSSL Certificate Signing Request community.crypto.openssl_csr: path: "{{ cluster_dir }}/certs/{{ cluster_name }}.csr" privatekey_path: "{{ cluster_dir }}/certs/{{ cluster_name }}.key" - name: Generate a Self Signed OpenSSL certificate community.crypto.x509_certificate: provider: selfsigned path: "{{ cluster_dir }}/certs/{{ cluster_name }}.crt" privatekey_path: "{{ cluster_dir }}/certs/{{ cluster_name }}.key" csr_path: "{{ cluster_dir }}/certs/{{ cluster_name }}.csr"check_pgbackrest-REL2_2/tests/platforms/deprovision.yml000066400000000000000000000011561415336775100235370ustar00rootroot00000000000000--- - name: Deprovision cluster hosts: localhost tasks: - name: Require cluster directory to be specified assert: msg: "No cluster directory specified" that: - cluster_dir is defined and cluster_dir != '' - import_tasks: load-config.yml - assert: msg: "Unsupported platform: '{{ platform }}'" that: - platform is defined - platform in _available_platforms vars: _available_platforms: - 'docker' - include_tasks: "{{ platform }}/deprovision.yml" - include_tasks: 
"docker/deprovision-repository-types.yml"check_pgbackrest-REL2_2/tests/platforms/docker/000077500000000000000000000000001415336775100217175ustar00rootroot00000000000000check_pgbackrest-REL2_2/tests/platforms/docker/build_all_images.yml000066400000000000000000000012501415336775100257140ustar00rootroot00000000000000--- - name: Build all systemd images hosts: localhost tasks: - docker_image: name: "systemd/{{ item.base }}:{{ item.tag }}" state: present source: build build: path: systemd dockerfile: "{{ item.base }}.Dockerfile" pull: no args: BASE_IMAGE: "{{ item.base }}:{{ item.tag }}" loop: - { base: 'debian', tag: '9' } - { base: 'debian', tag: '10' } - { base: 'ubuntu', tag: '18.04' } - { base: 'ubuntu', tag: '20.04' } - { base: 'centos', tag: '7' } - { base: 'centos', tag: '8' } - { base: 'rockylinux', tag: '8' } check_pgbackrest-REL2_2/tests/platforms/docker/deprovision-repository-types.yml000066400000000000000000000017721415336775100303710ustar00rootroot00000000000000--- - name: Deprovision MinIO docker container docker_container: name: "{{ cluster_name }}-minio" state: absent container_default_behavior: compatibility when: > cluster_vars.pgbackrest_repo_type is defined and (cluster_vars.pgbackrest_repo_type == "s3" or cluster_vars.pgbackrest_repo_type == "multi") - name: Deprovision Azurite docker container docker_container: name: "{{ cluster_name }}-azurite" state: absent container_default_behavior: compatibility when: > cluster_vars.pgbackrest_repo_type is defined and (cluster_vars.pgbackrest_repo_type == "azure" or cluster_vars.pgbackrest_repo_type == "multi") - name: Deprovision Icinga2 docker container docker_container: name: "{{ cluster_name }}-icinga2" state: absent container_default_behavior: compatibility when: cluster_vars.deploy_icinga2 is defined and cluster_vars.deploy_icinga2 | bool - name: Delete docker network docker_network: name: "network_{{ cluster_name }}" state: absentcheck_pgbackrest-REL2_2/tests/platforms/docker/deprovision.yml000066400000000000000000000005451415336775100250070ustar00rootroot00000000000000--- - name: Deprovision docker containers docker_container: name: "{{ item.name }}" state: absent container_default_behavior: compatibility loop: "{{ instances | flatten(levels=1) }}" loop_control: label: >- {{ item.name }} - name: Delete docker network docker_network: name: "network_{{ cluster_name }}" state: absentcheck_pgbackrest-REL2_2/tests/platforms/docker/docker_container.yml000066400000000000000000000023621415336775100257560ustar00rootroot00000000000000--- - name: Provision docker container "{{ item.name }}" community.docker.docker_container: name: "{{ item.name }}" hostname: "{{ item.name }}" image: "systemd/{{ docker.image_name }}" state: started pull: no detach: yes exposed_ports: "{{ _exposed_ports }}" published_ports: "{{ _exposed_ports }}" networks: - name: "network_{{ cluster_name }}" networks_cli_compatible: yes volumes: - "/sys/fs/cgroup:/sys/fs/cgroup:ro" - "{{ cluster_dir }}/shared:/shared:z" tmpfs: - "/tmp" - "/run" - "/run/lock" container_default_behavior: compatibility network_mode: default restart_policy: unless-stopped vars: _exposed_ports: "{{ docker.exposed_ports|default(['22','5432', '5444']) }}" register: docker_container_register - set_fact: docker_container_results: "{{ docker_container_results|default([])|union([ dc|combine({ 'item': item|combine({ 'private_ip': nw.Networks[nw_name].IPAddress, 'ansible_host': nw.Networks[nw_name].IPAddress, }) }) ]) }}" vars: dc: "{{ docker_container_register.container }}" nw: "{{ 
dc.NetworkSettings }}" nw_name: "network_{{ cluster_name }}"check_pgbackrest-REL2_2/tests/platforms/docker/provision-repository-types.yml000066400000000000000000000116431415336775100300560ustar00rootroot00000000000000--- - name: Create docker network docker_network: name: "network_{{ cluster_name }}" state: present - name: Ensure that the cluster's MinIO data directory exists with default bucket file: path: "{{ cluster_dir }}/{{ item }}" state: directory loop: - "minio" - "minio/data" when: > cluster_vars.pgbackrest_repo_type is defined and (cluster_vars.pgbackrest_repo_type == "s3" or cluster_vars.pgbackrest_repo_type == "multi") - name: Provision MinIO docker container community.docker.docker_container: image: minio/minio name: "{{ cluster_name }}-minio" state: started restart_policy: always command: server /data --address :443 exposed_ports: "443" published_ports: "443" networks: - name: "network_{{ cluster_name }}" volumes: - "{{ cluster_dir }}/minio/data:/data:z" - "{{ cluster_dir }}/certs/{{ cluster_name }}.crt:/root/.minio/certs/public.crt:z" - "{{ cluster_dir }}/certs/{{ cluster_name }}.key:/root/.minio/certs/private.key:z" networks_cli_compatible: yes network_mode: default container_default_behavior: compatibility env: MINIO_ROOT_USER: "accessKey" MINIO_ROOT_PASSWORD: "superSECRETkey" register: minio_docker_container_register when: > cluster_vars.pgbackrest_repo_type is defined and (cluster_vars.pgbackrest_repo_type == "s3" or cluster_vars.pgbackrest_repo_type == "multi") - name: Waits for MinIO wait_for: host: "{{nw.Networks[nw_name].IPAddress}}" port: 443 delay: 5 vars: dc: "{{ minio_docker_container_register.container }}" nw: "{{ dc.NetworkSettings }}" nw_name: "network_{{ cluster_name }}" when: > cluster_vars.pgbackrest_repo_type is defined and (cluster_vars.pgbackrest_repo_type == "s3" or cluster_vars.pgbackrest_repo_type == "multi") - name: MinIO Python Client API - create bucket command: "python3 {{ playbook_dir }}/minio/create-bucket.py -b bucket" vars: dc: "{{ minio_docker_container_register.container }}" nw: "{{ dc.NetworkSettings }}" nw_name: "network_{{ cluster_name }}" environment: MINIO_ENDPOINT: "{{nw.Networks[nw_name].IPAddress}}" MINIO_ROOT_USER: "accessKey" MINIO_ROOT_PASSWORD: "superSECRETkey" when: > cluster_vars.pgbackrest_repo_type is defined and (cluster_vars.pgbackrest_repo_type == "s3" or cluster_vars.pgbackrest_repo_type == "multi") - name: Ensure that the cluster's Azurite data directory exists file: path: "{{ cluster_dir }}/{{ item }}" state: directory loop: - "azurite" - "azurite/data" when: > cluster_vars.pgbackrest_repo_type is defined and (cluster_vars.pgbackrest_repo_type == "azure" or cluster_vars.pgbackrest_repo_type == "multi") - name: Provision Azurite docker container community.docker.docker_container: image: mcr.microsoft.com/azure-storage/azurite name: "{{ cluster_name }}-azurite" state: started restart_policy: always command: azurite-blob --blobPort 443 --blobHost 0.0.0.0 --cert=/root/public.crt --key=/root/private.key -l /workspace -d /workspace/debug.log exposed_ports: "443" published_ports: "443" networks: - name: "network_{{ cluster_name }}" volumes: - "{{ cluster_dir }}/azurite/data:/workspace:z" - "{{ cluster_dir }}/certs/{{ cluster_name }}.crt:/root/public.crt:ro" - "{{ cluster_dir }}/certs/{{ cluster_name }}.key:/root/private.key:ro" networks_cli_compatible: yes network_mode: default container_default_behavior: compatibility env: AZURITE_ACCOUNTS: "pgbackrest:aF49wnZP" register: azure_docker_container_register when: > 
cluster_vars.pgbackrest_repo_type is defined and (cluster_vars.pgbackrest_repo_type == "azure" or cluster_vars.pgbackrest_repo_type == "multi") - name: Azure Blob Storage - create container command: "python3 {{ playbook_dir }}/azure/blob-create-container.py -c container" vars: dc: "{{ azure_docker_container_register.container }}" nw: "{{ dc.NetworkSettings }}" nw_name: "network_{{ cluster_name }}" environment: AZURE_STORAGE_CONNECTION_STRING: "DefaultEndpointsProtocol=https;AccountName=pgbackrest;AccountKey=aF49wnZP;BlobEndpoint=https://{{nw.Networks[nw_name].IPAddress}}/pgbackrest;" when: > cluster_vars.pgbackrest_repo_type is defined and (cluster_vars.pgbackrest_repo_type == "azure" or cluster_vars.pgbackrest_repo_type == "multi") - name: Provision Icinga2 docker container community.docker.docker_container: image: jordan/icinga2 name: "{{ cluster_name }}-icinga2" state: started restart_policy: "no" published_ports: - "80:80" - "443:443" - "5665:5665" networks: - name: "network_{{ cluster_name }}" networks_cli_compatible: yes network_mode: default container_default_behavior: compatibility env: ICINGA2_FEATURE_DIRECTOR_PASS: "anyPassWord" when: cluster_vars.deploy_icinga2 is defined and cluster_vars.deploy_icinga2 | bool check_pgbackrest-REL2_2/tests/platforms/docker/provision.yml000066400000000000000000000035051415336775100244750ustar00rootroot00000000000000--- - assert: msg: "Unsupported docker image_name: '{{ docker.image_name }}'" that: docker.image_name in _available_images vars: _available_images: - 'debian:9' - 'debian:10' - 'ubuntu:18.04' - 'ubuntu:20.04' - 'centos:7' - 'centos:8' - 'rockylinux:8' - name: Build systemd image {{ docker.image_name }} docker_image: name: "systemd/{{ _image_base }}:{{ _image_tag }}" state: present source: build build: path: docker/systemd dockerfile: "{{ _image_base }}.Dockerfile" pull: no args: BASE_IMAGE: "{{ _image_base }}:{{ _image_tag }}" vars: _parts: "{{ docker.image_name.split(':') }}" _image_base: "{{ _parts[0] }}" _image_tag: "{{ _parts[1] }}" - name: Create docker network docker_network: name: "network_{{ cluster_name }}" state: present - name: Ensure that the cluster's default shared directory exist file: path: "{{ cluster_dir }}/shared" state: directory owner: root group: root mode: '1777' become: yes - name: Provision docker containers include_tasks: docker_container.yml loop: "{{ instances | flatten(levels=1) }}" loop_control: label: >- {{ item.name }} - name: Set instance variables set_fact: instance_vars: "{{ instance_vars | default([]) | union([ { 'name': item.item.name, 'vars': item.item } ]) }}" with_items: "{{ docker_container_results }}" loop_control: label: >- {{ item.item.name }} - name: Create private ip list set_fact: private_ip_list: "{{ private_ip_list | default({}) | combine({ item.name: item.vars.private_ip }) }}" when: item.vars.private_ip is defined loop: "{{ instance_vars|flatten(levels=1) }}" loop_control: label: >- {{ item.name }} check_pgbackrest-REL2_2/tests/platforms/docker/systemd/000077500000000000000000000000001415336775100234075ustar00rootroot00000000000000check_pgbackrest-REL2_2/tests/platforms/docker/systemd/centos.Dockerfile000066400000000000000000000011151415336775100266710ustar00rootroot00000000000000ARG BASE_IMAGE FROM ${BASE_IMAGE} ENV container docker RUN cd /lib/systemd/system/sysinit.target.wants/; \ for i in *; do [ $i = systemd-tmpfiles-setup.service ] || rm -f $i; done RUN rm -f /lib/systemd/system/multi-user.target.wants/* \ /etc/systemd/system/*.wants/* \ 
/lib/systemd/system/local-fs.target.wants/* \ /lib/systemd/system/sockets.target.wants/*udev* \ /lib/systemd/system/sockets.target.wants/*initctl* \ /lib/systemd/system/basic.target.wants/* \ /lib/systemd/system/anaconda.target.wants/* VOLUME [ "/sys/fs/cgroup" ] CMD ["/usr/sbin/init"]check_pgbackrest-REL2_2/tests/platforms/docker/systemd/debian.Dockerfile000066400000000000000000000012621415336775100266230ustar00rootroot00000000000000ARG BASE_IMAGE FROM ${BASE_IMAGE} ENV container docker ENV LC_ALL C ENV DEBIAN_FRONTEND noninteractive RUN apt-get update \ && apt-get install -y python3 systemd systemd-sysv \ && apt-get clean \ && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* RUN rm -f /lib/systemd/system/multi-user.target.wants/* \ /etc/systemd/system/*.wants/* \ /lib/systemd/system/local-fs.target.wants/* \ /lib/systemd/system/sockets.target.wants/*udev* \ /lib/systemd/system/sockets.target.wants/*initctl* \ /lib/systemd/system/sysinit.target.wants/systemd-tmpfiles-setup* \ /lib/systemd/system/systemd-update-utmp* VOLUME [ "/sys/fs/cgroup" ] CMD ["/lib/systemd/systemd"] check_pgbackrest-REL2_2/tests/platforms/docker/systemd/rockylinux.Dockerfile000066400000000000000000000012101415336775100276010ustar00rootroot00000000000000ARG BASE_IMAGE FROM rockylinux/${BASE_IMAGE} ENV container docker RUN yum install -y procps net-tools yum-utils RUN cd /lib/systemd/system/sysinit.target.wants/; \ for i in *; do [ $i = systemd-tmpfiles-setup.service ] || rm -f $i; done RUN rm -f /lib/systemd/system/multi-user.target.wants/* \ /etc/systemd/system/*.wants/* \ /lib/systemd/system/local-fs.target.wants/* \ /lib/systemd/system/sockets.target.wants/*udev* \ /lib/systemd/system/sockets.target.wants/*initctl* \ /lib/systemd/system/basic.target.wants/* \ /lib/systemd/system/anaconda.target.wants/* VOLUME [ "/sys/fs/cgroup" ] CMD ["/usr/sbin/init"] check_pgbackrest-REL2_2/tests/platforms/docker/systemd/ubuntu.Dockerfile000066400000000000000000000016111415336775100267210ustar00rootroot00000000000000ARG BASE_IMAGE FROM ${BASE_IMAGE} ENV container docker ENV LC_ALL C ENV DEBIAN_FRONTEND noninteractive RUN sed -i 's/# deb/deb/g' /etc/apt/sources.list RUN apt-get update \ && apt-get install -y systemd systemd-sysv \ && apt-get clean \ && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* RUN cd /lib/systemd/system/sysinit.target.wants/ \ && ls | grep -v systemd-tmpfiles-setup | xargs rm -f $1 RUN rm -f /lib/systemd/system/multi-user.target.wants/* \ /etc/systemd/system/*.wants/* \ /lib/systemd/system/local-fs.target.wants/* \ /lib/systemd/system/sockets.target.wants/*udev* \ /lib/systemd/system/sockets.target.wants/*initctl* \ /lib/systemd/system/basic.target.wants/* \ /lib/systemd/system/anaconda.target.wants/* \ /lib/systemd/system/plymouth* \ /lib/systemd/system/systemd-update-utmp* VOLUME [ "/sys/fs/cgroup" ] CMD ["/lib/systemd/systemd"]check_pgbackrest-REL2_2/tests/platforms/load-config.yml000066400000000000000000000011151415336775100233530ustar00rootroot00000000000000--- - name: Set full path to cluster_dir and config_file set_fact: config_file: "{{ cluster_dir }}/{{ file }}" vars: file: >- {{ config|default('config.yml') }} - name: Load cluster configuration file include_vars: "{{ config_file }}" - name: Ensure cluster_name is specified assert: msg: "Please define cluster_name in {{ config_file }}" that: - cluster_name is defined - cluster_name != '' - name: Set ssh_key_file set_fact: ssh_key_file: >- {{ ssh_key_file|default(_default) }} vars: _default: "id_{{ cluster_name|lower 
}}"check_pgbackrest-REL2_2/tests/platforms/minio/000077500000000000000000000000001415336775100215635ustar00rootroot00000000000000check_pgbackrest-REL2_2/tests/platforms/minio/create-bucket.py000077500000000000000000000020201415336775100246500ustar00rootroot00000000000000#!/usr/bin/python import argparse, os, urllib3 from minio import Minio from minio.error import S3Error def main(): print("MinIO Python Client API") # Parse arguments parser = argparse.ArgumentParser() parser.add_argument("--bucket", "-b", help="bucket name to create") args = parser.parse_args() # Create HTTPS client connection without certificate verification urllib3.disable_warnings() client = Minio( os.getenv('MINIO_ENDPOINT'), os.getenv('MINIO_ROOT_USER'), os.getenv('MINIO_ROOT_PASSWORD'), secure=True, http_client=urllib3.PoolManager(cert_reqs='CERT_NONE') ) # Create the container if args.bucket: if client.bucket_exists(args.bucket): print("Bucket %s already exists..." % args.bucket) else: print("Bucket name to create: %s" % args.bucket) client.make_bucket(args.bucket) if __name__ == "__main__": try: main() except S3Error as exc: print("error occurred.", exc) check_pgbackrest-REL2_2/tests/platforms/provision.yml000066400000000000000000000013121415336775100232200ustar00rootroot00000000000000--- - name: Provision cluster hosts: localhost tasks: - name: Require cluster directory to be specified assert: msg: "No cluster directory specified" that: - cluster_dir is defined and cluster_dir != '' - import_tasks: load-config.yml - assert: msg: "Unsupported platform: '{{ platform }}'" that: - platform is defined - platform in _available_platforms vars: _available_platforms: - 'docker' - include_tasks: "common/provision.yml" - include_tasks: "{{ platform }}/provision.yml" - include_tasks: "docker/provision-repository-types.yml" - include_tasks: common/inventory/write.ymlcheck_pgbackrest-REL2_2/tests/platforms/system-config.yml000066400000000000000000000005441415336775100237650ustar00rootroot00000000000000--- - name: Apply default system configuration hosts: all tasks: - name: Require cluster directory to be specified assert: msg: "No cluster directory specified" that: - cluster_dir is defined and cluster_dir != '' run_once: true - import_tasks: load-config.yml - include_role: name: 'sys'check_pgbackrest-REL2_2/tests/playbooks/000077500000000000000000000000001415336775100204445ustar00rootroot00000000000000check_pgbackrest-REL2_2/tests/playbooks/activity.yml000066400000000000000000000063511415336775100230300ustar00rootroot00000000000000--- - name: Simulate activity hosts: all any_errors_fatal: true tasks: - name: Copy regression tests to remote host copy: src: regress/ dest: /tmp/regress/ directory_mode: yes mode: '0755' when: > 'primary' in group_names and (hostvars[inventory_hostname].pgbackrest is defined and hostvars[inventory_hostname].pgbackrest == true) - name: Run check_pgbackrest regression tests command: /usr/bin/env bash /tmp/regress/regression-tests.bash -P /usr/bin 2>&1 |tee /var/log/regression-tests.log environment: PGBIN: "{{ cluster_vars['pg_bin_path'] }}" PGDATABASE: "{{ cluster_vars['pg_database'] }}" PGUNIXSOCKET: "{{ cluster_vars['pg_unix_socket'] }}" PGUSER: "{{ cluster_vars['pg_owner'] }}" STANZA: "{{ cluster_vars['cluster_name'] }}" PGBR_HOST: "{{ cluster_vars['pgbackrest_repo_host'] | default(None) }}" PGBR_USER: "{{ cluster_vars['pgbackrest_user'] }}" PGBR_REPO_TYPE: "{{ cluster_vars['pgbackrest_repo_type'] }}" SCRIPT_PROFILE: "" vars: cluster_vars: "{{ ansible_local['profile']['global'] }}" register: 
regress_output when: > 'primary' in group_names and (hostvars[inventory_hostname].pgbackrest is defined and hostvars[inventory_hostname].pgbackrest == true) - name: Regression tests output debug: var=regress_output.stdout_lines when: regress_output.changed - name: Copy activity script to remote host copy: src: scripts/simulate-activity-basic.bash dest: /tmp/simulate-activity-basic.bash mode: '0755' when: > 'primary' in group_names and (hostvars[inventory_hostname].pgbackrest is defined and hostvars[inventory_hostname].pgbackrest == true) - name: Simulate basic activity command: /usr/bin/env bash /tmp/simulate-activity-basic.bash -s 10 -a 10 2>&1 |tee /var/log/simulate-activity-basic.log environment: PGBIN: "{{ cluster_vars['pg_bin_path'] }}" PGDATABASE: "{{ cluster_vars['pg_database'] }}" PGSVC: "{{ cluster_vars['pg_service'] }}" PGUNIXSOCKET: "{{ cluster_vars['pg_unix_socket'] }}" PGUSER: "{{ cluster_vars['pg_owner'] }}" STANZA: "{{ cluster_vars['cluster_name'] }}" PGBR_HOST: "{{ cluster_vars['pgbackrest_repo_host'] | default(None) }}" PGBR_STANDBIES: "{{ cluster_vars['pgbackrest_standbies'] | default(None) }}" PGBR_USER: "{{ cluster_vars['pgbackrest_user'] }}" PGBR_REPO_TYPE: "{{ cluster_vars['pgbackrest_repo_type'] }}" SCRIPT_PROFILE: "" vars: cluster_vars: "{{ ansible_local['profile']['global'] }}" register: basic_activity_output when: > 'primary' in group_names and (hostvars[inventory_hostname].pgbackrest is defined and hostvars[inventory_hostname].pgbackrest == true) - name: Basic activity output debug: var=basic_activity_output.stdout_lines when: basic_activity_output.changed - name: Check Icinga2 services include_role: name: setup_check_pgbackrest tasks_from: icinga2-check.yml vars: reschedule_check_icinga2: true when: deploy_icinga2 is defined and deploy_icinga2 | bool check_pgbackrest-REL2_2/tests/playbooks/deploy.yml000066400000000000000000000076141415336775100224730ustar00rootroot00000000000000--- - name: Deploy cluster hosts: all any_errors_fatal: true tasks: - name: Require cluster directory to be specified assert: msg: "No cluster directory specified" that: - cluster_dir is defined and cluster_dir != '' run_once: true - name: Check EDB repository credentials assert: msg: "Missing credentials" that: - lookup('env', 'EDB_REPO_USERNAME') != '' - lookup('env', 'EDB_REPO_PASSWORD') != '' run_once: true # Install PGDG and EDB repositories - set_fact: supported_pg_version: - 10 - 11 - 12 - 13 - 14 supported_os: - CentOS7 - CentOS8 - Rocky8 # still experimental! 
- Ubuntu18 - Ubuntu20 - Debian9 - Debian10 - include_role: name: edb_devops.edb_postgres.setup_repo vars: pg_type: "PG" repo_username: "{{ lookup('env', 'EDB_REPO_USERNAME') }}" repo_password: "{{ lookup('env', 'EDB_REPO_PASSWORD') }}" when: "'icinga2' not in group_names" - include_role: name: edb_devops.edb_postgres.setup_repo vars: pg_type: "EPAS" repo_username: "{{ lookup('env', 'EDB_REPO_USERNAME') }}" repo_password: "{{ lookup('env', 'EDB_REPO_PASSWORD') }}" when: "'icinga2' not in group_names" # Setup testing repositories for EPAS testing - include_role: name: sys/testing_repo tasks_from: edb-staging vars: repo_username: "{{ lookup('env', 'EDB_REPO_USERNAME') }}" repo_password: "{{ lookup('env', 'EDB_REPO_PASSWORD') }}" when: "pg_type == 'EPAS' and pg_version|int >= 14" # Setup PG apt testing repositories for pgBackRest - include_role: name: sys/testing_repo tasks_from: pgdg-apt-testing when: > ansible_os_family == 'Debian' and 'icinga2' not in group_names # Install db server and setup replication - set_fact: pg_instance_name: "main" use_replication_slots: false use_hostname: false pg_initdb_options: "-k --encoding=UTF-8 --locale=C.UTF-8" - set_fact: pg_initdb_options: "-k --encoding=UTF-8 --locale=en_US.UTF-8" when: > ansible_os_family == 'RedHat' and ansible_distribution_major_version == "7" - set_fact: pg_ssl: false # temporary fix until sslutils is available when: > ansible_os_family == 'Debian' and pg_version|int >= 14 - include_role: name: install_dbserver # name: edb_devops.edb_postgres.install_dbserver when: "'primary' in group_names or 'standby' in group_names" - include_role: name: edb_devops.edb_postgres.init_dbserver when: "'primary' in group_names" - include_role: name: edb_devops.edb_postgres.setup_replication when: "'standby' in group_names" # Install pgBackRest and check_pgbackrest - include_role: name: setup_pgbackrest when: > 'pgbackrest_repo_host' in group_names or (hostvars[inventory_hostname].pgbackrest is defined and hostvars[inventory_hostname].pgbackrest == true) - include_role: name: setup_check_pgbackrest when: > 'pgbackrest_repo_host' in group_names or (hostvars[inventory_hostname].pgbackrest is defined and hostvars[inventory_hostname].pgbackrest == true) or 'icinga2' in group_names # Save facts locally for other playbooks - name: Ensure local facts directory exists file: state=directory path="/etc/ansible/facts.d" - name: Save local facts template: src: "profile.fact.j2" dest: "/etc/ansible/facts.d/profile.fact" when: > 'primary' in group_names and (hostvars[inventory_hostname].pgbackrest is defined and hostvars[inventory_hostname].pgbackrest == true) check_pgbackrest-REL2_2/tests/playbooks/regress/000077500000000000000000000000001415336775100221165ustar00rootroot00000000000000check_pgbackrest-REL2_2/tests/playbooks/regress/expected/000077500000000000000000000000001415336775100237175ustar00rootroot00000000000000check_pgbackrest-REL2_2/tests/playbooks/regress/expected/archives-age-alert-ko.out000066400000000000000000000000271415336775100305210ustar00rootroot00000000000000WAL_ARCHIVES CRITICAL check_pgbackrest-REL2_2/tests/playbooks/regress/expected/archives-age-alert-ok.out000066400000000000000000000000211415336775100305130ustar00rootroot00000000000000WAL_ARCHIVES OK check_pgbackrest-REL2_2/tests/playbooks/regress/expected/archives-ignore-after.out000066400000000000000000000000551415336775100306340ustar00rootroot00000000000000WAL_ARCHIVES UNKNOWN - no archived WAL found 
check_pgbackrest-REL2_2/tests/playbooks/regress/expected/archives-ignore-before.out000066400000000000000000000000551415336775100307750ustar00rootroot00000000000000WAL_ARCHIVES UNKNOWN - no archived WAL found check_pgbackrest-REL2_2/tests/playbooks/regress/expected/archives-max-archives-check-ko.out000066400000000000000000000000511415336775100323170ustar00rootroot00000000000000max-archives-check-number limit exceeded.check_pgbackrest-REL2_2/tests/playbooks/regress/expected/archives-ok-global.out000066400000000000000000000000211415336775100301120ustar00rootroot00000000000000WAL_ARCHIVES OK check_pgbackrest-REL2_2/tests/playbooks/regress/expected/archives-ok.out000066400000000000000000000000211415336775100266540ustar00rootroot00000000000000WAL_ARCHIVES OK check_pgbackrest-REL2_2/tests/playbooks/regress/expected/archives-repo2-ok.out000066400000000000000000000000211415336775100277010ustar00rootroot00000000000000WAL_ARCHIVES OK check_pgbackrest-REL2_2/tests/playbooks/regress/expected/list.out000066400000000000000000000002711415336775100254230ustar00rootroot00000000000000List of available services: archives Check WAL archives. check_pgb_version Check the version of this check_pgbackrest script. retention Check the retention policy. check_pgbackrest-REL2_2/tests/playbooks/regress/expected/retention-age-to-full.out000066400000000000000000000000611415336775100305660ustar00rootroot00000000000000BACKUPS_RETENTION OK - backups policy checks ok check_pgbackrest-REL2_2/tests/playbooks/regress/expected/retention-age-to-oldest-fail.out000066400000000000000000000000641415336775100320320ustar00rootroot00000000000000BACKUPS_RETENTION CRITICAL - backups are too young check_pgbackrest-REL2_2/tests/playbooks/regress/expected/retention-age.out000066400000000000000000000000611415336775100272060ustar00rootroot00000000000000BACKUPS_RETENTION OK - backups policy checks ok check_pgbackrest-REL2_2/tests/playbooks/regress/expected/retention-diff.out000066400000000000000000000000611415336775100273620ustar00rootroot00000000000000BACKUPS_RETENTION OK - backups policy checks ok check_pgbackrest-REL2_2/tests/playbooks/regress/expected/retention-fail.out000066400000000000000000000001601415336775100273650ustar00rootroot00000000000000BACKUPS_RETENTION CRITICAL - not enough full backups: 2 required, backups are too old, full backups are too old check_pgbackrest-REL2_2/tests/playbooks/regress/expected/retention-full-global.out000066400000000000000000000000611415336775100306520ustar00rootroot00000000000000BACKUPS_RETENTION OK - backups policy checks ok check_pgbackrest-REL2_2/tests/playbooks/regress/expected/retention-full-repo2-ko.out000066400000000000000000000000651415336775100310540ustar00rootroot00000000000000BACKUPS_RETENTION CRITICAL - repo2: no valid backups check_pgbackrest-REL2_2/tests/playbooks/regress/expected/retention-full.out000066400000000000000000000000611415336775100274140ustar00rootroot00000000000000BACKUPS_RETENTION OK - backups policy checks ok check_pgbackrest-REL2_2/tests/playbooks/regress/expected/retention-incr.out000066400000000000000000000000611415336775100274050ustar00rootroot00000000000000BACKUPS_RETENTION OK - backups policy checks ok check_pgbackrest-REL2_2/tests/playbooks/regress/regression-tests.bash000077500000000000000000000274431415336775100263120ustar00rootroot00000000000000#!/usr/bin/env bash set -o nounset cd "$(dirname "$0")" # vars PLUGIN_PATH=/usr/lib64/nagios/plugins RESULTS_DIR=/tmp/results SKIP_INIT=false SKIP_REPO2_CLEAR=false usage() { echo "Usage:" echo " -s Skip backups 
initialization step." echo " -S Skip repo2 clear step when multiple repositories are used." echo " -P Change check_pgbackrest plugin path." echo " -p Use local or remote profile." } while getopts "sSP:p:" o; do case "${o}" in s) SKIP_INIT=true ;; S) SKIP_REPO2_CLEAR=true ;; P) PLUGIN_PATH=${OPTARG} ;; p) SCRIPT_PROFILE=${OPTARG} ;; *) usage 1>&2 exit 1 ;; esac done shift $((OPTIND-1)) if [ -z "$SCRIPT_PROFILE" ]; then SCRIPT_PROFILE="local" if [ ! -z $PGBR_HOST ]; then SCRIPT_PROFILE="remote" fi fi if [ "$SCRIPT_PROFILE" != "local" ] && [ "$SCRIPT_PROFILE" != "remote" ]; then usage fi PYTHON="python3" command -v $PYTHON >/dev/null 2>&1 || { PYTHON="python"; } SSH_ARGS='-o ConnectTimeout=10 -o BatchMode=yes -o StrictHostKeyChecking=no' echo "SKIP_INIT = $SKIP_INIT" echo "PLUGIN_PATH = $PLUGIN_PATH" echo "SCRIPT_PROFILE = $SCRIPT_PROFILE" echo "PGBIN = $PGBIN" echo "PGDATABASE = $PGDATABASE" echo "PGUNIXSOCKET = $PGUNIXSOCKET" echo "PGUSER = $PGUSER" echo "STANZA = $STANZA" if [ ! -z "$PGBR_HOST" ]; then echo "PGBR_USER = $PGBR_USER" echo "PGBR_HOST = $PGBR_HOST" PGBR_HOST=(`$PYTHON -c "print(' '.join($PGBR_HOST))"`) fi echo "PGBR_REPO_TYPE = $PGBR_REPO_TYPE" REPO="" if [ "$PGBR_REPO_TYPE" = "multi" ]; then REPO="--repo=1" echo "...multi repo support, defaulting to repo1" if ! $SKIP_REPO2_CLEAR; then # Clear repo2 echo "...clear repo2" if [ "$SCRIPT_PROFILE" = "local" ]; then sudo -iu $PGUSER pgbackrest --stanza=$STANZA --repo=2 --recurse repo-rm archive/$STANZA sudo -iu $PGUSER pgbackrest --stanza=$STANZA --repo=2 --recurse repo-rm backup/$STANZA sudo -iu $PGUSER pgbackrest --stanza=$STANZA --log-level-console=warn stanza-create else sudo -iu $PGUSER ssh ${SSH_ARGS} ${PGBR_USER}@${PGBR_HOST} "pgbackrest --stanza=$STANZA --repo=2 --recurse repo-rm archive/$STANZA" sudo -iu $PGUSER ssh ${SSH_ARGS} ${PGBR_USER}@${PGBR_HOST} "pgbackrest --stanza=$STANZA --repo=2 --recurse repo-rm backup/$STANZA" sudo -iu $PGUSER ssh ${SSH_ARGS} ${PGBR_USER}@${PGBR_HOST} "pgbackrest --stanza=$STANZA --log-level-console=warn stanza-create" fi fi fi if [ ! -d $RESULTS_DIR ]; then mkdir $RESULTS_DIR else rmdir $RESULTS_DIR mkdir $RESULTS_DIR fi ## Tests # Initiate backups (full, diff, incr) if ! $SKIP_INIT; then echo "...Initiate backups (full, diff, incr)" if [ "$SCRIPT_PROFILE" = "local" ]; then sudo -iu $PGUSER pgbackrest --stanza=$STANZA $REPO backup --type=full --log-level-console=warn --repo1-retention-full=1 sudo -iu $PGUSER pgbackrest --stanza=$STANZA $REPO backup --type=diff --log-level-console=warn sudo -iu $PGUSER pgbackrest --stanza=$STANZA $REPO backup --type=incr --log-level-console=warn else sudo -iu $PGUSER ssh ${SSH_ARGS} ${PGBR_USER}@${PGBR_HOST} "pgbackrest --stanza=$STANZA $REPO backup --type=full --log-level-console=warn --repo1-retention-full=1" sudo -iu $PGUSER ssh ${SSH_ARGS} ${PGBR_USER}@${PGBR_HOST} "pgbackrest --stanza=$STANZA $REPO backup --type=diff --log-level-console=warn" sudo -iu $PGUSER ssh ${SSH_ARGS} ${PGBR_USER}@${PGBR_HOST} "pgbackrest --stanza=$STANZA $REPO backup --type=incr --log-level-console=warn" fi fi # --list echo "--list" $PLUGIN_PATH/check_pgbackrest --list | tee $RESULTS_DIR/list.out # --version echo "--version" $PLUGIN_PATH/check_pgbackrest --version # --service=retention --retention-full, --retention-diff, --retention-incr echo "--service=retention --retention-full, --retention-diff, --retention-incr" if [ "$PGBR_REPO_TYPE" = "multi" ] && ! 
$SKIP_REPO2_CLEAR; then # repo2 should be empty, the service should then fail $PLUGIN_PATH/check_pgbackrest --prefix="sudo -u $PGUSER" --stanza=$STANZA --service=retention --retention-full=1 > $RESULTS_DIR/retention-full-repo2-ko.out fi $PLUGIN_PATH/check_pgbackrest --prefix="sudo -u $PGUSER" --stanza=$STANZA $REPO --service=retention --retention-full=1 --retention-diff=1 --retention-incr=1 --output=nagios_strict echo $PLUGIN_PATH/check_pgbackrest --prefix="sudo -u $PGUSER" --stanza=$STANZA $REPO --service=retention --retention-full=1 --retention-diff=1 --retention-incr=1 --output=human $PLUGIN_PATH/check_pgbackrest --prefix="sudo -u $PGUSER" --stanza=$STANZA $REPO --service=retention --retention-full=1 --retention-diff=1 --retention-incr=1 --output=prtg echo $PLUGIN_PATH/check_pgbackrest --prefix="sudo -u $PGUSER" --stanza=$STANZA $REPO --service=retention --retention-full=1 | cut -f1 -d"|" > $RESULTS_DIR/retention-full.out $PLUGIN_PATH/check_pgbackrest --prefix="sudo -u $PGUSER" --stanza=$STANZA $REPO --service=retention --retention-diff=1 | cut -f1 -d"|" > $RESULTS_DIR/retention-diff.out $PLUGIN_PATH/check_pgbackrest --prefix="sudo -u $PGUSER" --stanza=$STANZA $REPO --service=retention --retention-incr=1 | cut -f1 -d"|" > $RESULTS_DIR/retention-incr.out if [ "$PGBR_REPO_TYPE" = "multi" ] && ! $SKIP_REPO2_CLEAR; then # Take an extra backup for repo2 and make sure the global check will see it if [ "$SCRIPT_PROFILE" = "local" ]; then sudo -iu $PGUSER pgbackrest --stanza=$STANZA --repo=2 backup --type=full --log-level-console=warn else sudo -iu $PGUSER ssh ${SSH_ARGS} ${PGBR_USER}@${PGBR_HOST} "pgbackrest --stanza=$STANZA --repo=2 backup --type=full --log-level-console=warn" fi $PLUGIN_PATH/check_pgbackrest --prefix="sudo -u $PGUSER" --stanza=$STANZA --service=retention --retention-full=2 | cut -f1 -d"|" > $RESULTS_DIR/retention-full-global.out fi # --service=retention --retention-age echo "--service=retention --retention-age" $PLUGIN_PATH/check_pgbackrest --prefix="sudo -u $PGUSER" --stanza=$STANZA $REPO --service=retention --retention-age=1h --output=nagios_strict echo $PLUGIN_PATH/check_pgbackrest --prefix="sudo -u $PGUSER" --stanza=$STANZA $REPO --service=retention --retention-age=1h --output=human $PLUGIN_PATH/check_pgbackrest --prefix="sudo -u $PGUSER" --stanza=$STANZA $REPO --service=retention --retention-age=1h | cut -f1 -d"|" > $RESULTS_DIR/retention-age.out # --service=retention --retention-age-to-full echo "--service=retention --retention-age-to-full" $PLUGIN_PATH/check_pgbackrest --prefix="sudo -u $PGUSER" --stanza=$STANZA $REPO --service=retention --retention-age-to-full=1h --output=nagios_strict echo $PLUGIN_PATH/check_pgbackrest --prefix="sudo -u $PGUSER" --stanza=$STANZA $REPO --service=retention --retention-age-to-full=1h --output=human $PLUGIN_PATH/check_pgbackrest --prefix="sudo -u $PGUSER" --stanza=$STANZA $REPO --service=retention --retention-age-to-full=1h | cut -f1 -d"|" > $RESULTS_DIR/retention-age-to-full.out # --service=retention --retention-age-to-oldest # check if the oldest backup is too young echo "--service=retention --retention-age-to-oldest fail" $PLUGIN_PATH/check_pgbackrest --prefix="sudo -u $PGUSER" --stanza=$STANZA $REPO --service=retention --retention-age-to-oldest=1h --output=nagios_strict echo $PLUGIN_PATH/check_pgbackrest --prefix="sudo -u $PGUSER" --stanza=$STANZA $REPO --service=retention --retention-age-to-oldest=1h --output=human $PLUGIN_PATH/check_pgbackrest --prefix="sudo -u $PGUSER" --stanza=$STANZA $REPO --service=retention 
--retention-age-to-oldest=1h | cut -f1 -d"|" > $RESULTS_DIR/retention-age-to-oldest-fail.out # --service=retention fail echo "--service=retention fail" sudo -iu $PGUSER $PGBIN/psql -h $PGUNIXSOCKET -d $PGDATABASE -c "SELECT pg_sleep(2);" > /dev/null 2>&1 $PLUGIN_PATH/check_pgbackrest --prefix="sudo -u $PGUSER" --stanza=$STANZA $REPO --service=retention --retention-full=2 --retention-age=1s --retention-age-to-full=1s --output=nagios_strict echo $PLUGIN_PATH/check_pgbackrest --prefix="sudo -u $PGUSER" --stanza=$STANZA $REPO --service=retention --retention-full=2 --retention-age=1s --retention-age-to-full=1s --output=human $PLUGIN_PATH/check_pgbackrest --prefix="sudo -u $PGUSER" --stanza=$STANZA $REPO --service=retention --retention-full=2 --retention-age=1s --retention-age-to-full=1s | cut -f1 -d"|" > $RESULTS_DIR/retention-fail.out # --service=archives echo "--service=archives" if [ "$PGBR_REPO_TYPE" = "multi" ] && ! $SKIP_REPO2_CLEAR; then $PLUGIN_PATH/check_pgbackrest --prefix="sudo -u $PGUSER" --stanza=$STANZA --repo=2 --service=archives | cut -f1 -d"-" > $RESULTS_DIR/archives-repo2-ok.out fi sudo -iu $PGUSER $PGBIN/psql -h $PGUNIXSOCKET -d $PGDATABASE -c "SELECT pg_create_restore_point('generate WAL');" > /dev/null 2>&1 sudo -iu $PGUSER $PGBIN/psql -h $PGUNIXSOCKET -d $PGDATABASE -c "SELECT pg_switch_xlog();" > /dev/null 2>&1 sudo -iu $PGUSER $PGBIN/psql -h $PGUNIXSOCKET -d $PGDATABASE -c "SELECT pg_switch_wal();" > /dev/null 2>&1 sudo -iu $PGUSER $PGBIN/psql -h $PGUNIXSOCKET -d $PGDATABASE -c "SELECT pg_sleep(1);" > /dev/null 2>&1 $PLUGIN_PATH/check_pgbackrest --prefix="sudo -u $PGUSER" --stanza=$STANZA $REPO --service=archives --output=nagios_strict echo $PLUGIN_PATH/check_pgbackrest --prefix="sudo -u $PGUSER" --stanza=$STANZA $REPO --service=archives --output=human $PLUGIN_PATH/check_pgbackrest --prefix="sudo -u $PGUSER" --stanza=$STANZA $REPO --service=archives --output=prtg echo $PLUGIN_PATH/check_pgbackrest --prefix="sudo -u $PGUSER" --stanza=$STANZA $REPO --service=archives | cut -f1 -d"-" > $RESULTS_DIR/archives-ok.out if [ "$PGBR_REPO_TYPE" = "multi" ] && ! 
$SKIP_REPO2_CLEAR; then $PLUGIN_PATH/check_pgbackrest --prefix="sudo -u $PGUSER" --stanza=$STANZA --service=archives | cut -f1 -d"-" > $RESULTS_DIR/archives-ok-global.out fi # --service=archives --ignore-archived-before echo "--service=archives --ignore-archived-before" sudo -iu $PGUSER $PGBIN/psql -h $PGUNIXSOCKET -d $PGDATABASE -c "SELECT pg_sleep(2);" > /dev/null 2>&1 $PLUGIN_PATH/check_pgbackrest --prefix="sudo -u $PGUSER" --stanza=$STANZA $REPO --service=archives --ignore-archived-before=1s > $RESULTS_DIR/archives-ignore-before.out # --service=archives --ignore-archived-after echo "--service=archives --ignore-archived-after" $PLUGIN_PATH/check_pgbackrest --prefix="sudo -u $PGUSER" --stanza=$STANZA $REPO --service=archives --ignore-archived-after=1h > $RESULTS_DIR/archives-ignore-after.out # --service=archives --latest-archive-age-alert echo "--service=archives --latest-archive-age-alert" sudo -iu $PGUSER $PGBIN/psql -h $PGUNIXSOCKET -d $PGDATABASE -c "SELECT pg_sleep(2);" > /dev/null 2>&1 $PLUGIN_PATH/check_pgbackrest --prefix="sudo -u $PGUSER" --stanza=$STANZA $REPO --service=archives --latest-archive-age-alert=1h | cut -f1 -d"-" > $RESULTS_DIR/archives-age-alert-ok.out $PLUGIN_PATH/check_pgbackrest --prefix="sudo -u $PGUSER" --stanza=$STANZA $REPO --service=archives --latest-archive-age-alert=1s --output=human $PLUGIN_PATH/check_pgbackrest --prefix="sudo -u $PGUSER" --stanza=$STANZA $REPO --service=archives --latest-archive-age-alert=1s | cut -f1 -d"-" > $RESULTS_DIR/archives-age-alert-ko.out # --service=archives --max-archives-check-number echo "--service=archives --max-archives-check-number" $PLUGIN_PATH/check_pgbackrest --prefix="sudo -u $PGUSER" --stanza=$STANZA $REPO --service=archives --max-archives-check-number=1 > $RESULTS_DIR/archives-max-archives-check-ko.out 2>&1 ## Results if [ "$PGBR_REPO_TYPE" = "multi" ] && ! $SKIP_REPO2_CLEAR; then diff -abB expected/ $RESULTS_DIR/ > /tmp/regression.diffs else diff -abB -x '*repo2*' -x '*-global.out' expected/ $RESULTS_DIR/ > /tmp/regression.diffs fi if [ $(wc -l < /tmp/regression.diffs) -gt 0 ]; then cat /tmp/regression.diffs exit 1 fi exit 0 check_pgbackrest-REL2_2/tests/playbooks/scripts/000077500000000000000000000000001415336775100221335ustar00rootroot00000000000000check_pgbackrest-REL2_2/tests/playbooks/scripts/simulate-activity-basic.bash000066400000000000000000000152201415336775100275260ustar00rootroot00000000000000#!/usr/bin/env bash set -o errexit set -o nounset cd "$(dirname "$0")" EXTENDED_ACTIVITY=false usage() { echo "Usage:" echo " -s " echo " -a " echo " -p " echo " -e (extended activity)" } while getopts "s:a:p:e" o; do case "${o}" in s) SCALE=${OPTARG} ;; a) ACTIVITY_TIME=${OPTARG} ;; p) SCRIPT_PROFILE=${OPTARG} ;; e) EXTENDED_ACTIVITY=true ;; *) usage 1>&2 exit 1 ;; esac done shift $((OPTIND-1)) if [ -z "$SCRIPT_PROFILE" ]; then SCRIPT_PROFILE="local" if [ ! -z $PGBR_HOST ]; then SCRIPT_PROFILE="remote" fi fi if [ "$SCRIPT_PROFILE" != "local" ] && [ "$SCRIPT_PROFILE" != "remote" ]; then usage fi PYTHON="python3" command -v $PYTHON >/dev/null 2>&1 || { PYTHON="python"; } SSH_ARGS='-o ConnectTimeout=10 -o BatchMode=yes -o StrictHostKeyChecking=no' echo "SCALE = $SCALE" echo "ACTIVITY_TIME = $ACTIVITY_TIME seconds" echo "SCRIPT_PROFILE = $SCRIPT_PROFILE" echo "PGBIN = $PGBIN" echo "PGDATABASE = $PGDATABASE" echo "PGSVC = $PGSVC" echo "PGUNIXSOCKET = $PGUNIXSOCKET" echo "PGUSER = $PGUSER" echo "STANZA = $STANZA" if [ ! 
-z "$PGBR_HOST" ]; then echo "PGBR_USER = $PGBR_USER" echo "PGBR_HOST = $PGBR_HOST" PGBR_HOST=(`$PYTHON -c "print(' '.join($PGBR_HOST))"`) fi if [ ! -z "$PGBR_STANDBIES" ]; then echo "PGBR_STANDBIES = $PGBR_STANDBIES" PGBR_STANDBIES=(`$PYTHON -c "print(' '.join($PGBR_STANDBIES))"`) fi echo "PGBR_REPO_TYPE = $PGBR_REPO_TYPE" REPO="" if [ "$PGBR_REPO_TYPE" = "multi" ]; then REPO="--repo=1" echo "...multi repo support, defaulting to repo1" fi # run echo "-------------------PROCESS START-------------------" echo "--Create pgbench setup" sudo -iu $PGUSER $PGBIN/dropdb -h $PGUNIXSOCKET --if-exists bench sudo -iu $PGUSER $PGBIN/createdb -h $PGUNIXSOCKET bench sudo -iu $PGUSER $PGBIN/pgbench -h $PGUNIXSOCKET -i -s $SCALE --quiet --foreign-keys bench echo "--Take a full backup" if [ "$SCRIPT_PROFILE" = "local" ]; then sudo -iu $PGUSER pgbackrest --stanza=$STANZA $REPO --type=full backup else sudo -iu $PGUSER ssh ${SSH_ARGS} ${PGBR_USER}@${PGBR_HOST} "pgbackrest --stanza=$STANZA $REPO --type=full backup" fi sudo -iu $PGUSER pgbackrest --stanza=$STANZA $REPO info echo "--Simulate $ACTIVITY_TIME sec activity" sudo -iu $PGUSER $PGBIN/pgbench -h $PGUNIXSOCKET -T $ACTIVITY_TIME bench echo "--Take an incremental backup" if [ "$SCRIPT_PROFILE" = "local" ]; then sudo -iu $PGUSER pgbackrest --stanza=$STANZA $REPO --type=incr backup else sudo -iu $PGUSER ssh ${SSH_ARGS} ${PGBR_USER}@${PGBR_HOST} "pgbackrest --stanza=$STANZA $REPO --type=incr backup" fi sudo -iu $PGUSER pgbackrest --stanza=$STANZA $REPO info echo "--Simulate $ACTIVITY_TIME sec activity" sudo -iu $PGUSER $PGBIN/pgbench -h $PGUNIXSOCKET -T $ACTIVITY_TIME bench echo "--Take a full backup to test the purge action" if [ "$SCRIPT_PROFILE" = "local" ]; then sudo -iu $PGUSER pgbackrest --stanza=$STANZA $REPO --type=full backup else sudo -iu $PGUSER ssh ${SSH_ARGS} ${PGBR_USER}@${PGBR_HOST} "pgbackrest --stanza=$STANZA $REPO --type=full backup" fi sudo -iu $PGUSER pgbackrest --stanza=$STANZA $REPO info echo "--Simulate $ACTIVITY_TIME sec activity" sudo -iu $PGUSER $PGBIN/pgbench -h $PGUNIXSOCKET -T $ACTIVITY_TIME bench echo "--Create restore point RP1 and get latest pgbench history time" sudo -iu $PGUSER $PGBIN/psql -h $PGUNIXSOCKET -d $PGDATABASE -c "select pg_create_restore_point('RP1');" sudo -iu $PGUSER $PGBIN/psql -h $PGUNIXSOCKET -d bench -c 'SELECT max(mtime) FROM pgbench_history;' echo "--Simulate $ACTIVITY_TIME sec activity and get latest pgbench history time" sudo -iu $PGUSER $PGBIN/pgbench -h $PGUNIXSOCKET -T $ACTIVITY_TIME bench sudo -iu $PGUSER $PGBIN/psql -h $PGUNIXSOCKET -d bench -c 'SELECT max(mtime) FROM pgbench_history;' echo "--Restore RP1 restore point and get latest pgbench history time" systemctl stop $PGSVC sudo -iu $PGUSER pgbackrest restore --stanza=$STANZA $REPO --delta --type=name --target=RP1 --target-action=promote systemctl start $PGSVC systemctl status $PGSVC echo "--Wait while pg_is_in_recovery" while [ `sudo -iu $PGUSER $PGBIN/psql -h $PGUNIXSOCKET -d $PGDATABASE -c 'SELECT pg_is_in_recovery();' -A -t` = "t" ] do echo "wait..." 
sleep 5 done sudo -iu $PGUSER $PGBIN/psql -h $PGUNIXSOCKET -d bench -c 'SELECT max(mtime) FROM pgbench_history;' echo "--Resync standby server(s)" echo "----Take incremental backup" if [ "$SCRIPT_PROFILE" = "local" ]; then sudo -iu $PGUSER pgbackrest --stanza=$STANZA $REPO --type=incr backup else sudo -iu $PGUSER ssh ${SSH_ARGS} ${PGBR_USER}@${PGBR_HOST} "pgbackrest --stanza=$STANZA $REPO --type=incr backup" fi for i in "${PGBR_STANDBIES[@]}"; do echo "----Restore on standby server - $i" ssh ${SSH_ARGS} "$i" "systemctl stop $PGSVC" ssh ${SSH_ARGS} "$i" "sudo -iu $PGUSER pgbackrest --stanza=$STANZA $REPO --reset-pg2-host --type=standby restore" ssh ${SSH_ARGS} "$i" "systemctl start $PGSVC" done echo "----Wait until at least 1 standby is replicated" while [ `sudo -iu $PGUSER $PGBIN/psql -h $PGUNIXSOCKET -d $PGDATABASE -At -c "SELECT count(*) FROM pg_stat_replication;"` -lt 1 ] do echo "wait..." sleep 5 done sudo -iu $PGUSER $PGBIN/psql -h $PGUNIXSOCKET -d $PGDATABASE -x -c "SELECT * FROM pg_stat_replication;" echo "--Simulate $ACTIVITY_TIME sec activity to get archives on different time-lines" sudo -iu $PGUSER $PGBIN/pgbench -h $PGUNIXSOCKET -T $ACTIVITY_TIME bench sudo -iu $PGUSER pgbackrest --stanza=$STANZA $REPO info if $EXTENDED_ACTIVITY; then echo "--Create test-checksums setup" sudo -iu $PGUSER $PGBIN/dropdb -h $PGUNIXSOCKET --if-exists test-checksums sudo -iu $PGUSER $PGBIN/createdb -h $PGUNIXSOCKET test-checksums sudo -iu $PGUSER $PGBIN/psql -h $PGUNIXSOCKET -d test-checksums -c "CREATE TABLE t1 (id int);INSERT INTO t1 VALUES (1);" sudo -iu $PGUSER $PGBIN/psql -h $PGUNIXSOCKET -d $PGDATABASE -c "CHECKPOINT;" FILE_TO_EDIT=`sudo -iu $PGUSER $PGBIN/psql -h $PGUNIXSOCKET -d test-checksums -A -t -c "SELECT current_setting('data_directory') || '/' || pg_relation_filepath('t1');"` echo "FILE_TO_EDIT=$FILE_TO_EDIT" echo "33" |xxd > $FILE_TO_EDIT echo "--Take an incremental backup" if [ "$SCRIPT_PROFILE" = "local" ]; then sudo -iu $PGUSER pgbackrest --stanza=$STANZA $REPO --type=incr backup else sudo -iu $PGUSER ssh ${SSH_ARGS} ${PGBR_USER}@${PGBR_HOST} "pgbackrest --stanza=$STANZA $REPO --type=incr backup" fi sudo -iu $PGUSER pgbackrest --stanza=$STANZA $REPO info fi echo "-------------------PROCESS END-------------------" check_pgbackrest-REL2_2/tests/playbooks/templates/000077500000000000000000000000001415336775100224425ustar00rootroot00000000000000check_pgbackrest-REL2_2/tests/playbooks/templates/profile.fact.j2000066400000000000000000000010561415336775100252550ustar00rootroot00000000000000[global] cluster_name={{ cluster_name }} pg_bin_path={{ pg_bin_path }} pg_data={{ pg_data }} pg_database={{ pg_database }} pg_owner={{ pg_owner }} pg_port={{ pg_port }} pg_service={{ pg_service }} pg_unix_socket={{ pg_unix_socket_directories[0] }} pg_version={{ pg_version }} pgbackrest_user={{ pgbackrest_user }} pgbackrest_repo_type={{ pgbackrest_repo_type }} {% if repository_server|length > 0 %} pgbackrest_repo_host={{ repository_server }} {% endif %} {% if pgbackrest_standbies|length > 0 %} pgbackrest_standbies={{ pgbackrest_standbies }} {% endif %} check_pgbackrest-REL2_2/tests/profile.d/000077500000000000000000000000001415336775100203235ustar00rootroot00000000000000check_pgbackrest-REL2_2/tests/profile.d/c7epas.profile000066400000000000000000000001341415336775100230650ustar00rootroot00000000000000export CLNAME="c7epas" export DBTYPE="EPAS" export DBVERSION="14" export DOCKERI="centos:7" 
check_pgbackrest-REL2_2/tests/profile.d/c7pg.profile000066400000000000000000000001301415336775100225370ustar00rootroot00000000000000export CLNAME="c7pg" export DBTYPE="PG" export DBVERSION="14" export DOCKERI="centos:7" check_pgbackrest-REL2_2/tests/profile.d/c8epas.profile000066400000000000000000000001341415336775100230660ustar00rootroot00000000000000export CLNAME="c8epas" export DBTYPE="EPAS" export DBVERSION="14" export DOCKERI="centos:8" check_pgbackrest-REL2_2/tests/profile.d/c8pg.profile000066400000000000000000000001301415336775100225400ustar00rootroot00000000000000export CLNAME="c8pg" export DBTYPE="PG" export DBVERSION="14" export DOCKERI="centos:8" check_pgbackrest-REL2_2/tests/profile.d/d10epas.profile000066400000000000000000000001361415336775100231420ustar00rootroot00000000000000export CLNAME="d10epas" export DBTYPE="EPAS" export DBVERSION="14" export DOCKERI="debian:10" check_pgbackrest-REL2_2/tests/profile.d/d10pg.profile000066400000000000000000000001321415336775100226140ustar00rootroot00000000000000export CLNAME="d10pg" export DBTYPE="PG" export DBVERSION="14" export DOCKERI="debian:10" check_pgbackrest-REL2_2/tests/profile.d/d9epas.profile000066400000000000000000000001341415336775100230700ustar00rootroot00000000000000export CLNAME="d9epas" export DBTYPE="EPAS" export DBVERSION="14" export DOCKERI="debian:9" check_pgbackrest-REL2_2/tests/profile.d/d9pg.profile000066400000000000000000000001301415336775100225420ustar00rootroot00000000000000export CLNAME="d9pg" export DBTYPE="PG" export DBVERSION="14" export DOCKERI="debian:9" check_pgbackrest-REL2_2/tests/profile.d/ro8epas.profile000066400000000000000000000001411415336775100232620ustar00rootroot00000000000000export CLNAME="ro8epas" export DBTYPE="EPAS" export DBVERSION="14" export DOCKERI="rockylinux:8" check_pgbackrest-REL2_2/tests/profile.d/ro8pg.profile000066400000000000000000000001351415336775100227430ustar00rootroot00000000000000export CLNAME="ro8pg" export DBTYPE="PG" export DBVERSION="14" export DOCKERI="rockylinux:8" check_pgbackrest-REL2_2/tests/profile.d/u18epas.profile000066400000000000000000000001411415336775100231670ustar00rootroot00000000000000export CLNAME="u18epas" export DBTYPE="EPAS" export DBVERSION="14" export DOCKERI="ubuntu:18.04" check_pgbackrest-REL2_2/tests/profile.d/u18pg.profile000066400000000000000000000001351415336775100226500ustar00rootroot00000000000000export CLNAME="u18pg" export DBTYPE="PG" export DBVERSION="14" export DOCKERI="ubuntu:18.04" check_pgbackrest-REL2_2/tests/profile.d/u20epas.profile000066400000000000000000000001411415336775100231600ustar00rootroot00000000000000export CLNAME="u20epas" export DBTYPE="EPAS" export DBVERSION="14" export DOCKERI="ubuntu:20.04" check_pgbackrest-REL2_2/tests/profile.d/u20pg.profile000066400000000000000000000001351415336775100226410ustar00rootroot00000000000000export CLNAME="u20pg" export DBTYPE="PG" export DBVERSION="14" export DOCKERI="ubuntu:20.04" check_pgbackrest-REL2_2/tests/profile.d/vagrant.profile000066400000000000000000000003751415336775100233540ustar00rootroot00000000000000# Vagrant settings export CLPATH="/home/vagrant/clusters" # Ansible settings export ANSIBLE_ROLES_PATH=${ANSIBLE_ROLES_PATH:+$ANSIBLE_ROLES_PATH:}$(pwd)/roles export ANSIBLE_HOST_KEY_CHECKING=False export ANSIBLE_REMOTE_USER="root" export EXTRA_VARS="" 
check_pgbackrest-REL2_2/tests/roles/000077500000000000000000000000001415336775100175655ustar00rootroot00000000000000check_pgbackrest-REL2_2/tests/roles/install_dbserver/000077500000000000000000000000001415336775100231275ustar00rootroot00000000000000check_pgbackrest-REL2_2/tests/roles/install_dbserver/defaults/000077500000000000000000000000001415336775100247365ustar00rootroot00000000000000check_pgbackrest-REL2_2/tests/roles/install_dbserver/defaults/main.yml000066400000000000000000000007041415336775100264060ustar00rootroot00000000000000--- os: "" pg_type: "PG" pg_version: 12 epas_deb_drop_cluster: "/usr/bin/epas_dropcluster" epas_service: "edb-as@{{ pg_version }}-main" pg_deb_drop_cluster: "/usr/bin/pg_dropcluster" deb_cluster_name: "main" pg_service: "postgresql@{{ pg_version }}-main" supported_os: - CentOS7 - CentOS8 - RedHat7 - RedHat8 - Ubuntu20 - Debian9 - Debian10 supported_pg_type: - EPAS - PG supported_pg_version: - 10 - 11 - 12 - 13 - 14 check_pgbackrest-REL2_2/tests/roles/install_dbserver/tasks/000077500000000000000000000000001415336775100242545ustar00rootroot00000000000000check_pgbackrest-REL2_2/tests/roles/install_dbserver/tasks/EPAS_Debian_install.yml000066400000000000000000000027731415336775100305300ustar00rootroot00000000000000--- - name: Install dependencies EPAS < 14 package: name: - edb-as{{ pg_version }}-server-edb-modules state: present when: pg_version|int < 14 become: yes - name: Install dependencies EPAS >= 14 package: name: - edb-as{{ pg_version }}-server-edb-wait-states state: present when: pg_version|int >= 14 become: yes - name: Install EPAS Packages package: name: - python3-pip - python3-psycopg2 - edb-as{{ pg_version }}-server - edb-as{{ pg_version }}-server-core - edb-as{{ pg_version }}-server-client - edb-as{{ pg_version }}-server-sslutils - edb-as{{ pg_version }}-server-indexadvisor - edb-as{{ pg_version }}-server-sqlprofiler - edb-as{{ pg_version }}-server-sqlprotect - edb-as{{ pg_version }}-server-sslutils state: present update_cache: yes become: true register: install_package - name: Install python-psycopg2 package: name: - python-psycopg2 - python-ipaddress state: present update_cache: yes when: os in ['Ubuntu18', 'Debian9'] become: true - name: Stop the service {{ epas_service }} systemd: name: "{{ epas_service }}" state: stopped when: install_package.changed become: true - name: Drop the default debian database shell: > {{ epas_deb_drop_cluster }} --stop {{ pg_version }} {{ deb_cluster_name }} when: install_package.changed register: drop_cluster changed_when: drop_cluster.rc == 0 failed_when: drop_cluster.rc != 0 become: true check_pgbackrest-REL2_2/tests/roles/install_dbserver/tasks/EPAS_Debian_rm_install.yml000066400000000000000000000022571415336775100312230ustar00rootroot00000000000000--- - name: Stop the service {{ epas_service }} systemd: name: "{{ epas_service }}" state: stopped ignore_errors: yes become: true - name: Remove EPAS Packages package: name: - python3-pip - python3-psycopg2 - edb-as{{ pg_version }}-server - edb-as{{ pg_version }}-server-core - edb-as{{ pg_version }}-server-client - edb-as{{ pg_version }}-server-sslutils - edb-as{{ pg_version }}-server-indexadvisor - edb-as{{ pg_version }}-server-sqlprofiler - edb-as{{ pg_version }}-server-sqlprotect - edb-as{{ pg_version }}-server-sslutils state: absent update_cache: yes become: true - name: Remove EPAS < 14 packages package: name: - edb-as{{ pg_version }}-server-edb-modules state: absent when: pg_version|int < 14 become: yes - name: Remove EPAS >= 14 packages package: name: - edb-as{{ 
pg_version }}-server-edb-wait-states state: absent when: pg_version|int >= 14 become: yes - name: Remove python-psycopg2 package: name: - python-psycopg2 - python-ipaddress state: absent update_cache: yes when: os in ['Ubuntu18', 'Debian9'] become: true check_pgbackrest-REL2_2/tests/roles/install_dbserver/tasks/EPAS_RedHat_install.yml000066400000000000000000000024371415336775100305120ustar00rootroot00000000000000--- - name: Install python packages package: name: - python-pip - python-psycopg2 - python-ipaddress state: present when: os in ['RedHat7','CentOS7'] become: yes - name: Install python packages package: name: - python3-pip - python3-psycopg2 state: present when: os in ['RedHat8','CentOS8','Rocky8'] become: yes - name: Install dependencies EPAS < 14 package: name: - edb-as{{ pg_version }}-server-edb-modules state: present when: pg_version|int < 14 become: yes - name: Install dependencies EPAS >= 14 package: name: - edb-as{{ pg_version }}-server-edb_wait_states state: present when: pg_version|int >= 14 become: yes - name: Install EPAS packages package: name: - edb-as{{ pg_version }}-server - edb-as{{ pg_version }}-server-core - edb-as{{ pg_version }}-server-contrib - edb-as{{ pg_version }}-server-libs - edb-as{{ pg_version }}-server-client - edb-as{{ pg_version }}-server-llvmjit - edb-as{{ pg_version }}-server-sslutils - edb-as{{ pg_version }}-server-indexadvisor - edb-as{{ pg_version }}-server-sqlprofiler - edb-as{{ pg_version }}-server-sqlprotect - edb-as{{ pg_version }}-server-sslutils state: present become: yes check_pgbackrest-REL2_2/tests/roles/install_dbserver/tasks/EPAS_RedHat_rm_install.yml000066400000000000000000000025561415336775100312120ustar00rootroot00000000000000--- - name: Stop the service {{ epas_service }} systemd: name: "{{ epas_service }}" state: stopped become: true - name: Remove EPAS packages package: name: - edb-as{{ pg_version }}-server - edb-as{{ pg_version }}-server-core - edb-as{{ pg_version }}-server-contrib - edb-as{{ pg_version }}-server-libs - edb-as{{ pg_version }}-server-client - edb-as{{ pg_version }}-server-llvmjit - edb-as{{ pg_version }}-server-sslutils - edb-as{{ pg_version }}-server-indexadvisor - edb-as{{ pg_version }}-server-sqlprofiler - edb-as{{ pg_version }}-server-sqlprotect - edb-as{{ pg_version }}-server-sslutils state: absent become: yes - name: Remove EPAS < 14 packages package: name: - edb-as{{ pg_version }}-server-edb-modules state: absent when: pg_version|int < 14 become: yes - name: Remove EPAS >= 14 packages package: name: - edb-as{{ pg_version }}-server-edb_wait_states state: absent when: pg_version|int >= 14 become: yes - name: Remove python packages package: name: - python-pip - python-psycopg2 state: absent when: os in ['RedHat7','CentOS7'] become: yes - name: Remove python packages package: name: - python3-pip - python3-psycopg2 state: absent when: os in ['RedHat8','CentOS8','Rocky8'] become: yes check_pgbackrest-REL2_2/tests/roles/install_dbserver/tasks/PG_Debian_install.yml000066400000000000000000000021761415336775100303030ustar00rootroot00000000000000--- - name: Install Postgres package: name: - ca-certificates - python3-pycurl - python3-psycopg2 - postgresql-{{ pg_version }} - postgresql-{{ pg_version }} - postgresql-server-dev-{{ pg_version }} state: present update_cache: yes register: install_package become: true - name: Install dependencies PG < 14 package: name: - postgresql-{{ pg_version }}-sslutils state: present when: pg_version|int < 14 become: true - name: Install python-psycopg2 package: name: - python-psycopg2 
- python-ipaddress state: present update_cache: yes when: os in ['Ubuntu18','Debian9', 'Debian10'] become: true - name: Stop the service {{ pg_service }} systemd: name: "{{ pg_service }}" state: stopped when: install_package.changed become: true - name: Drop the default debian database shell: > {{ pg_deb_drop_cluster }} {{ pg_version }} {{ deb_cluster_name }} args: executable: /bin/bash when: install_package.changed register: drop_cluster changed_when: drop_cluster.rc == 0 failed_when: drop_cluster.rc != 0 become: true check_pgbackrest-REL2_2/tests/roles/install_dbserver/tasks/PG_Debian_rm_install.yml000066400000000000000000000014141415336775100307730ustar00rootroot00000000000000--- - name: Stop the service {{ pg_service }} systemd: name: "{{ pg_service }}" state: stopped become: true - name: Remove postgreSQL package: name: - ca-certificates - python3-pycurl - python3-psycopg2 - postgresql-{{ pg_version }} - postgresql-{{ pg_version }} - postgresql-server-dev-{{ pg_version }} state: absent update_cache: yes become: true - name: Remove PG < 14 packages package: name: - postgresql-{{ pg_version }}-sslutils state: absent when: pg_version|int < 14 become: true - name: Remove python-psycopg2 package: name: - python-psycopg2 - python-ipaddress state: absent update_cache: yes when: os in ['Ubuntu18','Debian9', 'Debian10'] become: true check_pgbackrest-REL2_2/tests/roles/install_dbserver/tasks/PG_RedHat_install.yml000066400000000000000000000020621415336775100302620ustar00rootroot00000000000000--- - name: Disable builtin postgresql module shell: > dnf -qy module disable postgresql args: executable: /bin/bash register: disable_builtin_postgres changed_when: disable_builtin_postgres.rc == 0 failed_when: disable_builtin_postgres.rc != 0 ignore_errors: yes become: yes when: os in ['RedHat8','CentOS8'] - name: Install require python package package: name: - python-pycurl - libselinux-python - python-psycopg2 - python-ipaddress state: present when: os in ['RedHat7','CentOS7'] become: yes - name: Install require python package package: name: - python3-pip - python3-pycurl - python3-libselinux - python3-psycopg2 state: present become: yes when: os in ['RedHat8','CentOS8','Rocky8'] - name: Install Postgres package: name: - glibc-common - ca-certificates - postgresql{{ pg_version }} - postgresql{{ pg_version }}-server - postgresql{{ pg_version }}-contrib - sslutils_{{ pg_version }} state: present become: yes check_pgbackrest-REL2_2/tests/roles/install_dbserver/tasks/PG_RedHat_rm_install.yml000066400000000000000000000014641415336775100307650ustar00rootroot00000000000000--- - name: Stop the PG service if it's running systemd: name: postgresql-{{ pg_version }} state: stopped become: yes ignore_errors: yes - name: Remove require python package package: name: - python-pycurl - libselinux-python - python-psycopg2 - python-ipaddress state: absent when: os in ['RedHat7','CentOS7'] become: yes - name: Remove require python package package: name: - python3-pycurl - python3-libselinux - python3-psycopg2 state: absent become: yes when: os in ['RedHat8','CentOS8','Rocky8'] - name: Remove Postgres package: name: - postgresql{{ pg_version }} - postgresql{{ pg_version }}-server - postgresql{{ pg_version }}-contrib - sslutils_{{ pg_version }} state: absent become: yes check_pgbackrest-REL2_2/tests/roles/install_dbserver/tasks/main.yml000066400000000000000000000014201415336775100257200ustar00rootroot00000000000000--- - name: Set the os variable set_fact: os: "{{ ansible_distribution }}{{ ansible_distribution_major_version 
}}" - name: Check support for Operating System fail: msg: "Operating System = {{ os }} not supported." when: os not in supported_os - name: Check supported versions for Database engine fail: msg: "Database Engine Version = {{ pg_version }} not supported. Supported versions are {{ supported_pg_version }}" when: pg_version|int not in supported_pg_version - name: Remove Postgres packages include_tasks: "{{ pg_type }}_{{ ansible_os_family }}_rm_install.yml" when: - force_install is defined - force_install - name: Install and Configure Postgres on RedHat include_tasks: "{{ pg_type }}_{{ ansible_os_family }}_install.yml" check_pgbackrest-REL2_2/tests/roles/setup_check_pgbackrest/000077500000000000000000000000001415336775100242675ustar00rootroot00000000000000check_pgbackrest-REL2_2/tests/roles/setup_check_pgbackrest/defaults/000077500000000000000000000000001415336775100260765ustar00rootroot00000000000000check_pgbackrest-REL2_2/tests/roles/setup_check_pgbackrest/defaults/main.yml000066400000000000000000000003671415336775100275530ustar00rootroot00000000000000--- check_pgbackrest_build: false deploy_icinga2: false reschedule_check_icinga2: false build_packages: common: - git Debian: - libjson-perl - libdata-dump-perl RedHat: - nagios-plugins - perl-JSON - perl-Data-Dumpercheck_pgbackrest-REL2_2/tests/roles/setup_check_pgbackrest/tasks/000077500000000000000000000000001415336775100254145ustar00rootroot00000000000000check_pgbackrest-REL2_2/tests/roles/setup_check_pgbackrest/tasks/build.yml000066400000000000000000000022461415336775100272420ustar00rootroot00000000000000--- - name: Install build requirements package: name: > {{ query('flattened', package_lists) }} state: latest vars: package_lists: - "{{ build_packages['common'] }}" - "{{ build_packages[ansible_os_family] }}" - name: Check if development file exists on controller local_action: stat path="/check_pgbackrest/check_pgbackrest" register: dev_file - name: Ensure src directory exists file: state=directory path="{{ check_pgbackrest_src_dir }}" when: not dev_file.stat.exists | bool - name: Fetch check_pgbackrest from Github git: repo: "https://github.com/pgstef/check_pgbackrest" dest: "{{ check_pgbackrest_src_dir }}" version: "main" depth: 1 accept_hostkey: yes when: not dev_file.stat.exists | bool - name: Install check_pgbackrest copy: src: "{{ check_pgbackrest_src_dir }}/check_pgbackrest" remote_src: yes dest: /usr/bin/check_pgbackrest mode: '0755' when: not dev_file.stat.exists | bool - name: Install development check_pgbackrest copy: src: "/check_pgbackrest/check_pgbackrest" remote_src: no dest: /usr/bin/check_pgbackrest mode: '0755' when: dev_file.stat.exists | boolcheck_pgbackrest-REL2_2/tests/roles/setup_check_pgbackrest/tasks/icinga2-check.yml000066400000000000000000000031631415336775100305310ustar00rootroot00000000000000--- - set_fact: icinga_api_url: "https://localhost:5665/v1" icinga_api_user: "icinga2-director" icinga_api_pass: "anyPassWord" when: "'icinga2' in group_names" - name: Reschedule-check on all check_pgbackrest services uri: url: "{{ icinga_api_url }}/actions/reschedule-check" validate_certs: no user: "{{ icinga_api_user }}" password: "{{ icinga_api_pass }}" method: POST headers: Accept: "application/json" body_format: json body: '{ "type": "Service", "filter": "match(pattern,service.name)", "filter_vars": { "pattern": "pgbackrest*" } }' when: > 'icinga2' in group_names and reschedule_check_icinga2 - name: Get services status uri: url: "{{ icinga_api_url }}/objects/services" validate_certs: no user: "{{ icinga_api_user }}" 
password: "{{ icinga_api_pass }}" method: GET return_content: yes headers: Content-Type: "application/json" body_format: json body: '{ "filter": "match(pattern,service.name)", "filter_vars": { "pattern": "pgbackrest*" } }' register: icinga2_services_status when: "'icinga2' in group_names" - name: Verify services status debug: msg: - "Check {{ item.attrs.host_name }} - {{ item.attrs.name }}" - " State: {{ item.attrs.state }}" - " Last check time: {{ '%Y-%m-%d %H:%M:%S %Z' | strftime(item.attrs.last_check) }}" - " Output: {{ item.attrs.last_check_result.output }}" failed_when: "item.attrs.state != 0" loop: "{{ icinga2_services_status.json.results | sort(attribute='name') }}" loop_control: label: "{{ item.name }}" when: "'icinga2' in group_names"check_pgbackrest-REL2_2/tests/roles/setup_check_pgbackrest/tasks/icinga2-config.yml000066400000000000000000000304761415336775100307300ustar00rootroot00000000000000--- # -------------------- # SSH connection setup # -------------------- - name: Generate Icinga2 SSH keys community.crypto.openssh_keypair: path: "{{ cluster_dir }}/keys/id_icinga2" delegate_to: localhost when: "'icinga2' in group_names" - name: Ensure that .ssh exists on Icinga2 server file: path: "~nagios/.ssh" state: directory mode: '0700' become_user: "nagios" become: yes when: "'icinga2' in group_names" - name: Install keypair on Icinga2 server copy: src: "{{ item.src }}" dest: "{{ item.dest }}" mode: "{{ item.mode }}" with_items: - src: "{{ cluster_dir }}/keys/id_icinga2" dest: "~nagios/.ssh/id_rsa" mode: '0600' - src: "{{ cluster_dir }}/keys/id_icinga2.pub" dest: "~nagios/.ssh/id_rsa.pub" mode: '0640' become_user: "nagios" become: yes when: "'icinga2' in group_names" - name: Setup user accessed_by_ssh on db and repo hosts user: name: accessed_by_ssh groups: wheel append: yes when: > ( inventory_hostname in repository_server or inventory_hostname in pgbackrest_servers ) and ansible_os_family == 'RedHat' - name: Setup user accessed_by_ssh on db and repo hosts user: name: accessed_by_ssh groups: sudo append: yes when: > ( inventory_hostname in repository_server or inventory_hostname in pgbackrest_servers ) and ansible_os_family == 'Debian' - name: Add user to sudoers file on db and repo hosts lineinfile: path: /etc/sudoers regexp: '^accessed_by_ssh' line: 'accessed_by_ssh ALL=(ALL) NOPASSWD:ALL' validate: 'visudo -cf %s' when: inventory_hostname in repository_server or inventory_hostname in pgbackrest_servers - name: Authorise SSH connection on db and repo hosts authorized_key: user: accessed_by_ssh key: "{{ lookup('file', cluster_dir+'/keys/id_icinga2.pub') }}" when: inventory_hostname in repository_server or inventory_hostname in pgbackrest_servers - name: Test SSH connection from Icinga2 server to db hosts shell: "/usr/bin/ssh {{ssh_args}} {{ user }}@{{ host }} uname -a" vars: host: "{{ hostvars[item].private_ip }}" user: "accessed_by_ssh" ssh_args: "-o ConnectTimeout=10 -o BatchMode=yes -o StrictHostKeyChecking=no" loop: "{{ pgbackrest_servers }}" become_user: "nagios" become: yes when: "'icinga2' in group_names" - name: Test SSH connection from Icinga2 server to repo host shell: "/usr/bin/ssh {{ssh_args}} {{ user }}@{{ host }} uname -a" vars: host: "{{ hostvars[item].private_ip }}" user: "accessed_by_ssh" ssh_args: "-o ConnectTimeout=10 -o BatchMode=yes -o StrictHostKeyChecking=no" loop: "{{ repository_server }}" become_user: "nagios" become: yes when: "'icinga2' in group_names" # ------------------------------------ # Configure Icinga2 hosts and services # 
------------------------------------ - set_fact: icinga_url: "http://127.0.0.1/icingaweb2" icinga_user: "icingaadmin" icinga_pass: "icinga" when: "'icinga2' in group_names" - name: Create Icinga2 host template t_systems_mms.icinga_director.icinga_host_template: state: present url: "{{ icinga_url }}" url_username: "{{ icinga_user }}" url_password: "{{ icinga_pass }}" object_name: host-template check_command: hostalive when: "'icinga2' in group_names" - name: Create Icinga2 service template t_systems_mms.icinga_director.icinga_service_template: state: present url: "{{ icinga_url }}" url_username: "{{ icinga_user }}" url_password: "{{ icinga_pass }}" object_name: service-template max_check_attempts: "5" check_interval: "1m" retry_interval: "30s" when: "'icinga2' in group_names" - name: Add db hosts to Icinga2 t_systems_mms.icinga_director.icinga_host: state: present url: "{{ icinga_url }}" url_username: "{{ icinga_user }}" url_password: "{{ icinga_pass }}" object_name: "{{ item }}" address: "{{ hostvars[item].private_ip }}" imports: - "host-template" vars: os: "Linux" loop: "{{ pgbackrest_servers }}" when: "'icinga2' in group_names" - name: Add repo hosts to Icinga2 t_systems_mms.icinga_director.icinga_host: state: present url: "{{ icinga_url }}" url_username: "{{ icinga_user }}" url_password: "{{ icinga_pass }}" object_name: "{{ item }}" address: "{{ hostvars[item].private_ip }}" imports: - "host-template" vars: os: "Linux" loop: "{{ repository_server }}" when: "'icinga2' in group_names" - name: Create Icinga2 check retention command t_systems_mms.icinga_director.icinga_command: state: present url: "{{ icinga_url }}" url_username: "{{ icinga_user }}" url_password: "{{ icinga_pass }}" object_name: "by_ssh_pgbackrest_retention" imports: - "by_ssh" vars: by_ssh_command: "check_pgbackrest --stanza=$stanza$ --service=retention --retention-full=$retention_full$ --prefix=\"$prefix$\"" when: "'icinga2' in group_names" - name: Create Icinga2 check archives command t_systems_mms.icinga_director.icinga_command: state: present url: "{{ icinga_url }}" url_username: "{{ icinga_user }}" url_password: "{{ icinga_pass }}" object_name: "by_ssh_pgbackrest_archives" imports: - "by_ssh" vars: by_ssh_command: "check_pgbackrest --stanza=$stanza$ --service=archives --prefix=\"$prefix$\"" when: "'icinga2' in group_names" - name: Create Icinga2 check retention services for db hosts t_systems_mms.icinga_director.icinga_service: state: present url: "{{ icinga_url }}" url_username: "{{ icinga_user }}" url_password: "{{ icinga_pass }}" object_name: "pgbackrest_retention" imports: - "service-template" check_command: "by_ssh_pgbackrest_retention" host: "{{ item }}" vars: by_ssh_logname: "accessed_by_ssh" stanza: "{{ cluster_name }}" retention_full: "{{ hostvars[item].pgbackrest_repo_retention_full }}" prefix: "sudo -u {{ hostvars[item].pg_owner }}" loop: "{{ pgbackrest_servers }}" when: "'icinga2' in group_names" - name: Create Icinga2 check retention services for repo host t_systems_mms.icinga_director.icinga_service: state: present url: "{{ icinga_url }}" url_username: "{{ icinga_user }}" url_password: "{{ icinga_pass }}" object_name: "pgbackrest_retention" imports: - "service-template" check_command: "by_ssh_pgbackrest_retention" host: "{{ item }}" vars: by_ssh_logname: "accessed_by_ssh" stanza: "{{ cluster_name }}" retention_full: "{{ hostvars[item].pgbackrest_repo_retention_full }}" prefix: "sudo -u {{ hostvars[item].pgbackrest_user }}" loop: "{{ repository_server }}" when: "'icinga2' in group_names" - name: Create 
Icinga2 check archives services for db hosts t_systems_mms.icinga_director.icinga_service: state: present url: "{{ icinga_url }}" url_username: "{{ icinga_user }}" url_password: "{{ icinga_pass }}" object_name: "pgbackrest_archives" imports: - "service-template" check_command: "by_ssh_pgbackrest_archives" host: "{{ item }}" vars: by_ssh_logname: "accessed_by_ssh" stanza: "{{ cluster_name }}" prefix: "sudo -u {{ hostvars[item].pg_owner }}" loop: "{{ pgbackrest_servers }}" when: "'icinga2' in group_names" - name: Create Icinga2 check retention services for repo host t_systems_mms.icinga_director.icinga_service: state: present url: "{{ icinga_url }}" url_username: "{{ icinga_user }}" url_password: "{{ icinga_pass }}" object_name: "pgbackrest_archives" imports: - "service-template" check_command: "by_ssh_pgbackrest_archives" host: "{{ item }}" vars: by_ssh_logname: "accessed_by_ssh" stanza: "{{ cluster_name }}" prefix: "sudo -u {{ hostvars[item].pgbackrest_user }}" loop: "{{ repository_server }}" when: "'icinga2' in group_names" # Multiple repositories support, add check commands using --repo=1 - name: Set default repo when multiple repositories are defined set_fact: default_repo_key: "1" when: "pgbackrest_repo_type is defined and pgbackrest_repo_type == 'multi'" - name: Create Icinga2 check retention command - multiple repositories t_systems_mms.icinga_director.icinga_command: state: present url: "{{ icinga_url }}" url_username: "{{ icinga_user }}" url_password: "{{ icinga_pass }}" object_name: "by_ssh_pgbackrest_retention_with_repo" imports: - "by_ssh" vars: by_ssh_command: "check_pgbackrest --stanza=$stanza$ --service=retention --retention-full=$retention_full$ --prefix=\"$prefix$\" --repo={{ default_repo_key }}" when: "'icinga2' in group_names and default_repo_key is defined" - name: Create Icinga2 check archives command - multiple repositories t_systems_mms.icinga_director.icinga_command: state: present url: "{{ icinga_url }}" url_username: "{{ icinga_user }}" url_password: "{{ icinga_pass }}" object_name: "by_ssh_pgbackrest_archives_with_repo" imports: - "by_ssh" vars: by_ssh_command: "check_pgbackrest --stanza=$stanza$ --service=archives --prefix=\"$prefix$\" --repo={{ default_repo_key }}" when: "'icinga2' in group_names and default_repo_key is defined" - name: Create Icinga2 check retention services for db hosts - multiple repositories t_systems_mms.icinga_director.icinga_service: state: present url: "{{ icinga_url }}" url_username: "{{ icinga_user }}" url_password: "{{ icinga_pass }}" object_name: "pgbackrest_retention_repo{{ default_repo_key }}" imports: - "service-template" check_command: "by_ssh_pgbackrest_retention_with_repo" host: "{{ item }}" vars: by_ssh_logname: "accessed_by_ssh" stanza: "{{ cluster_name }}" retention_full: "{{ hostvars[item].pgbackrest_repo_retention_full }}" prefix: "sudo -u {{ hostvars[item].pg_owner }}" loop: "{{ pgbackrest_servers }}" when: "'icinga2' in group_names and default_repo_key is defined" - name: Create Icinga2 check retention services for repo host - multiple repositories t_systems_mms.icinga_director.icinga_service: state: present url: "{{ icinga_url }}" url_username: "{{ icinga_user }}" url_password: "{{ icinga_pass }}" object_name: "pgbackrest_retention_repo{{ default_repo_key }}" imports: - "service-template" check_command: "by_ssh_pgbackrest_retention_with_repo" host: "{{ item }}" vars: by_ssh_logname: "accessed_by_ssh" stanza: "{{ cluster_name }}" retention_full: "{{ hostvars[item].pgbackrest_repo_retention_full }}" prefix: "sudo -u {{ 
hostvars[item].pgbackrest_user }}" loop: "{{ repository_server }}" when: "'icinga2' in group_names and default_repo_key is defined" - name: Create Icinga2 check archives services for db hosts - multiple repositories t_systems_mms.icinga_director.icinga_service: state: present url: "{{ icinga_url }}" url_username: "{{ icinga_user }}" url_password: "{{ icinga_pass }}" object_name: "pgbackrest_archives_repo{{ default_repo_key }}" imports: - "service-template" check_command: "by_ssh_pgbackrest_archives_with_repo" host: "{{ item }}" vars: by_ssh_logname: "accessed_by_ssh" stanza: "{{ cluster_name }}" prefix: "sudo -u {{ hostvars[item].pg_owner }}" loop: "{{ pgbackrest_servers }}" when: "'icinga2' in group_names and default_repo_key is defined" - name: Create Icinga2 check archives services for repo host - multiple repositories t_systems_mms.icinga_director.icinga_service: state: present url: "{{ icinga_url }}" url_username: "{{ icinga_user }}" url_password: "{{ icinga_pass }}" object_name: "pgbackrest_archives_repo{{ default_repo_key }}" imports: - "service-template" check_command: "by_ssh_pgbackrest_archives_with_repo" host: "{{ item }}" vars: by_ssh_logname: "accessed_by_ssh" stanza: "{{ cluster_name }}" prefix: "sudo -u {{ hostvars[item].pgbackrest_user }}" loop: "{{ repository_server }}" when: "'icinga2' in group_names and default_repo_key is defined" # Deploy - name: Deploy Icinga2 config uri: url: "{{ icinga_url }}/director/config/deploy" user: "{{ icinga_user }}" password: "{{ icinga_pass }}" method: POST headers: Accept: "application/json" when: "'icinga2' in group_names" - name: Check Icinga2 services include_tasks: icinga2-check.yml when: "'icinga2' in group_names" check_pgbackrest-REL2_2/tests/roles/setup_check_pgbackrest/tasks/main.yml000066400000000000000000000041051415336775100270630ustar00rootroot00000000000000--- - name: Create a list of primary and standby instances using pgbackrest set_fact: pgbackrest_servers: "{{ pgbackrest_servers | default([]) | union([ item ]) }}" when: "hostvars[item].pgbackrest == true" loop: "{{ groups['primary'] | list | union (groups['standby'] | default([]) | list) }}" loop_control: label: >- {{ item }} - name: Identify repository server set_fact: repository_server: "{{ groups['pgbackrest_repo_host']| default([]) | list }}" - name: Install check_pgbackrest package package: name: - check-pgbackrest state: latest when: > (inventory_hostname in repository_server or inventory_hostname in pgbackrest_servers) and not check_pgbackrest_build | bool and ansible_os_family == 'Debian' - name: Install check_pgbackrest package package: name: - nagios-plugins-pgbackrest state: latest when: > (inventory_hostname in repository_server or inventory_hostname in pgbackrest_servers) and not check_pgbackrest_build | bool and ansible_os_family == 'RedHat' - name: Create a symbolic link file: src: /usr/lib64/nagios/plugins/check_pgbackrest dest: /usr/bin/check_pgbackrest state: link when: > (inventory_hostname in repository_server or inventory_hostname in pgbackrest_servers) and not check_pgbackrest_build | bool and ansible_os_family == 'RedHat' - name: Build check_pgbackrest from sources include_tasks: build.yml vars: check_pgbackrest_src_dir: /opt/check_pgbackrest/src when: > (inventory_hostname in repository_server or inventory_hostname in pgbackrest_servers) and check_pgbackrest_build | bool - shell: check_pgbackrest --version | cut -f1 -d"," | awk '{print $3}' register: version when: inventory_hostname in repository_server or inventory_hostname in pgbackrest_servers 
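# Descriptive note on the unnamed shell task above: it extracts the installed plugin
# version from `check_pgbackrest --version`; `cut -f1 -d","` keeps the text before the
# first comma and `awk '{print $3}'` keeps the third word, so output shaped like
# "check_pgbackrest version 2.2, ..." (an assumed, illustrative output format) would
# register "2.2" in `version` for the debug task below.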
- name: check_pgbackrest installed version debug: var=version.stdout when: inventory_hostname in repository_server or inventory_hostname in pgbackrest_servers - name: Deploy Icinga2 check services include_tasks: icinga2-config.yml when: deploy_icinga2 | bool check_pgbackrest-REL2_2/tests/roles/setup_pgbackrest/000077500000000000000000000000001415336775100231325ustar00rootroot00000000000000check_pgbackrest-REL2_2/tests/roles/setup_pgbackrest/defaults/000077500000000000000000000000001415336775100247415ustar00rootroot00000000000000check_pgbackrest-REL2_2/tests/roles/setup_pgbackrest/defaults/main.yml000066400000000000000000000016471415336775100264200ustar00rootroot00000000000000--- pgbackrest_build: false pgbackrest_configuration_file: "/etc/pgbackrest.conf" pgbackrest_excpected_release: "" pgbackrest_force_backup: false pgbackrest_git_url: "https://github.com/pgbackrest/pgbackrest.git" pgbackrest_git_branch: "main" pgbackrest_repo_path: "/var/lib/pgbackrest" pgbackrest_repo_retention_full: 1 pgbackrest_repo_type: "posix" pgbackrest_repo_s3_endpoint: "{{ cluster_name }}-minio" pgbackrest_repo_azure_host: "{{ cluster_name }}-azurite" pgbackrest_repo_cipher_pass: "it3BF2WqbFCNbY4KkSbvUsRybHyJkvcmQYAOB46x3qXfrc0EKqGGClsh42Q1g91O" pgbackrest_user: "pgbackrest" build_packages: common: - git - make - gcc Debian: - libpq-dev - libssl-dev - libxml2-dev - pkg-config - liblz4-dev - libzstd-dev - libbz2-dev - libz-dev - libyaml-dev RedHat: - openssl-devel - libxml2-devel - lz4-devel - libzstd-devel - bzip2-devel - libyaml-devel check_pgbackrest-REL2_2/tests/roles/setup_pgbackrest/tasks/000077500000000000000000000000001415336775100242575ustar00rootroot00000000000000check_pgbackrest-REL2_2/tests/roles/setup_pgbackrest/tasks/backup.yml000066400000000000000000000050261415336775100262520ustar00rootroot00000000000000--- - name: Take full backup on repository server command: > pgbackrest backup --type=full --stanza="{{ cluster_name }}" become_user: "{{ pgbackrest_user }}" become: yes when: inventory_hostname in repository_server and pgbackrest_force_backup - name: Take diff backup on repository server command: > pgbackrest backup --type=diff --stanza="{{ cluster_name }}" become_user: "{{ pgbackrest_user }}" become: yes when: inventory_hostname in repository_server and pgbackrest_force_backup - name: Take incr backup on repository server command: > pgbackrest backup --type=incr --stanza="{{ cluster_name }}" become_user: "{{ pgbackrest_user }}" become: yes when: inventory_hostname in repository_server and pgbackrest_force_backup - name: Take incr backup from standby on repository server command: > pgbackrest backup --type=incr --backup-standby --stanza="{{ cluster_name }}" become_user: "{{ pgbackrest_user }}" become: yes when: inventory_hostname in repository_server and pgbackrest_force_backup and (pgbackrest_servers|length > 1) - name: Take full backup on primary server command: > pgbackrest backup --type=full --stanza="{{ cluster_name }}" become_user: "{{ postgres_user }}" become: yes when: pgbackrest_force_backup and (not repository_server|length > 0) and (inventory_hostname in pgbackrest_servers) and ('primary' in group_names) - name: Take diff backup on primary server command: > pgbackrest backup --type=diff --stanza="{{ cluster_name }}" become_user: "{{ postgres_user }}" become: yes when: pgbackrest_force_backup and (not repository_server|length > 0) and (inventory_hostname in pgbackrest_servers) and ('primary' in group_names) - name: Take incr backup on primary server command: > pgbackrest backup 
--type=incr --stanza="{{ cluster_name }}" become_user: "{{ postgres_user }}" become: yes when: pgbackrest_force_backup and (not repository_server|length > 0) and (inventory_hostname in pgbackrest_servers) and ('primary' in group_names) - name: Take backup on standby from primary server using ssh command: "/usr/bin/ssh {{ host }} pgbackrest backup --type=incr --backup-standby --stanza={{ cluster_name }}" vars: host: "{{ hostvars[item].private_ip }}" loop: "{{ pgbackrest_standbies }}" become_user: "{{ postgres_user }}" become: yes when: pgbackrest_force_backup and (not repository_server|length > 0) and (inventory_hostname in pgbackrest_servers) and ('primary' in group_names)check_pgbackrest-REL2_2/tests/roles/setup_pgbackrest/tasks/build.yml000066400000000000000000000036271415336775100261110ustar00rootroot00000000000000--- - name: Install build requirements package: name: > {{ query('flattened', package_lists) }} state: latest vars: package_lists: - "{{ build_packages['common'] }}" - "{{ build_packages[ansible_os_family] }}" - name: Install PG devel package package: name: "postgresql-devel" state: latest when: pg_type == 'PG' and ansible_os_family == 'RedHat' - name: Install EPAS devel package package: name: "edb-as{{ pg_version }}-server-devel" state: latest when: pg_type == 'EPAS' and ansible_os_family == 'RedHat' - name: Ensure src directory exists file: state=directory path="{{ pgbackrest_src_dir }}" - name: Fetch pgbackrest from {{ pgbackrest_git_url }}, {{ pgbackrest_git_branch }} git: repo: "{{ pgbackrest_git_url }}" dest: "{{ pgbackrest_src_dir }}" version: "{{ pgbackrest_git_branch }}" depth: 1 accept_hostkey: yes - name: Fetch currently checked-out branch in {{ pgbackrest_src_dir }} shell: git branch | sed -n 's/^\* //p' args: chdir: "{{ pgbackrest_src_dir }}" register: git_branch - name: pgbackrest source branch to build debug: var=git_branch.stdout - name: Remove old build directory file: state=absent path="{{ pgbackrest_build_dir }}" force=yes - name: Ensure build directory exists file: state=directory path="{{ pgbackrest_build_dir }}" - set_fact: cppflags: "CPPFLAGS='-I /usr/edb/as{{ pg_version }}/include' LDFLAGS='-L/usr/edb/as{{ pg_version }}/lib'" when: pg_type == 'EPAS' - name: Configure pgbackrest shell: "{{ pgbackrest_src_dir }}/src/configure {{ cppflags | default('', true) }} --prefix={{ pgbackrest_build_prefix }} --bindir={{ pgbackrest_build_prefix }}/bin" args: chdir: "{{ pgbackrest_build_dir }}" - name: Build pgbackrest shell: "make" args: chdir: "{{ pgbackrest_build_dir }}" - name: Install pgbackrest shell: "make install" args: chdir: "{{ pgbackrest_build_dir }}"check_pgbackrest-REL2_2/tests/roles/setup_pgbackrest/tasks/client.yml000066400000000000000000000013121415336775100262550ustar00rootroot00000000000000--- - name: Build configuration file {{ pgbackrest_configuration_file }} template: src: "pgbackrest-dbserver.conf.j2" dest: "{{ pgbackrest_configuration_file }}" owner: "{{ postgres_user }}" group: "{{ postgres_user }}" mode: 0640 become: yes register: pgbackrest_config - name: Record if a new backup is needed set_fact: pgbackrest_force_backup: true when: pgbackrest_config.changed - name: Ensure pgbackrest directories exist with the right ownership and permissions file: name: "{{ item }}" state: directory owner: "{{ postgres_user }}" group: "{{ postgres_user }}" mode: 0770 loop: - /var/log/pgbackrest - /var/spool/pgbackrest become: yes 
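# Note: client.yml only renders the db-server configuration file and prepares the log
# and spool directories; when the rendered template changes, pgbackrest_force_backup is
# set to true so that backup.yml (included later from tasks/main.yml) takes a fresh set
# of backups against the updated configuration.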
check_pgbackrest-REL2_2/tests/roles/setup_pgbackrest/tasks/create_user.yml000066400000000000000000000011321415336775100273000ustar00rootroot00000000000000--- - name: Ensure pgbackrest group exists group: name: "{{ pgbackrest_user }}" state: present become: yes - name: Ensure pgbackrest system user {{ pgbackrest_user }} exists user: name: "{{ pgbackrest_user }}" group: "{{ pgbackrest_user }}" state: present become: yes - name: Ensure pgbackrest directories exist with the right ownership and permissions file: name: "{{ item }}" state: directory owner: "{{ pgbackrest_user }}" group: "{{ pgbackrest_user }}" mode: 0770 loop: - /var/log/pgbackrest - "{{ pgbackrest_repo_path }}" become: yescheck_pgbackrest-REL2_2/tests/roles/setup_pgbackrest/tasks/info.yml000066400000000000000000000007371415336775100257440ustar00rootroot00000000000000--- - name: Info command on repository server command: > pgbackrest info --stanza="{{ cluster_name }}" become_user: "{{ pgbackrest_user }}" become: yes when: inventory_hostname in repository_server register: info - name: Info command on database server command: > pgbackrest info --stanza="{{ cluster_name }}" become_user: "{{ postgres_user }}" become: yes when: inventory_hostname in pgbackrest_servers register: info - debug: var=info.stdout_linescheck_pgbackrest-REL2_2/tests/roles/setup_pgbackrest/tasks/main.yml000066400000000000000000000051261415336775100257320ustar00rootroot00000000000000--- - name: Create a list of primary and standby instances using pgbackrest set_fact: pgbackrest_servers: "{{ pgbackrest_servers | default([]) | union([ item ]) }}" when: "hostvars[item].pgbackrest == true" loop: "{{ groups['primary'] | list | union (groups['standby'] | default([]) | list) }}" loop_control: label: >- {{ item }} - name: Create a list of standby instances using pgbackrest set_fact: pgbackrest_standbies: "{{ pgbackrest_standbies | default([]) | union([ item ]) }}" when: "hostvars[item].pgbackrest == true" loop: "{{ groups['standby'] | default([]) | list }}" loop_control: label: >- {{ item }} - name: Identify repository server set_fact: repository_server: "{{ groups['pgbackrest_repo_host']| default([]) | list }}" - set_fact: postgres_user: "{{ pg_owner }}" when: inventory_hostname in pgbackrest_servers - set_fact: pgbackrest_user: "{{ pgbackrest_user }}" pgbackrest_repo_retention_full: "{{ pgbackrest_repo_retention_full }}" pgbackrest_repo_type: "{{ pgbackrest_repo_type }}" - name: Ensure repository type is valid assert: msg: "Unsupported repository type: '{{ pgbackrest_repo_type }}'" that: - pgbackrest_repo_type in _available_repo_types vars: _available_repo_types: - 'azure' - 'multi' - 'posix' - 's3' - name: Build pgbackrest from sources include_tasks: build.yml vars: pgbackrest_src_dir: /opt/pgbackrest/src pgbackrest_build_dir: /opt/pgbackrest/build pgbackrest_build_prefix: /usr when: pgbackrest_build | bool - name: Install pgbackrest package package: name: - pgbackrest state: latest when: not pgbackrest_build | bool - shell: pgbackrest version | awk '{print $2}' register: version - name: pgbackrest installed version debug: var=version.stdout - name: Ensure the pgbackrest installed version match {{ pgbackrest_excpected_release }} assert: that: - "'{{ pgbackrest_excpected_release }}' in version.stdout" - include_tasks: server.yml when: inventory_hostname in repository_server - include_tasks: ssh_setup.yml - include_tasks: client.yml when: inventory_hostname in pgbackrest_servers - name: Configure archive_command include_role: name: 
edb_devops.edb_postgres.manage_dbserver vars: pg_postgres_conf_params: - name: archive_command value: "pgbackrest --stanza={{ cluster_name }} --log-level-console=debug archive-push %p" when: inventory_hostname in pgbackrest_servers - include_tasks: stanza-create.yml - include_tasks: backup.yml - include_tasks: info.ymlcheck_pgbackrest-REL2_2/tests/roles/setup_pgbackrest/tasks/server.yml000066400000000000000000000007131415336775100263110ustar00rootroot00000000000000--- - include_tasks: create_user.yml - name: Build configuration file {{ pgbackrest_configuration_file }} template: src: "pgbackrest-repository.conf.j2" dest: "{{ pgbackrest_configuration_file }}" owner: "{{ pgbackrest_user }}" group: "{{ pgbackrest_user }}" mode: 0640 become: yes register: pgbackrest_config - name: Record if a new backup is needed set_fact: pgbackrest_force_backup: true when: pgbackrest_config.changed check_pgbackrest-REL2_2/tests/roles/setup_pgbackrest/tasks/ssh_setup.yml000066400000000000000000000075571415336775100270350ustar00rootroot00000000000000--- - name: Ensure the localhost keys directory exists file: state: directory path: "{{ cluster_dir }}/keys" delegate_to: localhost - name: Generate {{ pgbackrest_user }} SSH keys community.crypto.openssh_keypair: path: "{{ cluster_dir }}/keys/id_pgbackrest" delegate_to: localhost when: inventory_hostname in repository_server - name: Generate db hosts SSH keys community.crypto.openssh_keypair: path: "{{ cluster_dir }}/keys/id_postgres" delegate_to: localhost when: inventory_hostname in pgbackrest_servers - name: Authorise SSH connection from {{ pgbackrest_user }} authorized_key: user: "{{ postgres_user }}" key: "{{ lookup('file', cluster_dir+'/keys/id_pgbackrest.pub') }}" when: inventory_hostname in pgbackrest_servers and repository_server|length > 0 - name: Authorise SSH connection between db hosts authorized_key: user: "{{ postgres_user }}" key: "{{ lookup('file', cluster_dir+'/keys/id_postgres.pub') }}" when: inventory_hostname in pgbackrest_servers - name: Authorise SSH connection to {{ pgbackrest_user }} authorized_key: user: "{{ pgbackrest_user }}" key: "{{ lookup('file', cluster_dir+'/keys/id_postgres.pub') }}" when: inventory_hostname in repository_server - name: Ensure that .ssh exists on db hosts file: path: "~{{ postgres_user }}/.ssh" state: directory mode: '0700' become_user: "{{ postgres_user }}" become: yes when: inventory_hostname in pgbackrest_servers - name: Install keypair on db hosts copy: src: "{{ item.src }}" dest: "{{ item.dest }}" mode: "{{ item.mode }}" with_items: - src: "{{ cluster_dir }}/keys/id_postgres" dest: "~{{ postgres_user }}/.ssh/id_rsa" mode: '0600' - src: "{{ cluster_dir }}/keys/id_postgres.pub" dest: "~{{ postgres_user }}/.ssh/id_rsa.pub" mode: '0640' become_user: "{{ postgres_user }}" become: yes when: inventory_hostname in pgbackrest_servers - name: Ensure that .ssh exists on repo host file: path: "~{{ pgbackrest_user }}/.ssh" state: directory mode: '0700' become_user: "{{ pgbackrest_user }}" become: yes when: inventory_hostname in repository_server - name: Install keypair on repo host copy: src: "{{ item.src }}" dest: "{{ item.dest }}" mode: "{{ item.mode }}" with_items: - src: "{{ cluster_dir }}/keys/id_pgbackrest" dest: "~{{ pgbackrest_user }}/.ssh/id_rsa" mode: '0600' - src: "{{ cluster_dir }}/keys/id_pgbackrest.pub" dest: "~{{ pgbackrest_user }}/.ssh/id_rsa.pub" mode: '0640' become_user: "{{ pgbackrest_user }}" become: yes when: inventory_hostname in repository_server - name: Test SSH connection from repo host shell: 
"/usr/bin/ssh {{ssh_args}} {{ user }}@{{ host }} uname -a" vars: host: "{{ hostvars[item].private_ip }}" user: "{{ hostvars[item].pg_owner }}" ssh_args: "-o ConnectTimeout=10 -o BatchMode=yes -o StrictHostKeyChecking=no" loop: "{{ pgbackrest_servers }}" become_user: "{{ pgbackrest_user }}" become: yes when: inventory_hostname in repository_server - name: Test SSH connection to repo host shell: "/usr/bin/ssh {{ssh_args}} {{ user }}@{{ host }} uname -a" vars: host: "{{ hostvars[pgbackrest_repo_host].private_ip }}" user: "{{ pgbackrest_user }}" ssh_args: "-o ConnectTimeout=10 -o BatchMode=yes -o StrictHostKeyChecking=no" become_user: "{{ postgres_user }}" become: yes when: inventory_hostname in pgbackrest_servers and repository_server|length > 0 - name: Test SSH connection to db hosts shell: "/usr/bin/ssh {{ssh_args}} {{ user }}@{{ host }} uname -a" vars: host: "{{ hostvars[item].private_ip }}" user: "{{ hostvars[item].pg_owner }}" ssh_args: "-o ConnectTimeout=10 -o BatchMode=yes -o StrictHostKeyChecking=no" loop: "{{ pgbackrest_servers }}" become_user: "{{ postgres_user }}" become: yes when: inventory_hostname in pgbackrest_serverscheck_pgbackrest-REL2_2/tests/roles/setup_pgbackrest/tasks/stanza-create.yml000066400000000000000000000020131415336775100275370ustar00rootroot00000000000000--- - name: Initialise the stanza on repository server command: > pgbackrest stanza-create --stanza="{{ cluster_name }}" become_user: "{{ pgbackrest_user }}" become: yes when: inventory_hostname in repository_server - name: check cluster configuration on repository server command: > pgbackrest check --stanza="{{ cluster_name }}" become_user: "{{ pgbackrest_user }}" become: yes when: inventory_hostname in repository_server - name: Initialise the stanza on primary server command: > pgbackrest stanza-create --stanza="{{ cluster_name }}" become_user: "{{ postgres_user }}" become: yes when: (not repository_server|length > 0) and (inventory_hostname in pgbackrest_servers) and ('primary' in group_names) - name: check cluster configuration on primary server command: > pgbackrest check --stanza="{{ cluster_name }}" become_user: "{{ postgres_user }}" become: yes when: (not repository_server|length > 0) and (inventory_hostname in pgbackrest_servers) and ('primary' in group_names) check_pgbackrest-REL2_2/tests/roles/setup_pgbackrest/templates/000077500000000000000000000000001415336775100251305ustar00rootroot00000000000000check_pgbackrest-REL2_2/tests/roles/setup_pgbackrest/templates/pgbackrest-dbserver.conf.j2000066400000000000000000000055551415336775100322620ustar00rootroot00000000000000[global] {% if repository_server|length > 0 %} repo1-host={{ hostvars[pgbackrest_repo_host].private_ip }} repo1-host-user={{ pgbackrest_user }} repo1-cipher-type=aes-256-cbc repo1-cipher-pass={{ pgbackrest_repo_cipher_pass }} {% if pgbackrest_repo_type == "multi" %} repo2-host={{ hostvars[pgbackrest_repo_host].private_ip }} repo2-host-user={{ pgbackrest_user }} repo2-cipher-type=aes-256-cbc repo2-cipher-pass={{ pgbackrest_repo_cipher_pass }} {% if ansible_os_family == 'Debian' %} repo1-path=/repo1 repo2-path=/repo2 {% endif %} {% endif %} {% elif pgbackrest_repo_type == "s3" %} repo1-type=s3 repo1-path=/repo1 repo1-s3-endpoint={{ pgbackrest_repo_s3_endpoint }} repo1-s3-region=eu-west-2 repo1-s3-bucket=bucket repo1-s3-key=accessKey repo1-s3-key-secret=superSECRETkey repo1-s3-uri-style=path repo1-storage-verify-tls=n repo1-retention-full={{ pgbackrest_repo_retention_full }} repo1-cipher-type=aes-256-cbc repo1-cipher-pass={{ 
pgbackrest_repo_cipher_pass }} start-fast=y {% elif pgbackrest_repo_type == "azure" %} repo1-type=azure repo1-path=/repo1 repo1-storage-host={{ pgbackrest_repo_azure_host }} repo1-azure-account=pgbackrest repo1-azure-key=aF49wnZP repo1-azure-container=container repo1-storage-verify-tls=n repo1-retention-full={{ pgbackrest_repo_retention_full }} repo1-cipher-type=aes-256-cbc repo1-cipher-pass={{ pgbackrest_repo_cipher_pass }} start-fast=y {% elif pgbackrest_repo_type == "multi" %} repo1-type=s3 repo1-path=/repo1 repo1-s3-endpoint={{ pgbackrest_repo_s3_endpoint }} repo1-s3-region=eu-west-2 repo1-s3-bucket=bucket repo1-s3-key=accessKey repo1-s3-key-secret=superSECRETkey repo1-s3-uri-style=path repo1-storage-verify-tls=n repo1-retention-full={{ pgbackrest_repo_retention_full }} repo1-cipher-type=aes-256-cbc repo1-cipher-pass={{ pgbackrest_repo_cipher_pass }} repo2-type=azure repo2-path=/repo2 repo2-storage-host={{ pgbackrest_repo_azure_host }} repo2-azure-account=pgbackrest repo2-azure-key=aF49wnZP repo2-azure-container=container repo2-storage-verify-tls=n repo2-retention-full={{ pgbackrest_repo_retention_full }} repo2-cipher-type=aes-256-cbc repo2-cipher-pass={{ pgbackrest_repo_cipher_pass }} start-fast=y {% else %} repo1-type=posix repo1-path={{ pgbackrest_repo_path }} repo1-retention-full={{ pgbackrest_repo_retention_full }} repo1-cipher-type=aes-256-cbc repo1-cipher-pass={{ pgbackrest_repo_cipher_pass }} start-fast=y {% endif %} log-level-console=info log-level-file=debug delta=y process-max=2 [{{ cluster_name }}] pg1-path={{ pg_data }} pg1-user={{ postgres_user }} pg1-port={{ pg_port }} pg1-socket-path={{ pg_unix_socket_directories[0] }} {% if 'standby' in group_names %} backup-standby=y pg2-host={{ upstream_node_private_ip }} pg2-host-user={{ postgres_user }} pg2-path={{ pg_data }} recovery-option=primary_conninfo=host={{ upstream_node_private_ip }} user={{ pg_replication_user }} port={{ pg_port }} application_name={{ inventory_hostname }} {% endif %} check_pgbackrest-REL2_2/tests/roles/setup_pgbackrest/templates/pgbackrest-repository.conf.j2000066400000000000000000000042051415336775100326540ustar00rootroot00000000000000[global] {% if pgbackrest_repo_type == "s3" %} repo1-type=s3 repo1-path=/repo1 repo1-s3-endpoint={{ pgbackrest_repo_s3_endpoint }} repo1-s3-region=eu-west-2 repo1-s3-bucket=bucket repo1-s3-key=accessKey repo1-s3-key-secret=superSECRETkey repo1-s3-uri-style=path repo1-storage-verify-tls=n repo1-retention-full={{ pgbackrest_repo_retention_full }} repo1-cipher-type=aes-256-cbc repo1-cipher-pass={{ pgbackrest_repo_cipher_pass }} {% elif pgbackrest_repo_type == "azure" %} repo1-type=azure repo1-path=/repo1 repo1-storage-host={{ pgbackrest_repo_azure_host }} repo1-azure-account=pgbackrest repo1-azure-key=aF49wnZP repo1-azure-container=container repo1-storage-verify-tls=n repo1-retention-full={{ pgbackrest_repo_retention_full }} repo1-cipher-type=aes-256-cbc repo1-cipher-pass={{ pgbackrest_repo_cipher_pass }} {% elif pgbackrest_repo_type == "multi" %} repo1-type=s3 repo1-path=/repo1 repo1-s3-endpoint={{ pgbackrest_repo_s3_endpoint }} repo1-s3-region=eu-west-2 repo1-s3-bucket=bucket repo1-s3-key=accessKey repo1-s3-key-secret=superSECRETkey repo1-s3-uri-style=path repo1-storage-verify-tls=n repo1-retention-full={{ pgbackrest_repo_retention_full }} repo1-cipher-type=aes-256-cbc repo1-cipher-pass={{ pgbackrest_repo_cipher_pass }} repo2-type=azure repo2-path=/repo2 repo2-storage-host={{ pgbackrest_repo_azure_host }} repo2-azure-account=pgbackrest repo2-azure-key=aF49wnZP 
repo2-azure-container=container repo2-storage-verify-tls=n repo2-retention-full={{ pgbackrest_repo_retention_full }} repo2-cipher-type=aes-256-cbc repo2-cipher-pass={{ pgbackrest_repo_cipher_pass }} {% else %} repo1-type=posix repo1-path={{ pgbackrest_repo_path }} repo1-retention-full={{ pgbackrest_repo_retention_full }} repo1-cipher-type=aes-256-cbc repo1-cipher-pass={{ pgbackrest_repo_cipher_pass }} {% endif %} log-level-console=info log-level-file=debug start-fast=y delta=y process-max=2 [{{ cluster_name }}] {% for server in pgbackrest_servers %} {% set v = hostvars[server] %} pg{{ loop.index }}-host={{ v.private_ip }} pg{{ loop.index }}-host-user={{ v.pg_owner }} pg{{ loop.index }}-path={{ v.pg_data }} pg{{ loop.index }}-socket-path={{ v.pg_unix_socket_directories[0] }} {% endfor %} check_pgbackrest-REL2_2/tests/roles/sys/000077500000000000000000000000001415336775100204035ustar00rootroot00000000000000check_pgbackrest-REL2_2/tests/roles/sys/pkg/000077500000000000000000000000001415336775100211645ustar00rootroot00000000000000check_pgbackrest-REL2_2/tests/roles/sys/pkg/defaults/000077500000000000000000000000001415336775100227735ustar00rootroot00000000000000check_pgbackrest-REL2_2/tests/roles/sys/pkg/defaults/main.yml000066400000000000000000000003541415336775100244440ustar00rootroot00000000000000--- default_packages: common: - openssh-server - openssl - sudo - wget Debian: - openssh-client - iproute2 - gnupg - xxd RedHat: - openssh-clients - iproute - vim-common - yum-utils check_pgbackrest-REL2_2/tests/roles/sys/pkg/tasks/000077500000000000000000000000001415336775100223115ustar00rootroot00000000000000check_pgbackrest-REL2_2/tests/roles/sys/pkg/tasks/main.yml000066400000000000000000000003661415336775100237650ustar00rootroot00000000000000--- - name: Install required packages package: name: > {{ query('flattened', package_lists) }} state: latest vars: package_lists: - "{{ default_packages['common'] }}" - "{{ default_packages[ansible_os_family] }}"check_pgbackrest-REL2_2/tests/roles/sys/tasks/000077500000000000000000000000001415336775100215305ustar00rootroot00000000000000check_pgbackrest-REL2_2/tests/roles/sys/tasks/main.yml000066400000000000000000000033431415336775100232020ustar00rootroot00000000000000--- - include_role: name: sys/pkg - name: Start ssh server service: name: "{{ ssh_service_name }}" state: started vars: ssh_service_name: "{{ (ansible_os_family == 'RedHat')|ternary('sshd', 'ssh') }}" - name: Ensure authorized_keys allows admin access authorized_key: user: root state: present key: "{{ lookup('file', ssh_key_file_pub) }}" vars: ssh_key_file_pub: "{{ cluster_dir }}/{{ ssh_key_file }}.pub" - name: Ensure that .ssh exists on all hosts file: path: "~/.ssh" state: directory mode: '0700' - name: Install keypair on all hosts copy: src: "{{ item.src }}" dest: "{{ item.dest }}" mode: "{{ item.mode }}" with_items: - src: "{{ cluster_dir }}/{{ ssh_key_file }}" dest: "~/.ssh/id_rsa" mode: '0600' - src: "{{ cluster_dir }}/{{ ssh_key_file }}.pub" dest: "~/.ssh/id_rsa.pub" mode: '0640' - name: Update network facts ansible.builtin.setup: gather_subset: - network - name: Set main /etc/hosts entry set_fact: my_hosts_lines: "{{ [main_hosts_line] }}" vars: main_hosts_line: >- {{ ansible_default_ipv4.address|default(ansible_all_ipv4_addresses[0]) }} {{ [inventory_hostname, inventory_hostname_short]|unique|join(' ') }} - name: Aggregate /etc/hosts lines across hosts set_fact: etc_hosts_lines: "{{ etc_hosts_lines|default([])|union(hostvars[item].my_hosts_lines) }}" with_items: "{{ groups['all'] }}" - 
name: Add entries to /etc/hosts lineinfile: path: /etc/hosts line: "{{ item }}" loop: "{{ etc_hosts_lines }}" when: platform != 'docker' - name: Ensure /run/nologin does not exist file: path: /run/nologin state: absent when: platform in ['docker']check_pgbackrest-REL2_2/tests/roles/sys/testing_repo/000077500000000000000000000000001415336775100231055ustar00rootroot00000000000000check_pgbackrest-REL2_2/tests/roles/sys/testing_repo/tasks/000077500000000000000000000000001415336775100242325ustar00rootroot00000000000000check_pgbackrest-REL2_2/tests/roles/sys/testing_repo/tasks/edb-staging.yml000066400000000000000000000032331415336775100271420ustar00rootroot00000000000000--- - name: Set the os variable set_fact: os: "{{ ansible_distribution }}{{ ansible_distribution_major_version }}" - name: Set staging EDB yum repo replace: path: /etc/yum.repos.d/edb.repo regexp: 'yum.enterprisedb.com' replace: 'yum-staging.enterprisedb.com' when: > ansible_os_family == 'RedHat' and 'icinga2' not in group_names - name: Update yum info ansible.builtin.shell: yum-config-manager --enable edb-testing && yum --enablerepo=edb clean metadata && yum updateinfo -y when: > ansible_os_family == 'RedHat' and 'icinga2' not in group_names - name: Build staging EDB auth conf lineinfile: path: /etc/apt/auth.conf.d/edb.conf line: "machine apt-staging.enterprisedb.com login {{ repo_username }} password {{ repo_password }}" state: present when: > ansible_os_family == 'Debian' and os != 'Debian9' and 'icinga2' not in group_names - name: Add staging EDB apt repo apt_repository: repo: "deb https://apt-staging.enterprisedb.com/{{ ansible_distribution_release }}-edb/ {{ ansible_distribution_release }} main" state: present filename: "edb-{{ ansible_distribution_release }}" when: > ansible_os_family == 'Debian' and os != 'Debian9' and 'icinga2' not in group_names - name: Add staging EDB apt repo (Debian 9) apt_repository: repo: "deb https://{{ repo_username }}:{{ repo_password }}@apt-staging.enterprisedb.com/{{ ansible_distribution_release }}-edb/ {{ ansible_distribution_release }} main" state: present filename: "edb-{{ ansible_distribution_release }}" when: > ansible_os_family == 'Debian' and os == 'Debian9' and 'icinga2' not in group_names check_pgbackrest-REL2_2/tests/roles/sys/testing_repo/tasks/pgdg-apt-testing.yml000066400000000000000000000005231415336775100301330ustar00rootroot00000000000000--- - name: Add testing PG apt repo apt_repository: repo: "deb http://apt.postgresql.org/pub/repos/apt/ {{ ansible_distribution_release }}-pgdg-testing main {{ pg_version }}" state: present filename: "pgdg" - name: Add apt pin preferences template: src: "pin-pgdg.pref.j2" dest: '/etc/apt/preferences.d/pgdg.pref' check_pgbackrest-REL2_2/tests/roles/sys/testing_repo/templates/000077500000000000000000000000001415336775100251035ustar00rootroot00000000000000check_pgbackrest-REL2_2/tests/roles/sys/testing_repo/templates/pin-pgdg.pref.j2000066400000000000000000000001721415336775100300000ustar00rootroot00000000000000Package: pgbackrest Pin: release o=apt.postgresql.org,a={{ ansible_distribution_release }}-pgdg-testing Pin-Priority: 500 check_pgbackrest-REL2_2/tests/run.sh000066400000000000000000000107451415336775100176100ustar00rootroot00000000000000#!/usr/bin/env bash set -o errexit set -o nounset cd "$(dirname "$0")" usage() { echo "Usage:" echo " -A Activity step only." echo " -c Cluster directory." echo " -C Cleaning step only." echo " -h Display this help message." echo " -i Initial step only." 
} INIT_ONLY=false CLEAN_ONLY=false PROVISION=true DEPLOY=true while getopts "Ac:Chi" o; do case "${o}" in A) INIT_ONLY=false CLEAN_ONLY=false PROVISION=false DEPLOY=false ACTIVITY=true ;; c) CLUSTER_DIR=${OPTARG} ;; C) CLEAN_ONLY=true ;; h ) usage exit 0 ;; i) INIT_ONLY=true ;; *) usage 1>&2 exit 1 ;; esac done shift $((OPTIND-1)) if $INIT_ONLY; then #------------------------------------------------------------------------------------------------------------------- echo '-------------------- Init --------------------' && date #------------------------------------------------------------------------------------------------------------------- # This section is intended to update the GitHub Action Runner (Ubuntu) echo 'Update apt' sudo apt-get update echo '--------------------' echo 'Whoami?' whoami echo '--------------------' echo 'Docker installed?' docker version echo '--------------------' echo 'Ansible installed?' ansible --version echo '--------------------' echo 'Install ansible dependencies' pipx inject ansible-core docker-py ansible-galaxy collection install community.docker ansible-galaxy collection install edb_devops.edb_postgres ansible-galaxy collection install t_systems_mms.icinga_director echo '--------------------' echo 'Install MinIO Python SDK' pip install minio echo '--------------------' echo 'Install Azure Storage Blobs client library for Python' pip install azure-storage-blob # Exit with success exit 0; fi if $CLEAN_ONLY; then #------------------------------------------------------------------------------------------------------------------- echo '-------------------- Clean --------------------' && date #------------------------------------------------------------------------------------------------------------------- if [ -e $CLUSTER_DIR ]; then ansible-playbook platforms/deprovision.yml -e cluster_dir=$CLUSTER_DIR sudo rm --force --preserve-root --recursive $CLUSTER_DIR fi # Exit with success exit 0; fi if $PROVISION; then #----------------------------------------------------------------------------------------------------------------------- echo '-------------------- Provision --------------------' && date #----------------------------------------------------------------------------------------------------------------------- export ANSIBLE_ROLES_PATH=${ANSIBLE_ROLES_PATH:+$ANSIBLE_ROLES_PATH:}$(pwd)/roles : "${CLUSTER_DIR:?Variable not set or empty}" echo "CLUSTER_DIR=$CLUSTER_DIR" ansible-playbook platforms/provision.yml -e cluster_dir=$CLUSTER_DIR ansible-playbook platforms/system-config.yml -i "$CLUSTER_DIR/inventory.docker.yml" -e cluster_dir=$CLUSTER_DIR fi if $DEPLOY; then #----------------------------------------------------------------------------------------------------------------------- echo '-------------------- Deploy --------------------' && date #----------------------------------------------------------------------------------------------------------------------- : "${EDB_REPO_USERNAME:?Variable not set or empty}" : "${EDB_REPO_PASSWORD:?Variable not set or empty}" export ANSIBLE_HOST_KEY_CHECKING=False export ANSIBLE_REMOTE_USER="root" ansible-playbook playbooks/deploy.yml -i "$CLUSTER_DIR/inventory" -e cluster_dir=$CLUSTER_DIR fi if $ACTIVITY; then #----------------------------------------------------------------------------------------------------------------------- echo '-------------------- Simulate basic activity --------------------' && date 
#----------------------------------------------------------------------------------------------------------------------- ansible-playbook playbooks/activity.yml -i "$CLUSTER_DIR/inventory" fi check_pgbackrest-REL2_2/tests/vagrant.sh000077500000000000000000000022621415336775100204440ustar00rootroot00000000000000#!/usr/bin/env bash set -o errexit set -o nounset cd /vagrant export RUN_ARGS="" if [ "$ACTIVITY" == "only" ]; then export ACTIVITY=true export RUN_ARGS="-A" fi echo "ARCH = '$ARCH'" echo "PGBR_BUILD = '$PGBR_BUILD'" echo "PGBR_REPO_TYPE = '$PGBR_REPO_TYPE'" echo "PROFILE = '$PROFILE'" source profile.d/$PROFILE.profile source profile.d/vagrant.profile if [ ! -z "$EXTRA" ]; then export EXTRA_VARS="$EXTRA_VARS $EXTRA" fi if $PGBR_BUILD; then export EXTRA_VARS="$EXTRA_VARS pgbackrest_build=true" fi if [ ! -z "$PGBR_REPO_TYPE" ]; then export EXTRA_VARS="$EXTRA_VARS pgbackrest_repo_type=$PGBR_REPO_TYPE" [ "$PGBR_REPO_TYPE" = "posix" ] && export EXTRA_VARS="$EXTRA_VARS pgbackrest_repo_path=/shared/repo1" fi [ ! -z "$edb_repository_username" ] && export EDB_REPO_USERNAME=$edb_repository_username [ ! -z "$edb_repository_password" ] && export EDB_REPO_PASSWORD=$edb_repository_password [ ! -z "$pgbackrest_git_url" ] && export EXTRA_VARS="$EXTRA_VARS pgbackrest_git_url=$pgbackrest_git_url" [ ! -z "$pgbackrest_git_branch" ] && export EXTRA_VARS="$EXTRA_VARS pgbackrest_git_branch=$pgbackrest_git_branch" echo "EXTRA_VARS = '$EXTRA_VARS'" echo "CLNAME=$CLNAME" sh ci.shcheck_pgbackrest-REL2_2/tests/vagrant.yml-dist000066400000000000000000000005641415336775100215740ustar00rootroot00000000000000# EDB repositories personal credential # https://www.enterprisedb.com/repository-access-thank-you-page # edb_repository_username: "username" # edb_repository_password: "password" # When building pgBackRest from sources, define which Github repository and branch to use # pgbackrest_git_url: "https://github.com/pgbackrest/pgbackrest.git" # pgbackrest_git_branch: "main"
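# Example (hypothetical values): this -dist file is presumably meant to be copied to
# tests/vagrant.yml and edited before running the Vagrant-based tests, e.g.:
# edb_repository_username: "jdoe"
# edb_repository_password: "s3cret"
# pgbackrest_git_branch: "my-feature-branch"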