pax_global_header00006660000000000000000000000064146303631430014515gustar00rootroot0000000000000052 comment=dfc69386753e3793b6960301a00c90b71656bf6f pooch-1.8.2/000077500000000000000000000000001463036314300126355ustar00rootroot00000000000000pooch-1.8.2/.codecov.yml000066400000000000000000000003311463036314300150550ustar00rootroot00000000000000codecov: notify: require_ci_to_pass: no coverage: status: patch: default: target: '80' if_no_uploads: error if_not_found: success if_ci_failed: failure comment: off pooch-1.8.2/.coveragerc000066400000000000000000000000771463036314300147620ustar00rootroot00000000000000[run] omit = */tests/* */_version.py */__init__.py pooch-1.8.2/.flake8000066400000000000000000000001361463036314300140100ustar00rootroot00000000000000[flake8] ignore = E203, E266, E501, W503, F401, E741 max-line-length = 88 max-doc-length = 79 pooch-1.8.2/.gitattributes000066400000000000000000000003251463036314300155300ustar00rootroot00000000000000# Set the default behavior, in case people don't have core.autocrlf set. * text=auto # Declare test data files that will always have LF line endings on checkout (even on Windows). pooch/tests/data/** text eol=lf pooch-1.8.2/.github/000077500000000000000000000000001463036314300141755ustar00rootroot00000000000000pooch-1.8.2/.github/dependabot.yml000066400000000000000000000006721463036314300170320ustar00rootroot00000000000000# To get started with Dependabot version updates, you'll need to specify which # package ecosystems to update and where the package manifests are located. # Please see the documentation for all configuration options: # https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates version: 2 updates: - package-ecosystem: "github-actions" directory: "/" schedule: interval: "weekly" pooch-1.8.2/.github/workflows/000077500000000000000000000000001463036314300162325ustar00rootroot00000000000000pooch-1.8.2/.github/workflows/docs.yml000066400000000000000000000155671463036314300177230ustar00rootroot00000000000000# Build the documentation and deploy to GitHub Pages using GitHub Actions. # # NOTE: Pin actions to a specific commit to avoid having the authentication # token stolen if the Action is compromised. See the comments and links here: # https://github.com/pypa/gh-action-pypi-publish/issues/27 # name: documentation # Only build PRs, the main branch, and releases. Pushes to branches will only # be built when a PR is opened. This avoids duplicated buids in PRs comming # from branches in the origin repository (1 for PR and 1 for push). on: pull_request: push: branches: - main release: types: - published # Use bash by default in all jobs defaults: run: shell: bash jobs: ############################################################################# # Build the docs build: runs-on: ubuntu-latest env: REQUIREMENTS: env/requirements-build.txt env/requirements-docs.txt steps: # Cancel any previous run of the test job # We pin the commit hash corresponding to v0.5.0, and not pinning the tag # because we are giving full access through the github.token. - name: Cancel Previous Runs uses: styfle/cancel-workflow-action@0.12.1 with: access_token: ${{ github.token }} # Checks-out your repository under $GITHUB_WORKSPACE - name: Checkout uses: actions/checkout@v4 with: # Need to fetch more than the last commit so that setuptools-scm can # create the correct version string. If the number of commits since # the last release is greater than this, the version still be wrong. # Increase if necessary. 
fetch-depth: 100 # The GitHub token is preserved by default but this job doesn't need # to be able to push to GitHub. persist-credentials: false # Need the tags so that setuptools-scm can form a valid version number - name: Fetch git tags run: git fetch origin 'refs/tags/*:refs/tags/*' - name: Setup Python uses: actions/setup-python@v5 with: python-version: "3.x" - name: Collect requirements run: | echo "Install Dependente to capture dependencies:" python -m pip install dependente==0.3.0 echo "" echo "Capturing run-time dependencies:" dependente --source install,extras > requirements-full.txt echo "" echo "Capturing dependencies from:" for requirement in $REQUIREMENTS do echo " $requirement" cat $requirement >> requirements-full.txt done echo "" echo "Collected dependencies:" cat requirements-full.txt - name: Get the pip cache folder id: pip-cache run: | echo "::set-output name=dir::$(pip cache dir)" - name: Setup caching for pip packages uses: actions/cache@v4 with: path: ${{ steps.pip-cache.outputs.dir }} key: ${{ runner.os }}-pip-${{ hashFiles('requirements-full.txt') }} - name: Install requirements run: | python -m pip install --requirement requirements-full.txt - name: List installed packages run: python -m pip freeze - name: Build source and wheel distributions run: | make build echo "" echo "Generated files:" ls -lh dist/ - name: Install the package run: python -m pip install dist/*.whl - name: Build the documentation run: make -C doc clean all # Store the docs as a build artifact so we can deploy it later - name: Upload HTML documentation as an artifact uses: actions/upload-artifact@v4 with: name: docs-${{ github.sha }} path: doc/_build/html ############################################################################# # Publish the documentation to gh-pages publish: runs-on: ubuntu-latest needs: build if: github.event_name == 'release' || github.event_name == 'push' steps: - name: Checkout uses: actions/checkout@v4 # Fetch the built docs from the "build" job - name: Download HTML documentation artifact uses: actions/download-artifact@v4 with: name: docs-${{ github.sha }} path: doc/_build/html - name: Checkout the gh-pages branch in a separate folder uses: actions/checkout@v4 with: ref: gh-pages # Checkout to this folder instead of the current one path: deploy # Download the entire history fetch-depth: 0 - name: Push the built HTML to gh-pages run: | # Detect if this is a release or from the main branch if [[ "${{ github.event_name }}" == "release" ]]; then # Get the tag name without the "refs/tags/" part version="${GITHUB_REF#refs/*/}" else version=dev fi echo "Deploying version: $version" # Make the new commit message. Needs to happen before cd into deploy # to get the right commit hash. message="Deploy $version from $(git rev-parse --short HEAD)" cd deploy # Need to have this file so that Github doesn't try to run Jekyll touch .nojekyll # Delete all the files and replace with our new set echo -e "\nRemoving old files from previous builds of ${version}:" rm -rvf ${version} echo -e "\nCopying HTML files to ${version}:" cp -Rvf ../doc/_build/html/ ${version}/ # If this is a new release, update the link from /latest to it if [[ "${version}" != "dev" ]]; then echo -e "\nSetup link from ${version} to 'latest'." rm -f latest ln -sf ${version} latest fi # Stage the commit git add -A . 
echo -e "\nChanges to be applied:" git status # Configure git to be the GitHub Actions account git config user.email "github-actions[bot]@users.noreply.github.com" git config user.name "github-actions[bot]" # If this is a dev build and the last commit was from a dev build # (detect if "dev" was in the previous commit message), reuse the # same commit if [[ "${version}" == "dev" && `git log -1 --format='%s'` == *"dev"* ]]; then echo -e "\nAmending last commit:" git commit --amend --reset-author -m "$message" else echo -e "\nMaking a new commit:" git commit -m "$message" fi # Make the push quiet just in case there is anything that could leak # sensitive information. echo -e "\nPushing changes to gh-pages." git push -fq origin gh-pages 2>&1 >/dev/null echo -e "\nFinished uploading generated files." pooch-1.8.2/.github/workflows/pypi.yml000066400000000000000000000102331463036314300177350ustar00rootroot00000000000000# Publish archives to PyPI and TestPyPI using GitHub Actions. # # NOTE: Pin actions to a specific commit to avoid having the authentication # token stolen if the Action is compromised. See the comments and links here: # https://github.com/pypa/gh-action-pypi-publish/issues/27 # name: pypi on: pull_request: push: branches: - main release: types: - published # Use bash by default in all jobs defaults: run: shell: bash jobs: ############################################################################# # Build and check wheels and source distrubutions build: runs-on: ubuntu-latest steps: # Checks-out your repository under $GITHUB_WORKSPACE - name: Checkout uses: actions/checkout@v4 with: # Need to fetch more than the last commit so that setuptools_scm can # create the correct version string. If the number of commits since # the last release is greater than this, the version will still be # wrong. Increase if necessary. fetch-depth: 100 # The GitHub token is preserved by default but this job doesn't need # to be able to push to GitHub. persist-credentials: false # Need the tags so that setuptools-scm can form a valid version number - name: Fetch git tags run: git fetch origin 'refs/tags/*:refs/tags/*' - name: Setup Python uses: actions/setup-python@v5 with: python-version: "3.x" - name: Install requirements run: | python -m pip install -r env/requirements-build.txt python -m pip install twine - name: List installed packages run: python -m pip freeze - name: Don't use local version numbers for TestPyPI uploads if: github.event_name != 'release' run: | # Change setuptools-scm local_scheme to "no-local-version" so the # local part of the version isn't included, making the version string # compatible with Test PyPI. 
sed --in-place "s/node-and-date/no-local-version/g" pyproject.toml - name: Build source and wheel distributions run: | make build echo "" echo "Generated files:" ls -lh dist/ - name: Check the archives run: twine check dist/* # Store the archives as a build artifact so we can deploy them later - name: Upload archives as artifacts # Only if not a pull request if: success() && github.event_name != 'pull_request' uses: actions/upload-artifact@v4 with: name: pypi-${{ github.sha }} path: dist ############################################################################# # Publish built wheels and source archives to PyPI and test PyPI publish: runs-on: ubuntu-latest needs: build # Only publish from the origin repository, not forks if: github.repository_owner == 'fatiando' && github.event_name != 'pull_request' environment: pypi permissions: # This permission allows trusted publishing to PyPI (without an API token) id-token: write steps: - name: Checkout uses: actions/checkout@v4 with: # The GitHub token is preserved by default but this job doesn't need # to be able to push to GitHub. persist-credentials: false # Fetch the built archives from the "build" job - name: Download built archives artifact uses: actions/download-artifact@v4 with: name: pypi-${{ github.sha }} path: dist - name: Publish to Test PyPI # Only publish to TestPyPI when a PR is merged (pushed to main) if: success() && github.event_name == 'push' uses: pypa/gh-action-pypi-publish@v1.8.14 with: repository_url: https://test.pypi.org/legacy/ # Allow existing releases on test PyPI without errors. # NOT TO BE USED in PyPI! skip_existing: true - name: Publish to PyPI # Only publish to PyPI when a release triggers the build if: success() && github.event_name == 'release' uses: pypa/gh-action-pypi-publish@v1.8.14 pooch-1.8.2/.github/workflows/style.yml000066400000000000000000000030001463036314300201060ustar00rootroot00000000000000# Linting and style checks with GitHub Actions # # NOTE: Pin actions to a specific commit to avoid having the authentication # token stolen if the Action is compromised. See the comments and links here: # https://github.com/pypa/gh-action-pypi-publish/issues/27 # name: checks # Only build PRs and the main branch. Pushes to branches will only be built # when a PR is opened. on: pull_request: push: branches: - main ############################################################################### jobs: format: runs-on: ubuntu-latest steps: - name: Checkout uses: actions/checkout@v4 with: persist-credentials: false - name: Setup Python uses: actions/setup-python@v5 with: python-version: "3.10" - name: Install requirements run: python -m pip install -r env/requirements-style.txt - name: List installed packages run: python -m pip freeze - name: Check code format run: make check-format style: runs-on: ubuntu-latest steps: - name: Checkout uses: actions/checkout@v4 with: persist-credentials: false - name: Setup Python uses: actions/setup-python@v5 with: python-version: "3.10" - name: Install requirements run: python -m pip install -r env/requirements-style.txt - name: List installed packages run: python -m pip freeze - name: Check code style run: make check-style lint pooch-1.8.2/.github/workflows/test.yml000066400000000000000000000131031463036314300177320ustar00rootroot00000000000000# Run tests and upload to Codecov with GitHub Actions # # NOTE: Pin actions to a specific commit to avoid having the authentication # token stolen if the Action is compromised. 
See the comments and links here: # https://github.com/pypa/gh-action-pypi-publish/issues/27 # name: test # Only build PRs, the main branch, and releases. Pushes to branches will only # be built when a PR is opened. This avoids duplicated buids in PRs comming # from branches in the origin repository (1 for PR and 1 for push). on: pull_request: push: branches: - main release: types: - published # Use bash by default in all jobs defaults: run: shell: bash jobs: ############################################################################# # Run tests and upload to codecov test: name: ${{ matrix.os }} python=${{ matrix.python }} dependencies=${{ matrix.dependencies }} if: ${{ github.repository_owner == 'fatiando' || github.event_name != 'schedule' }} runs-on: ${{ matrix.os }} strategy: # Otherwise, the workflow would stop if a single job fails. We want to # run all of them to catch failures in different combinations. fail-fast: false matrix: os: - ubuntu-latest - macos-latest - windows-latest dependencies: - oldest - latest - optional include: - dependencies: oldest python: "3.7" - dependencies: latest python: "3.11" - dependencies: optional python: "3.11" # test on macos-13 (x86) using oldest dependencies and python 3.7 - os: macos-13 dependencies: oldest python: "3.7" exclude: # don't test on macos-latest (arm64) with oldest dependencies - os: macos-latest dependencies: oldest env: REQUIREMENTS: env/requirements-build.txt env/requirements-test.txt # Used to tag codecov submissions OS: ${{ matrix.os }} PYTHON: ${{ matrix.python }} DEPENDENCIES: ${{ matrix.dependencies }} steps: # Cancel any previous run of the test job # We pin the commit hash corresponding to v0.5.0, and not pinning the tag # because we are giving full access through the github.token. - name: Cancel Previous Runs uses: styfle/cancel-workflow-action@0.12.1 with: access_token: ${{ github.token }} # Checks-out your repository under $GITHUB_WORKSPACE - name: Checkout uses: actions/checkout@v4 with: # Need to fetch more than the last commit so that setuptools-scm can # create the correct version string. If the number of commits since # the last release is greater than this, the version still be wrong. # Increase if necessary. fetch-depth: 100 # The GitHub token is preserved by default but this job doesn't need # to be able to push to GitHub. 
persist-credentials: false # Need the tags so that setuptools-scm can form a valid version number - name: Fetch git tags run: git fetch origin 'refs/tags/*:refs/tags/*' - name: Setup Python uses: actions/setup-python@v5 with: python-version: ${{ matrix.python }} - name: Collect requirements run: | echo "Install Dependente to capture dependencies:" python -m pip install dependente==0.3.0 echo "" dependente_args="--source install" if [[ "${{ matrix.dependencies }}" == "oldest" ]]; then dependente_args="--oldest --source install,extras" fi if [[ "${{ matrix.dependencies }}" == "optional" ]]; then dependente_args="--source install,extras" fi echo "Capturing run-time dependencies:" dependente $dependente_args > requirements-full.txt echo "" echo "Capturing dependencies from:" for requirement in $REQUIREMENTS do echo " $requirement" cat $requirement >> requirements-full.txt done echo "" echo "Collected dependencies:" cat requirements-full.txt - name: Get the pip cache folder id: pip-cache run: | echo "::set-output name=dir::$(pip cache dir)" - name: Setup caching for pip packages uses: actions/cache@v4 with: path: ${{ steps.pip-cache.outputs.dir }} key: ${{ runner.os }}-pip-${{ hashFiles('requirements-full.txt') }} - name: Install requirements run: | python -m pip install --requirement requirements-full.txt - name: Build source and wheel distributions run: | make build echo "" echo "Generated files:" ls -lh dist/ - name: Install the package run: python -m pip install --no-deps dist/*.whl - name: List installed packages run: python -m pip freeze - name: Run the tests run: make test - name: Convert coverage report to XML for codecov run: coverage xml - name: Upload coverage to Codecov uses: codecov/codecov-action@v4 with: files: ./coverage.xml env_vars: OS,PYTHON,DEPENDENCIES # Don't mark the job as failed if the upload fails for some reason. # It does sometimes but shouldn't be the reason for running # everything again unless something else is broken. fail_ci_if_error: false env: CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} pooch-1.8.2/.gitignore000066400000000000000000000002521463036314300146240ustar00rootroot00000000000000*.pyc *~ *.so *.pyd .pytest_cache/ build/ dist/ .*.swp .coverage .cache doc/_build doc/api/generated .ipynb_checkpoints *.egg-info MANIFEST .coverage.* pooch/_version.py pooch-1.8.2/.pylintrc000066400000000000000000000304501463036314300145040ustar00rootroot00000000000000[MASTER] # A comma-separated list of package or module names from where C extensions may # be loaded. Extensions are loading into the active Python interpreter and may # run arbitrary code extension-pkg-whitelist= # Add files or directories to the blacklist. They should be base names, not # paths. ignore=CVS,_version.py # Add files or directories matching the regex patterns to the blacklist. The # regex matches against base names, not paths. ignore-patterns= # Python code to execute, usually for sys.path manipulation such as # pygtk.require(). #init-hook= # Use multiple processes to speed up Pylint. jobs=1 # List of plugins (as comma separated values of python modules names) to load, # usually to register additional checkers. load-plugins= # Pickle collected data for later comparisons. persistent=yes # Specify a configuration file. #rcfile= # Allow loading of arbitrary C extensions. Extensions are imported into the # active Python interpreter and may run arbitrary code. unsafe-load-any-extension=no [MESSAGES CONTROL] # Only show warnings with the listed confidence levels. Leave empty to show # all. 
Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED confidence= # Disable the message, report, category or checker with the given id(s). You # can either give multiple identifiers separated by comma (,) or put this # option multiple times (only on the command line, not in the configuration # file where it should appear only once).You can also use "--disable=all" to # disable everything first and then reenable specific checks. For example, if # you want to run only the similarities checker, you can use "--disable=all # --enable=similarities". If you want to run only the classes checker, but have # no Warning level messages displayed, use"--disable=all --enable=classes # --disable=W" disable=raw-checker-failed,bad-inline-option,locally-disabled,file-ignored,suppressed-message,deprecated-pragma,attribute-defined-outside-init,similarities,,import-error # Enable the message, report, category or checker with the given id(s). You can # either give multiple identifier separated by comma (,) or put this option # multiple time (only on the command line, not in the configuration file where # it should appear only once). See also the "--disable" option for examples. enable= [REPORTS] # Python expression which should return a note less than 10 (10 is the highest # note). You have access to the variables errors warning, statement which # respectively contain the number of errors / warnings messages and the total # number of statements analyzed. This is used by the global evaluation report # (RP0004). evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) # Template used to display messages. This is a python new-style format string # used to format the message information. See doc for all details #msg-template= # Set the output format. Available formats are text, parseable, colorized, json # and msvs (visual studio).You can also give a reporter class, eg # mypackage.mymodule.MyReporterClass. output-format=text # Tells whether to display a full report or only the messages reports=no # Activate the evaluation score. score=yes [REFACTORING] # Maximum number of nested blocks for function / method body max-nested-blocks=5 [FORMAT] # Expected format of line ending, e.g. empty (any line ending), LF or CRLF. expected-line-ending-format= # Regexp for a line that is allowed to be longer than the limit. ignore-long-lines=^\s*(# )??$ # Number of spaces of indent required inside a hanging or continued line. indent-after-paren=4 # String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 # tab). indent-string=' ' # Maximum number of characters on a single line. max-line-length=100 # Maximum number of lines in a module max-module-lines=2000 # Allow the body of a class to be on the same line as the declaration if body # contains single statement. single-line-class-stmt=no # Allow the body of an if to be on the same line as the test if there is no # else. single-line-if-stmt=no [VARIABLES] # List of additional names supposed to be defined in builtins. Remember that # you should avoid to define new builtins when possible. additional-builtins= # Tells whether unused global variables should be treated as a violation. allow-global-unused-variables=yes # List of strings which can identify a callback function by name. A callback # name must start or end with one of those strings. callbacks=cb_,_cb # A regular expression matching the name of dummy variables (i.e. expectedly # not used). 
dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_ # Argument names that match this expression will be ignored. Default to name # with leading underscore ignored-argument-names=_.*|^ignored_|^unused_ # Tells whether we should check for unused import in __init__ files. init-import=no # List of qualified module names which can have objects that can redefine # builtins. redefining-builtins-modules=six.moves,future.builtins [SIMILARITIES] # Ignore comments when computing similarities. ignore-comments=yes # Ignore docstrings when computing similarities. ignore-docstrings=yes # Ignore imports when computing similarities. ignore-imports=yes # Minimum lines number of a similarity. min-similarity-lines=3 [LOGGING] # Logging modules to check that the string format arguments are in logging # function parameter format logging-modules=logging [SPELLING] # Spelling dictionary name. Available dictionaries: none. To make it working # install python-enchant package. spelling-dict= # List of comma separated words that should not be checked. spelling-ignore-words= # A path to a file that contains private dictionary; one word per line. spelling-private-dict-file= # Tells whether to store unknown words to indicated private dictionary in # --spelling-private-dict-file option instead of raising a message. spelling-store-unknown-words=no [BASIC] # Regular expression matching correct argument names argument-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ # Regular expression matching correct attribute names attr-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ # Bad variable names which should always be refused, separated by a comma bad-names=foo,bar,baz,toto,tutu,tata # Regular expression matching correct class attribute names class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$ # Regular expression matching correct class names class-rgx=[A-Z_][a-zA-Z0-9]+$ # Regular expression matching correct constant names const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$ # Minimum line length for functions/classes that require docstrings, shorter # ones are exempt. docstring-min-length=-1 # Regular expression matching correct function names function-rgx=(([a-z][a-z0-9_]{2,50})|(_[a-z0-9_]*))$ # Good variable names which should always be accepted, separated by a comma good-names=i,j,k,w,e,s,n,x,y,z,cv,ax # Include a hint for the correct naming format with invalid-name include-naming-hint=no # Regular expression matching correct inline iteration names inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$ # Regular expression matching correct method names method-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ # Regular expression matching correct module names module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$ # Colon-delimited sets of names that determine each other's naming style when # the name regexes allow several styles. name-group= # Regular expression which should only match function or class names that do # not require a docstring. no-docstring-rgx=^_ # List of decorators that produce properties, such as abc.abstractproperty. Add # to this list to register other decorators that produce valid properties. property-classes=abc.abstractproperty # Regular expression matching correct variable names variable-rgx=(([a-z][a-z0-9_]{2,30})|(_[a-z0-9_]*))$ [TYPECHECK] # List of decorators that produce context managers, such as # contextlib.contextmanager. Add to this list to register other decorators that # produce valid context managers. 
contextmanager-decorators=contextlib.contextmanager # List of members which are set dynamically and missed by pylint inference # system, and so shouldn't trigger E1101 when accessed. Python regular # expressions are accepted. generated-members= # Tells whether missing members accessed in mixin class should be ignored. A # mixin class is detected if its name ends with "mixin" (case insensitive). ignore-mixin-members=yes # This flag controls whether pylint should warn about no-member and similar # checks whenever an opaque object is returned when inferring. The inference # can return multiple potential results while evaluating a Python object, but # some branches might not be evaluated, which results in partial inference. In # that case, it might be useful to still emit no-member and other checks for # the rest of the inferred objects. ignore-on-opaque-inference=yes # List of class names for which member attributes should not be checked (useful # for classes with dynamically set attributes). This supports the use of # qualified names. ignored-classes=optparse.Values,thread._local,_thread._local # List of module names for which member attributes should not be checked # (useful for modules/projects where namespaces are manipulated during runtime # and thus existing member attributes cannot be deduced by static analysis. It # supports qualified module names, as well as Unix pattern matching. ignored-modules= # Show a hint with possible names when a member name was not found. The aspect # of finding the hint is based on edit distance. missing-member-hint=yes # The minimum edit distance a name should have in order to be considered a # similar match for a missing member name. missing-member-hint-distance=1 # The total number of similar names that should be taken in consideration when # showing a hint for a missing member. missing-member-max-choices=1 [MISCELLANEOUS] # List of note tags to take in consideration, separated by a comma. notes=FIXME,XXX,TODO [DESIGN] # Maximum number of arguments for function / method max-args=10 # Maximum number of attributes for a class (see R0902). max-attributes=10 # Maximum number of boolean expressions in a if statement max-bool-expr=5 # Maximum number of branch for function / method body max-branches=12 # Maximum number of locals for function / method body max-locals=15 # Maximum number of parents for a class (see R0901). max-parents=7 # Maximum number of public methods for a class (see R0904). max-public-methods=20 # Maximum number of return / yield for function / method body max-returns=6 # Maximum number of statements in function / method body max-statements=50 # Minimum number of public methods for a class (see R0903). min-public-methods=2 [IMPORTS] # Allow wildcard imports from modules that define __all__. allow-wildcard-with-all=no # Analyse import fallback blocks. This can be used to support both Python 2 and # 3 compatible code, which means that the block might have code that exists # only in one or another interpreter, leading to false positives when analysed. analyse-fallback-blocks=no # Deprecated modules which should not be used, separated by a comma deprecated-modules=optparse,tkinter.tix # Create a graph of external dependencies in the given file (report RP0402 must # not be disabled) ext-import-graph= # Create a graph of every (i.e. 
internal and external) dependencies in the # given file (report RP0402 must not be disabled) import-graph= # Create a graph of internal dependencies in the given file (report RP0402 must # not be disabled) int-import-graph= # Force import order to recognize a module as part of the standard # compatibility libraries. known-standard-library= # Force import order to recognize a module as part of a third party library. known-third-party=enchant [CLASSES] # List of method names used to declare (i.e. assign) instance attributes. defining-attr-methods=__init__,__new__,setUp # List of member names, which should be excluded from the protected access # warning. exclude-protected=_asdict,_fields,_replace,_source,_make # List of valid names for the first argument in a class method. valid-classmethod-first-arg=cls # List of valid names for the first argument in a metaclass class method. valid-metaclass-classmethod-first-arg=mcs [EXCEPTIONS] # Exceptions that will emit a warning when being caught. Defaults to # "Exception" overgeneral-exceptions=Exception pooch-1.8.2/AUTHORS.md000066400000000000000000000063351463036314300143130ustar00rootroot00000000000000# Project Authors The following people have made contributions to the project (in alphabetical order by last name) and are considered "The Pooch Developers": * [Anderson Banihirwe](https://github.com/andersy005) - The US National Center for Atmospheric Research, USA (ORCID: [0000-0001-6583-571X](https://orcid.org/0000-0001-6583-571X)) * [Genevieve Buckley](https://github.com/GenevieveBuckley) - Monash University, Australia - (ORCID: [0000-0003-2763-492X](https://orcid.org/0000-0003-2763-492X)) * [Luke Gregor](https://github.com/lukegre) - Environmental Physics, ETH Zurich, Zurich, Switzerland (ORCID: [0000-0001-6071-1857](https://orcid.org/0000-0001-6071-1857)) * [Mathias Hauser](https://github.com/mathause) - Institute for Atmospheric and Climate Science, ETH Zurich, Zurich, Switzerland (ORCID: [0000-0002-0057-4878](https://orcid.org/0000-0002-0057-4878)) * [Mark Harfouche](https://github.com/hmaarrfk) - Ramona Optics Inc. 
- [0000-0002-4657-4603](https://orcid.org/0000-0002-4657-4603) * [Danilo Horta](https://github.com/horta) - EMBL-EBI, UK * [Hugo van Kemenade](https://github.com/hugovk) - Independent (Non-affiliated) (ORCID: [0000-0001-5715-8632](https://www.orcid.org/0000-0001-5715-8632)) * [Dominic Kempf](https://github.com/dokempf) - Scientific Software Center, Heidelberg University, Germany (ORCID: [0000-0002-6140-2332](https://www.orcid.org/0000-0002-6140-2332)) * [Kacper Kowalik](https://github.com/Xarthisius) - National Center for Supercomputing Applications, University of Illinois at Urbana-Champaign, USA (ORCID: [0000-0003-1709-3744](https://www.orcid.org/0000-0003-1709-3744)) * [John Leeman](https://github.com/jrleeman) * [Björn Ludwig](https://github.com/BjoernLudwigPTB) - Physikalisch-Technische Bundesanstalt, Germany (ORCID: [0000-0002-5910-9137](https://www.orcid.org/0000-0002-5910-9137)) * [Daniel McCloy](https://github.com/drammock) - University of Washington, USA (ORCID: [0000-0002-7572-3241](https://orcid.org/0000-0002-7572-3241)) * [Juan Nunez-Iglesias](https://github.com/jni) - Monash University, Australia (ORCID: [0000-0002-7239-5828](https://orcid.org/0000-0002-7239-5828)) * [Rémi Rampin](https://github.com/remram44) - New York University, USA (ORCID: [0000-0002-0524-2282](https://www.orcid.org/0000-0002-0524-2282)) * [Clément Robert](https://github.com/neutrinoceros) - Institut de Planétologie et d'Astrophysique de Grenoble, France (ORCID: [0000-0001-8629-7068](https://orcid.org/0000-0001-8629-7068)) * [Daniel Shapero](https://github.com/danshapero) - Polar Science Center, University of Washington Applied Physics Lab, USA (ORCID: [0000-0002-3651-0649](https://www.orcid.org/0000-0002-3651-0649)) * [Santiago Soler](https://github.com/santisoler) - CONICET, Argentina; Instituto Geofísico Sismológico Volponi, Universidad Nacional de San Juan, Argentina (ORCID: [0000-0001-9202-5317](https://www.orcid.org/0000-0001-9202-5317)) * [Matthew Turk](https://github.com/matthewturk) - University of Illinois at Urbana-Champaign, USA (ORCID: [0000-0002-5294-0198](https://www.orcid.org/0000-0002-5294-0198)) * [Leonardo Uieda](https://github.com/leouieda) - Universidade de São Paulo, Brazil (ORCID: [0000-0001-6123-9515](https://www.orcid.org/0000-0001-6123-9515)) * [Antonio Valentino](https://github.com/avalentino) pooch-1.8.2/CITATION.cff000066400000000000000000000034051463036314300145310ustar00rootroot00000000000000cff-version: 1.2.0 title: 'Pooch: A friend to fetch your data files' message: >- If you use this software, please cite it using the information in this file. 
type: software url: 'https://www.fatiando.org/pooch/' repository-code: 'https://github.com/fatiando/pooch' repository-artifact: 'https://pypi.org/project/pooch/' license: BSD-3-Clause preferred-citation: type: article title: 'Pooch: A friend to fetch your data files' journal: Journal of Open Source Software year: 2020 doi: 10.21105/joss.01943 volume: 5 issue: 45 start: 1943 license: CC-BY-4.0 authors: - given-names: Leonardo family-names: Uieda affiliation: University of Liverpool orcid: 'https://orcid.org/0000-0001-6123-9515' - given-names: Santiago Rubén family-names: Soler affiliation: Universidad Nacional de San Juan orcid: 'https://orcid.org/0000-0001-9202-5317' - given-names: Rémi family-names: Rampin affiliation: New York University orcid: 'https://orcid.org/0000-0002-0524-2282' - given-names: Hugo name-particle: van family-names: Kemenade orcid: 'https://orcid.org/0000-0001-5715-8632' - given-names: Matthew family-names: Turk affiliation: School of Information Sciences orcid: 'https://orcid.org/0000-0002-5294-0198' - given-names: Daniel family-names: Shapero affiliation: University of Washington orcid: 'https://orcid.org/0000-0002-3651-0649' - given-names: Anderson family-names: Banihirwe affiliation: National Center for Atmospheric Research orcid: 'https://orcid.org/0000-0001-6583-571X' - given-names: John family-names: Leeman affiliation: Leeman Geophysical orcid: 'https://orcid.org/0000-0002-3624-1821' pooch-1.8.2/CITATION.rst000066400000000000000000000023521463036314300146030ustar00rootroot00000000000000Citing Pooch ============ This is research software **made by scientists**. Citations help us justify the effort that goes into building and maintaining this project. If you used Pooch in your research, please consider citing our paper: Uieda, L., Soler, S.R., Rampin, R., van Kemenade, H., Turk, M., Shapero, D., Banihirwe, A., and Leeman, J. (2020). Pooch: A friend to fetch your data files. Journal of Open Source Software, 5(45), 1943. doi:10.21105/joss.01943 This is an open-access publication. The paper and the associated software review can be freely accessed at: https://doi.org/10.21105/joss.01943 Here is a Bibtex entry to make things easier if you’re using Latex: .. code:: bibtex @article{uieda2020, title = {{Pooch}: {A} friend to fetch your data files}, author = {Leonardo Uieda and Santiago Soler and R{\'{e}}mi Rampin and Hugo van Kemenade and Matthew Turk and Daniel Shapero and Anderson Banihirwe and John Leeman}, year = {2020}, doi = {10.21105/joss.01943}, url = {https://doi.org/10.21105/joss.01943}, month = jan, publisher = {The Open Journal}, volume = {5}, number = {45}, pages = {1943}, journal = {Journal of Open Source Software} } pooch-1.8.2/CODE_OF_CONDUCT.md000066400000000000000000000002341463036314300154330ustar00rootroot00000000000000# Contributor Code of Conduct Please refer to our organization-wide [Code of Conduct](https://github.com/fatiando/community/blob/main/CODE_OF_CONDUCT.md). pooch-1.8.2/CONTRIBUTING.md000066400000000000000000000030561463036314300150720ustar00rootroot00000000000000# Contributing Guidelines :tada: **First off, thank you for considering contributing to our project!** :tada: This is a community-driven project, so it's people like you that make it useful and successful. 
These are some of the many ways to contribute: * :bug: Submitting bug reports and feature requests * :memo: Writing tutorials or examples * :mag: Fixing typos and improving to the documentation * :bulb: Writing code for everyone to use **Please refer to our [organization-wide guidelines][contrib]** for general instructions on how to contribute to Fatiando projects. ## Ground Rules The goal is to maintain a diverse community that's pleasant for everyone. **Please be considerate and respectful of others**. Everyone must abide by our [Code of Conduct][coc] and we encourage all to read it carefully. ## Authorship and credit We strive to adequately reward and credit all those who contribute to our project in any way. This can vary from an acknowledgment in the release notes to authorship in scientific publications. **Please refer to our [Authorship Guidelines][authorship]** for more information. ## For maintainers You'll find more information about project maintenance (releases, reviews, etc) in our organization-wide [Maintainers Guide][maintenance]. [coc]: https://github.com/fatiando/community/blob/main/CODE_OF_CONDUCT.md [contrib]: https://github.com/fatiando/community/blob/main/CONTRIBUTING.md [maintenance]: https://github.com/fatiando/community/blob/main/MAINTENANCE.md [authorship]: https://github.com/fatiando/community/blob/main/AUTHORSHIP.md pooch-1.8.2/LICENSE.txt000066400000000000000000000027301463036314300144620ustar00rootroot00000000000000Copyright (c) 2018 The Pooch Developers All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of the copyright holders nor the names of any contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. pooch-1.8.2/MANIFEST.in000066400000000000000000000004641463036314300143770ustar00rootroot00000000000000# Exclude these files from source distributions. # setuptools_scm includes everything else by default. 
prune .github prune data prune doc prune env prune paper prune tools exclude .*.yml exclude .*rc exclude Makefile exclude .gitignore exclude .gitattributes exclude environment.yml include pooch/tests/data pooch-1.8.2/Makefile000066400000000000000000000030041463036314300142720ustar00rootroot00000000000000# Build, package, test, and clean PROJECT=pooch TESTDIR=tmp-test-dir-with-unique-name PYTEST_ARGS=--cov-config=../.coveragerc --cov-report=term-missing --cov=$(PROJECT) --doctest-modules -v --pyargs LINT_FILES=$(PROJECT) CHECK_STYLE=$(PROJECT) doc help: @echo "Commands:" @echo "" @echo " install install in editable mode" @echo " test run the test suite (including doctests) and report coverage" @echo " format automatically format the code" @echo " check run code style and quality checks" @echo " lint run pylint for a deeper (and slower) quality check" @echo " build build source and wheel distributions" @echo " clean clean up build and generated files" @echo "" build: python -m build . install: python -m pip install --no-deps -e . test: # Run a tmp folder to make sure the tests are run on the installed version mkdir -p $(TESTDIR) cd $(TESTDIR); pytest $(PYTEST_ARGS) $(PROJECT) cp $(TESTDIR)/.coverage* . rm -r $(TESTDIR) format: black $(CHECK_STYLE) burocrata --extension=py $(CHECK_STYLE) check: check-format check-style check-format: black --check $(CHECK_STYLE) burocrata --check --extension=py $(CHECK_STYLE) check-style: flake8 $(CHECK_STYLE) lint: pylint --jobs=0 $(LINT_FILES) clean: find . -name "*.pyc" -exec rm -v {} \; find . -name "*.orig" -exec rm -v {} \; find . -name ".coverage.*" -exec rm -v {} \; rm -rvf build dist MANIFEST *.egg-info __pycache__ .coverage .cache .pytest_cache $(PROJECT)/_version.py rm -rvf $(TESTDIR) dask-worker-space pooch-1.8.2/README.md000066400000000000000000000211171463036314300141160ustar00rootroot00000000000000Pooch: A friend to fetch your data files

Documentation (latest)Documentation (main branch)ContributingContact

Part of the Fatiando a Terra project

Latest version on PyPI Latest version on conda-forge Test coverage status Compatible Python versions. DOI used to cite Pooch

## About > Just want to download a file without messing with `requests` and `urllib`? > Trying to add sample datasets to your Python package? > **Pooch is here to help!** *Pooch* is a **Python library** that can manage data by **downloading files** from a server (only when needed) and storing them locally in a data **cache** (a folder on your computer). * Pure Python and minimal dependencies. * Download files over HTTP, FTP, and from data repositories like Zenodo and figshare. * Built-in post-processors to unzip/decompress the data after download. * Designed to be extended: create custom downloaders and post-processors. Are you a **scientist** or researcher? Pooch can help you too! * Host your data on a repository and download using the DOI. * Automatically download data using code instead of telling colleagues to do it themselves. * Make sure everyone running the code has the same version of the data files. ## Projects using Pooch [SciPy](https://github.com/scipy/scipy), [scikit-image](https://github.com/scikit-image/scikit-image), [xarray](https://github.com/pydata/xarray), [Ensaio](https://github.com/fatiando/ensaio), [GemPy](https://github.com/cgre-aachen/gempy), [MetPy](https://github.com/Unidata/MetPy), [napari](https://github.com/napari/napari), [Satpy](https://github.com/pytroll/satpy), [yt](https://github.com/yt-project/yt), [PyVista](https://github.com/pyvista/pyvista), [icepack](https://github.com/icepack/icepack), [histolab](https://github.com/histolab/histolab), [seaborn-image](https://github.com/SarthakJariwala/seaborn-image), [Open AR-Sandbox](https://github.com/cgre-aachen/open_AR_Sandbox), [climlab](https://github.com/climlab/climlab), [mne-python](https://github.com/mne-tools/mne-python), [GemGIS](https://github.com/cgre-aachen/gemgis), [SHTOOLS](https://github.com/SHTOOLS/SHTOOLS), [MOABB](https://github.com/NeuroTechX/moabb), [GeoViews](https://github.com/holoviz/geoviews), [ScopeSim](https://github.com/AstarVienna/ScopeSim), [Brainrender](https://github.com/brainglobe/brainrender), [pyxem](https://github.com/pyxem/pyxem), [cellfinder](https://github.com/brainglobe/cellfinder), [PVGeo](https://github.com/OpenGeoVis/PVGeo), [geosnap](https://github.com/oturns/geosnap), [BioCypher](https://github.com/biocypher/biocypher), [cf-xarray](https://github.com/xarray-contrib/cf-xarray), [Scirpy](https://github.com/scverse/scirpy), [rembg](https://github.com/danielgatis/rembg), [DASCore](https://github.com/DASDAE/dascore), [scikit-mobility](https://github.com/scikit-mobility/scikit-mobility), [Py-ART](https://github.com/ARM-DOE/pyart), [HyperSpy](https://github.com/hyperspy/hyperspy), [RosettaSciIO](https://github.com/hyperspy/rosettasciio), [eXSpy](https://github.com/hyperspy/exspy) > If you're using Pooch, **send us a pull request** adding your project to the list. ## Example For a **scientist downloading a data file** for analysis: ```python import pooch import pandas as pd # Download a file and save it locally, returning the path to it. # Running this again will not cause a download. Pooch will check the hash # (checksum) of the downloaded file against the given value to make sure # it's the right file (not corrupted or outdated). fname_bathymetry = pooch.retrieve( url="https://github.com/fatiando-data/caribbean-bathymetry/releases/download/v1/caribbean-bathymetry.csv.xz", known_hash="md5:a7332aa6e69c77d49d7fb54b764caa82", ) # Pooch can also download based on a DOI from certain providers. 
fname_gravity = pooch.retrieve( url="doi:10.5281/zenodo.5882430/southern-africa-gravity.csv.xz", known_hash="md5:1dee324a14e647855366d6eb01a1ef35", ) # Load the data with Pandas data_bathymetry = pd.read_csv(fname_bathymetry) data_gravity = pd.read_csv(fname_gravity) ``` For **package developers** including sample data in their projects: ```python """ Module mypackage/datasets.py """ import pkg_resources import pandas import pooch # Get the version string from your project. You have one of these, right? from . import version # Create a new friend to manage your sample data storage GOODBOY = pooch.create( # Folder where the data will be stored. For a sensible default, use the # default cache folder for your OS. path=pooch.os_cache("mypackage"), # Base URL of the remote data store. Will call .format on this string # to insert the version (see below). base_url="https://github.com/myproject/mypackage/raw/{version}/data/", # Pooches are versioned so that you can use multiple versions of a # package simultaneously. Use PEP440 compliant version number. The # version will be appended to the path. version=version, # If a version as a "+XX.XXXXX" suffix, we'll assume that this is a dev # version and replace the version with this string. version_dev="main", # An environment variable that overwrites the path. env="MYPACKAGE_DATA_DIR", # The cache file registry. A dictionary with all files managed by this # pooch. Keys are the file names (relative to *base_url*) and values # are their respective SHA256 hashes. Files will be downloaded # automatically when needed (see fetch_gravity_data). registry={"gravity-data.csv": "89y10phsdwhs09whljwc09whcowsdhcwodcydw"} ) # You can also load the registry from a file. Each line contains a file # name and it's sha256 hash separated by a space. This makes it easier to # manage large numbers of data files. The registry file should be packaged # and distributed with your software. GOODBOY.load_registry( pkg_resources.resource_stream("mypackage", "registry.txt") ) # Define functions that your users can call to get back the data in memory def fetch_gravity_data(): """ Load some sample gravity data to use in your docs. """ # Fetch the path to a file in the local storage. If it's not there, # we'll download it. fname = GOODBOY.fetch("gravity-data.csv") # Load it with numpy/pandas/etc data = pandas.read_csv(fname) return data ``` ## Getting involved 🗨️ **Contact us:** Find out more about how to reach us at [fatiando.org/contact](https://www.fatiando.org/contact/). 👩🏾‍💻 **Contributing to project development:** Please read our [Contributing Guide](https://github.com/fatiando/pooch/blob/main/CONTRIBUTING.md) to see how you can help and give feedback. 🧑🏾‍🤝‍🧑🏼 **Code of conduct:** This project is released with a [Code of Conduct](https://github.com/fatiando/community/blob/main/CODE_OF_CONDUCT.md). By participating in this project you agree to abide by its terms. > **Imposter syndrome disclaimer:** > We want your help. **No, really.** There may be a little voice inside your > head that is telling you that you're not ready, that you aren't skilled > enough to contribute. We assure you that the little voice in your head is > wrong. Most importantly, **there are many valuable ways to contribute besides > writing code**. > > *This disclaimer was adapted from the* > [MetPy project](https://github.com/Unidata/MetPy). ## License This is free software: you can redistribute it and/or modify it under the terms of the **BSD 3-clause License**. 
A copy of this license is provided in [`LICENSE.txt`](https://github.com/fatiando/pooch/blob/main/LICENSE.txt). pooch-1.8.2/data/000077500000000000000000000000001463036314300135465ustar00rootroot00000000000000pooch-1.8.2/data/store.zip000066400000000000000000000014141463036314300154260ustar00rootroot00000000000000PK Mstore/UT |{[|{[ux PK M store/subdir/UT |{[|{[ux PK Mm;;store/subdir/tiny-data.txtUT |{[|{[ux # A tiny data file for test purposes only 1 2 3 4 5 6 PK Mm;;store/tiny-data.txtUT |{[|{[ux # A tiny data file for test purposes only 1 2 3 4 5 6 PK MAstore/UT|{[ux PK M A@store/subdir/UT|{[ux PK Mm;;store/subdir/tiny-data.txtUT|{[ux PK Mm;;store/tiny-data.txtUT|{[ux PKXpooch-1.8.2/data/subdir/000077500000000000000000000000001463036314300150365ustar00rootroot00000000000000pooch-1.8.2/data/subdir/tiny-data.txt000066400000000000000000000000731463036314300174710ustar00rootroot00000000000000# A tiny data file for test purposes only 1 2 3 4 5 6 pooch-1.8.2/data/tiny-data.txt000066400000000000000000000000731463036314300162010ustar00rootroot00000000000000# A tiny data file for test purposes only 1 2 3 4 5 6 pooch-1.8.2/doc/000077500000000000000000000000001463036314300134025ustar00rootroot00000000000000pooch-1.8.2/doc/Makefile000066400000000000000000000031411463036314300150410ustar00rootroot00000000000000# Makefile for Sphinx documentation # You can set these variables from the command line. SPHINXOPTS = SPHINXBUILD = sphinx-build SPHINXAUTOGEN = sphinx-autogen BUILDDIR = _build # Internal variables. ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(SPHINXOPTS) . .PHONY: help clean html linkcheck doctest api all: html help: @echo "Please use \`make ' where is one of" @echo " all generate the complete webpage" @echo " html make only the HTML files from the existing rst sources" @echo " linkcheck check all external links for integrity" @echo " doctest run all doctests embedded in the documentation (if enabled)" clean: rm -rf $(BUILDDIR)/html/* rm -rf $(BUILDDIR)/doctrees rm -rf $(BUILDDIR)/linkcheck rm -rf api/generated rm -rf gallery rm -rf tutorials rm -rf sample_data rm -rf .ipynb_checkpoints api: @echo @echo "Building API docs." @echo $(SPHINXAUTOGEN) -i -t _templates -o api/generated api/index.rst html: api @echo @echo "Building HTML files." @echo $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." linkcheck: api $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck @echo @echo "Link check complete; look for any errors in the above output " \ "or in $(BUILDDIR)/linkcheck/output.txt." doctest: $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest @echo "Testing of doctests in the sources finished, look at the " \ "results in $(BUILDDIR)/doctest/output.txt." 
serve: cd $(BUILDDIR)/html && python -m http.server 8001 pooch-1.8.2/doc/_static/000077500000000000000000000000001463036314300150305ustar00rootroot00000000000000pooch-1.8.2/doc/_static/banner.svg000066400000000000000000000370631463036314300170270ustar00rootroot00000000000000 image/svg+xml pooch-1.8.2/doc/_static/favicon.png000066400000000000000000000122161463036314300171650ustar00rootroot00000000000000PNG  IHDR szzzTXtRaw profile type exifxmP 0g;xҦR7ņDIۓzmE[o ҥyi ʴ:ysڲC yTOBzOb]R_#))S)ka뺜WX7B0kΉ7ům\2K nXΊߝv84Y\LiCCPICC profilex}=H@ߦ";dNv" EjVL.& IZpg⬫ 8;8)H%1q{r 4*L…f:Ui0&3˘|=|,?G0m MOf%Y%>'4ď\W<~\tYa3'V:L8FQ5*-ZZ/ eEX j(QuR,<qrȱ*4Ȯ~*LOyI8@.Ь;4O3p0IzEm⺭){0dȦJAZB7倡[o[@z"eoִgr s xiTXtXML:com.adobe.xmp *bKGD pHYs+tIME4, 6IDATXýkh\E솸5iVFP6],DiMj+RXJTPbU8F$~!MDMPsh*kkԸͣcI\7x>͜{gΜgYHXGcvbd)*C"Y`;P aDFo1Y;֑%NI04*nKs3I`Wf|;#F #C089ڀbF1`A_*kƯjiC*``bjgc ɛFї'|`$^Վ7ہ,Ճvu xX?Yy0@\Y Pa;>)8ВfZ5aiYksRhMj"[ 7Yw&/8p*Y t @ L4y[{|)BњY%)*Ywg^x U(D_E`G`dS! ;,]su -IqgJCuVIP(𒝓`b@(M;v2l8j|| JJe|[r<{ԯ阵'T_FP|DJs*9@[=$6zΌb 6{5Ul: .W;/:E:χ%z_ Tp~IQOh||k[:}BM0aԘpKMׁ=yhYU&8ʎ:zK(/a]IC=\}3mXG c])u^$or~JnP'ٳpar|lW|gT2تeSDV2 % |jg>1Ɓe|oeF|'&IENDB`pooch-1.8.2/doc/_static/pooch-logo.png000066400000000000000000000647601463036314300176210ustar00rootroot00000000000000PNG  IHDRߊsBIT|d pHYs+tEXtSoftwarewww.inkscape.org< IDATxy|U$쨸!$A!E!h+Km Nwm2t.ppNLe]VimQBV%$!n`$"*9~Ϲ|,x-sgeܲnJpdiZ!}nNdȵ+ ]Իٟu&[MѨt&W'bLA*3Vdg )~W3\g1&.=ta?{.0XC7%'xWrŘk5䳆n TI WֳӘc_L)!kܘH4/:0f싞IW.N:19 # 6%_$VU2K9HcLXn?k0杬ĩ~ꥂcL ,TͮØ>MbW* J]1ƀ@"7,7c,A)J\ vSiz0:ကky1!ϫ2bNSLةP2PKPy1Yy 7FmuՅgwS 8ↇG Eoct\=M5t\0L/\:1&z(rmށ"kƙjax) *0ucL^,WKͺuBc ]hhF|^T.Fc??=,ɛjA 1c!/':LYC79WwR}qg(w;/|ɟ +>{Mݺ4xiP:1&/ Wxu$n9r 0ucLlA $Mt\p8ƘDkO_>wF Ia D֯8ucL Ws];uSHԾA4D2_>k];k&2<:1 U{ToƘ5tcLΔ*̫uȇbfL:XA{"U^LѻgX>CQA#6 CPƂƀN9obKkg]:EДE1oTym"dw6N_z'##iE5e DkgY`u ^Rlˉ:=29B;\H* :y35C@^ Xmx瀒1qM@d%lJ噭=Z<ჼL-MSi4`"u ѯ5͛1°89R/_O,l#|cbOϛ39 RTrp20qbn7dzd9yRkyQџ6wPl[fbrGs k9R?͡}y:z/ΙuOz!+'nqPǿwd zoArmYMsgue =G_r:G( ? wm֯ph&6-g]),Hzr>!uKq|3sdoXCϑjAYE~ZUhQ^ƨr@fJ4)T8c^T'P0x]SaN ](^*S&?8׉ ӽNl)2Psjۣfz o7F/:Ћf/XZ̾L$DR=[yJ(+oW^@&HzK?")L?&s,> r)zsVz\kyv5K.Acj\S^;oi~CA%#Zu0"NW݇2XOk'D|l{,Osb D_8}+XwI?-~X]>Vom[O uM\:ٳ1.fc+~yY?tcO@ kyٟ~j;sȩ|yތ]1{ў .<9isdwaN! erҚy']0S֚y25^v~]Tb ME@]kmݴR"U5͝ђHkHxySpyrF#&\1k4o Ln  K?:XCwdg|pwh:zmʔ e:sYK\0柵ɟyGp$ v4 Ň#kݺTU- I]xuڽ<ś473?3[g;7)MݭJ]ٞ5t7;p^#ȯSvO5e`@7'W[]牑7^:9< 쿛pd3 q)PE(ƹ3sĸWwU_ gxzj\NfgS\r^&fNoW[rJ2 ox\ϸu /S/.duMi:5o_{]>l&(R7{ Aq/ND8o?s$K[嗊u~wWon +y+5wL߳OYNaZNsg=͋7ܘ eTy/PM:BoFͮd2~(?oq1YuYƊ4›ޱtDbFP՝xv,x,yYЍq`uW3^A6W8\`D~-MXܸxX߂:Kɟyv>11S7RUvd*"XG֟鎲1۫ b;fΒO*y3nLl^u:^ˤzW|N V=yA/p%Vu:_0[C7@cca@slWB_*~5 r$_TynLx^Uȫf:)N~Uske:KgYC7wSe1f^vn1)~WIKqT3x,n1h3$yP71g-OX'Shc3Ls QS>ʥaXC7UGmht#j@?m,c1&QKjYa鯟is6B7(YiAos#b*0cI-+MsDI/1ƘiWEQRe{5tc1$@e{5tc1蟳Y 991ƘjہsDhT)Bkc]Lj~,9tcL"hہ Jo?T TS{1Ch/"'6@_C5ztL{as>~}&Z Y/(2zgKrcJGѫyP(c1d E׷gD 4]ŶFd  D2lL\յWpCDdlc x4OcPpQ2;{m~ FB8t銗@'H;*2 =3ke/MĖ74]L[ʿ]̧ޭ9P}}Ѝ1yZ ЏRD#0Ȕ6ׁr֯XΙGm1&DP}odS$#2x銵(R"ptS"2[тh< mz+Թ@,Ej,?#Z񲶔/t75]Yn];9vO1&4]Y 0u=}xRX;WNK]O5ӫlnqJjQPA ~@Nԏ)WRw0zOnqN&w+]0TU29" =gEЍ1P>7cЕU>. Dt!"{nЍ1+FT'O*uT~)tecRz:X(Y{ثW!n>x>O 4. GI$cv!eӣ؝L[Ɩ#YjBȤ5r#~Is{Z1EN(a# C:=[o>6L^۩ dMM~CDOc@۪M 8CjҾW攚Fm-_e"m%-%.}&|gW)wc .C'Cr*EQtu DQ˄NZiisɟqNųnLжQމ' &RrT?*wA.<#5GN~:GXQ.N)wcJӕQNP GQG?1ɴօ)$Bm[?Nf-3衺aQ,Lg 촡ݘeu Zw,IHL]JgɴƞЕZHӕBw,6BRi> |.:|ha4wn^x(\^]')ohkQT)QVy-'Iw5tc@WTi|!GVQxT~-cL(j> IDATG]gMw5t[gNhۡ  N Fuq2酎Ն.@$$p͊Ѝm.Ӗs4]LћmhԻ[[z+ԇ $#t䯽n̎F[ʿGf Cs᥮sE~~CW!`s̮&?柽 hs#I=n>`MWt r!?`͡( IM{#vz^!l|!ZF$ =UZ:k&Odm)_D5!IH.S%=HVOk!įto씗H1MWt#xn"v:W|7BRݹ}뢨W44HD?x0XC79-tE 'd ֗Q#'z!S=.M.Mk _53" ܢ#)u(j}uy!RZC7QG#/o]$Iȯ]j:~DMQ(I(#hw! дm4+*kqFIxV? Hj=^v! 
pooch-1.8.2/doc/_static/readme-banner.png [binary PNG image data omitted] pooch-1.8.2/doc/_static/readme-banner.svg [SVG image markup omitted; banner text: "A friend to fetch your data files"] pooch-1.8.2/doc/_static/style.css span.project-version { color: #999999; font-size: 0.8em; font-weight: 600; margin-left: 5px; } .table td { padding: 0.5rem; } .table td p { margin-bottom: 0 !important; } .py-obj { margin-right: 0.15rem; } .lead { font-size: 1.5rem; } .front-page-callout { margin-bottom: 4rem; } .text-large { font-size: 1.35rem; } /* Overwrite the main link definition in the book-theme for button links */ main.bd-content #main-content a.btn:hover { color: white; } pooch-1.8.2/doc/_templates/ pooch-1.8.2/doc/_templates/autosummary/ pooch-1.8.2/doc/_templates/autosummary/base.rst {{ fullname | escape | underline }} .. currentmodule:: {{ module }} .. auto{{ objtype }}:: {{ objname }} .. raw:: html
pooch-1.8.2/doc/_templates/autosummary/class.rst000066400000000000000000000013121463036314300217610ustar00rootroot00000000000000{{ fullname | escape | underline }} .. currentmodule:: {{ module }} .. autoclass:: {{ objname }} .. rubric:: Methods Summary .. autosummary:: {% for item in members %} {% if item in ['__call__'] %} {{ objname }}.{{ item }} {% endif %} {% endfor %} {% for item in methods %} {% if item != '__init__' %} {{ objname }}.{{ item }} {% endif %} {% endfor %} ---- {% for item in members %} {% if item in ['__call__'] %} .. automethod:: {{ objname }}.{{ item }} {% endif %} {% endfor %} {% for item in methods %} {% if item != '__init__' %} .. automethod:: {{ objname }}.{{ item }} {% endif %} {% endfor %} .. raw:: html
pooch-1.8.2/doc/_templates/autosummary/exception.rst000066400000000000000000000002351463036314300226550ustar00rootroot00000000000000{{ fullname | escape | underline }} .. currentmodule:: {{ module }} .. autoexception:: {{ objname }} .. raw:: html
pooch-1.8.2/doc/_templates/autosummary/function.rst000066400000000000000000000002331463036314300225020ustar00rootroot00000000000000{{ fullname | escape | underline }} .. currentmodule:: {{ module }} .. autofunction:: {{ objname }} .. raw:: html
pooch-1.8.2/doc/_templates/autosummary/module.rst000066400000000000000000000014151463036314300221450ustar00rootroot00000000000000.. raw:: html
``{{ fullname }}`` {% for i in range(fullname|length + 15) %}-{% endfor %} .. raw:: html
.. automodule:: {{ fullname }} {% block classes %} {% if classes %} .. rubric:: Classes .. autosummary:: :toctree: ./ {% for item in classes %} {{ fullname }}.{{ item }} {% endfor %} {% endif %} {% endblock %} {% block functions %} {% if functions %} .. rubric:: Functions .. autosummary:: :toctree: ./ {% for item in functions %} {{ fullname }}.{{ item }} {% endfor %} {% endif %} {% endblock %} {% block exceptions %} {% if exceptions %} .. rubric:: Exceptions .. autosummary:: :toctree: ./ {% for item in exceptions %} {{ fullname }}.{{ item }} {% endfor %} {% endif %} {% endblock %} .. raw:: html
pooch-1.8.2/doc/_templates/layout.html {# Import the theme's layout. #} {% extends "!layout.html" %} {% block extrahead %} {% endblock %} {% block htmltitle %} {% if title == '' or 'no title' in title or title == 'Home' %} {{ docstitle|striptags|e }} {% else %} {{ title|striptags|e }} | {{ docstitle|striptags|e }} {% endif %} {% endblock %} pooch-1.8.2/doc/about.rst .. _about: Why use Pooch? ============== Use cases --------- .. tab-set:: .. tab-item:: Just download a file **Who**: Scientists/researchers/developers looking to simply download a file. Pooch makes it easy to download a file (one function call). On top of that, it also comes with some bonus features: * Download and cache your data files locally (so it's only downloaded once). * Make sure everyone running the code has the same version of the data files by verifying cryptographic hashes. * Multiple download protocols HTTP/FTP/SFTP and basic authentication. * Download from Digital Object Identifiers (DOIs) issued by repositories like figshare and Zenodo. * Built-in utilities to unzip/decompress files upon download **Start here:** :ref:`retrieve` .. tab-item:: Manage sample data for a Python program **Who**: Package developers wanting to include sample data for use in tutorials and tests. Pooch was designed for this! It offers: * Pure Python and :ref:`minimal dependencies `. * Download a file only if necessary. * Verification of download integrity through cryptographic hashes. * Extensible design: plug in custom download and post-processing functions. * Built-in utilities to unzip/decompress files upon download * Multiple download protocols HTTP/FTP/SFTP and basic authentication. * User control of data cache location through environment variables. **Start here:** :ref:`intermediate` History ------- Pooch was born out of shared need between the `Fatiando a Terra `__ libraries and `MetPy `__. During the `Scipy Conference 2018 `__ sprints, developers from both projects got together and, realizing the shared necessity, devised a package that would combine the best of the existing functionality already present in each project and extend its capabilities. pooch-1.8.2/doc/api/ pooch-1.8.2/doc/api/index.rst .. _api: List of functions and classes (API) =================================== .. note:: **All functions and classes should be accessed from the** :mod:`pooch` **top-level namespace.** Modules inside of the :mod:`pooch` package are meant mostly for internal organization. Please **avoid importing** directly from submodules since functions/classes may be moved around. .. automodule:: pooch Core ---- .. autosummary:: :toctree: generated/ pooch.create pooch.Pooch pooch.retrieve Utilities --------- .. autosummary:: :toctree: generated/ pooch.os_cache pooch.make_registry pooch.file_hash pooch.check_version pooch.get_logger Downloaders ----------- .. autosummary:: :toctree: generated/ pooch.HTTPDownloader pooch.FTPDownloader pooch.SFTPDownloader pooch.DOIDownloader Processors ---------- .. autosummary:: :toctree: generated/ pooch.Unzip pooch.Untar pooch.Decompress Miscellaneous ------------- ..
autosummary:: :toctree: generated/ pooch.test pooch-1.8.2/doc/authentication.rst000066400000000000000000000047241463036314300171620ustar00rootroot00000000000000.. _authentication: Authentication ============== HTTP authentication ------------------- Use the :class:`~pooch.HTTPDownloader` class directly to provide login credentials to HTTP servers that require basic authentication. For example: .. code:: python from pooch import HTTPDownloader def fetch_protected_data(): """ Fetch a file from a server that requires authentication """ # Let the downloader know the login credentials download_auth = HTTPDownloader(auth=("my_username", "my_password")) fname = GOODBOY.fetch("some-data.csv", downloader=download_auth) data = pandas.read_csv(fname) return data It's probably not a good idea to hard-code credentials in your code. One way around this is to ask users to set their own credentials through environment variables. The download code could look something like so: .. code:: python import os def fetch_protected_data(): """ Fetch a file from a server that requires authentication """ # Get the credentials from the user's environment username = os.environ.get("SOMESITE_USERNAME") password = os.environ.get("SOMESITE_PASSWORD") # Let the downloader know the login credentials download_auth = HTTPDownloader(auth=(username, password)) fname = GOODBOY.fetch("some-data.csv", downloader=download_auth) data = pandas.read_csv(fname) return data FTP/SFTP with authentication ---------------------------- Pooch also comes with the :class:`~pooch.FTPDownloader` and :class:`~pooch.SFTPDownloader` downloaders that can be used when files are distributed over FTP or SFTP (secure FTP). .. note:: To download files over SFTP, `paramiko `__ needs to be installed. Sometimes the FTP server doesn't support anonymous FTP and needs authentication or uses a non-default port. In these cases, pass in the downloader class explicitly (works with both FTP and SFTP): .. code:: python import os def fetch_c137(): """ Load the C-137 sample data as a pandas.DataFrame (over FTP this time). """ username = os.environ.get("MYDATASERVER_USERNAME") password = os.environ.get("MYDATASERVER_PASSWORD") download_ftp = pooch.FTPDownloader(username=username, password=password) fname = GOODBOY.fetch("c137.csv", downloader=download_ftp) data = pandas.read_csv(fname) return data pooch-1.8.2/doc/changes.rst000066400000000000000000000770041463036314300155540ustar00rootroot00000000000000.. 
_changes: Changelog ========= Version 1.8.2 ------------- Released on: 2024/06/06 DOI: https://doi.org/10.5281/zenodo.11493461 Bug fixes: * Use a variable to set the default request timeout (`#418 `__) Documentation: * Add HyperSpy, RosettaSciIO, eXSpy to projects using pooch (`#408 `__) * Add more packages using Pooch (`#403 `__) Maintenance: * Add optional dependencies to environment.yml (`#413 `__) * Run tests with oldest dependencies on x86 macos (`#414 `__) * Mark additional tests requiring network (`#412 `__) * Fix package description in pyproject.toml (`#407 `__) * Setup Trusted Publisher deployment to PyPI (`#406 `__) * Use Burocrata to check and add license notices (`#402 `__) * Use pyproject.toml instead of setup.cfg (`#401 `__) This release contains contributions from: * Sandro * Jonas Lähnemann * Santiago Soler * Leonardo Uieda Version 1.8.1 ------------- Released on: 2024/02/19 DOI: https://doi.org/10.5281/zenodo.10680982 Bug fixes: * Use the ID instead of persistentID for Dataverse downloads since some repositories don't issue persistentIDs but all issue normal IDs (`#355 `__) * Ensure all archive members are unpacked in subsequent uses of ``Untar``/``Unzip`` if the first call only asked for a few members (`#365 `__) Documentation: * Move "Projects using Pooch" further up the README (`#386 `__) * Update the versions of sphinx and its plugins (`#385 `__) Maintenance: * Remove many deprecated pylint options (`#329 `__) * Use Dependabot to manage GitHub Actions (`#387 `__) * Simplify the test GitHub Actions workflow (`#384 `__) * Update format for Black 24.1.1 (`#383 `__) This release contains contributions from: * Mark Harfouche * Juan Nunez-Iglesias * Santiago Soler * Leonardo Uieda Version 1.8.0 ------------- Released on: 2023/10/24 DOI: https://doi.org/10.5281/zenodo.10037888 Bug fixes: * Fix bug: add support for old and new Zenodo APIs (`#375 `__) New features: * Only create local data directories if necessary (`#370 `__) * Speed up import time by lazy loading requests (`#328 `__) Maintenance: * Add support for Python 3.11 (`#348 `__) * Only run CI cron job for the upstream repository (`#361 `__) Documentation: * Add GemGIS to list of projects using Pooch (`#349 `__) * Fix spelling of Dataverse (`#353 `__) * Fix grammar on retrieve documentation (`#359 `__) This release contains contributions from: * Hugo van Kemenade * AlexanderJuestel * Mark Harfouche * Philip Durbin * Rob Luke * Santiago Soler * Stephan Hoyer Version 1.7.0 ------------- Released on: 2023/02/27 DOI: https://doi.org/10.5281/zenodo.7678844 Bug fixes: * Make archive extraction always take members into account (`#316 `__) * Figshare downloaders fetch the correct version, instead of always the latest one. 
(`#343 `__) New features: * Allow spaces in filenames in registry files (`#315 `__) * Refactor ``Pooch.is_available`` to use downloaders (`#322 `__) * Add support for downloading files from Dataverse DOIs (`#318 `__) * Add a new ``Pooch.load_registry_from_doi`` method that populates the Pooch registry using DOI-based data repositories (`#325 `__) * Support urls for Zenodo repositories created through the GitHub integration service, which include slashes in the filename of the main zip files (`#340 `__) * Automatically add a trailing slash to ``base_url`` on ``pooch.create`` (`#344 `__) Maintenance: * Drop support for Python 3.6 (`#299 `__) * Port from deprecated ``appdirs`` to ``platformdirs`` (`#339 `__) * Update version of Codecov's Action to v3 (`#345 `__) Documentation: * Update sphinx, theme, and sphinx-panels (`#300 `__) * Add CITATION.cff for the JOSS article (`#308 `__) * Use Markdown for the README (`#311 `__) * Improve docstring of `known_hash` in `retrieve` function (`#333 `__) * Replace link to Pooch's citation with a BibTeX code snippet (`#335 `__) Projects that started using Pooch: * Open AR-Sandbox (`#305 `__) * ``climlab`` (`#312 `__) * SciPy (`#320 `__) * ``napari`` (`#321 `__) * ``mne-python`` (`#323 `__) This release contains contributions from: * Alex Fikl * Anirudh Dagar * Björn Ludwig * Brian Rose * Dominic Kempf * Florian Wellmann * Gabriel Fu * Kyle I S Harrington * Leonardo Uieda * myd7349 * Rowan Cockett * Santiago Soler Version 1.6.0 ------------- Released on: 2022/01/24 DOI: https://doi.org/10.5281/zenodo.5793074 .. warning:: **Pooch v1.6.0 is the last release that is compatible with Python 3.6.** Important notes: * Pooch now specifies version bounds for our required dependencies and a plan for dropping support for older versions. Please revise it if you depend on Pooch. Enhancements: * Add option to disable updates on hash mismatch (`#291 `__ and `#292 `__) * Allow enabling progress bars with an argument in ``Pooch.fetch`` and ``retrieve`` (`#277 `__) Documentation: * Use real data URLs in the README example code (`#295 `__) * Tell users to import from the top-level namespace (`#288 `__) * Update the contact link to `fatiando.org/contact `__ (`#282 `__) * Refer the community guides to `fatiando/community `__ (`#281 `__) * Mention in docs that figshare collections aren't supported (`#275 `__) Maintenance: * Replace Google Analytics for `Plausible `__ to make our docs more privacy-friendly (`#293 `__) * Use `Dependente `__ to capture dependencies on CI (`#289 `__) * Use ``build`` instead of setup.py (`#287 `__) * Run the tests weekly on GitHub Actions (`#286 `__) * Set minimum required version of dependencies (`#280 `__) * Rename "master" to "main" throughout the project (`#278 `__) * Remove trailing slash from GitHub handle in ``AUTHORS.md`` (`#279 `__) This release contains contributions from: * Santiago Soler * Genevieve Buckley * Ryan Abernathey * Ryan May * Leonardo Uieda Version 1.5.2 ------------- Released on: 2021/10/11 DOI: https://doi.org/10.5281/zenodo.5560923 Bug fixes: * Fix bug when unpacking an entire subfolder from an archive. Now both unpacking processors (``Untar`` and ``Unzip``) handle ``members`` that are folders (not files) correctly. 
(`#266 `__) Enhancements: * Add support for Python 3.10 (`#260 `__) * Point to the user's code for the file_hash warning instead of our internal code (which isn't very useful) (`#259 `__) Documentation: * Fix typo in a variable name of the examples in the documentation (`#268 `__) * Fix typo when specifying the SFTP protocol in the about page (`#267 `__) Maintenance: * Remove old testing checks if running on TravisCI (`#265 `__) This release contains contributions from: * Santiago Soler * Hugo van Kemenade * Mark Harfouche * Leonardo Uieda Version 1.5.1 ------------- Released on: 2021/08/24 DOI: https://doi.org/10.5281/zenodo.5242882 .. warning:: **Please use** ``from pooch import file_hash`` **instead of** ``from pooch.utils import file_hash``. This is backwards compatible with all previous versions of Pooch. We recommend importing all functions and classes from the top-level namespace. Bug fixes: * Make ``file_hash`` accessible from the ``pooch.utils`` module again. Moving this function to ``pooch.hashes`` caused crashes downstream. To prevent these crashes, add a wrapper back to utils that issues a warning that users should import from the top-level namespace instead. (`#257 `__) * Use a mirror of the test data directory in tests that write to it. (`#255 `__) * Add a pytest mark for tests accessing the network so that they can easily excluded when testing offline. (`#254 `__) This release contains contributions from: * Antonio Valentino * Leonardo Uieda Version 1.5.0 ------------- Released on: 2021/08/23 DOI: https://doi.org/10.5281/zenodo.5235242 New features: * Add support for non-cryptographic hashes from the xxhash package. They aren't as safe (but safe enough) and compute in fractions of the time from SHA or MD5. This makes it feasible to use hash checking on large datasets. (`#242 `__) * Add support for using figshare and Zenodo DOIs as URLs (with the protocol ``doi:{DOI}/{file name}``, which works out-of-the-box with ``Pooch.fetch`` and ``retrieve``). Can only download 1 file from the archive (not the full archive) and the file name must be specified in the URL. (`#241 `__) Maintenance: * Move hash functions to their own private module. No changes to the public API. (`#244 `__) * Run CI jobs on Python version extremes instead of all supported versions (`#243 `__) This release contains contributions from: * Mark Harfouche * Leonardo Uieda Version 1.4.0 ------------- Released on: 2021/06/08 DOI: https://doi.org/10.5281/zenodo.4914758 Bug fixes: * Fix bug in ``Untar`` and ``Unzip`` when the archive contains subfolders (`#224 `__) Documentation: * New theme (``sphinx-book-theme``) and layout of the documentation (`#236 `__ `#237 `__ `#238 `__) Enhancements: * Add support for non-tqdm progress bars on HTTPDownloader (`#228 `__) * Allow custom unpack locations in ``Untar`` and ``Unzip`` (`#224 `__) Maintenance: * Replace versioneer with setuptools-scm (`#235 `__) * Automatically check license notice on code files (`#231 `__) * Don't store documentation HTML as CI build artifacts (`#221 `__) This release contains contributions from: * Leonardo Uieda * Agustina Pesce * Clément Robert * Daniel McCloy Version 1.3.0 ------------- Released on: 2020/11/27 DOI: https://doi.org/10.5281/zenodo.4293216 Bug fixes: * Properly handle capitalized hashes. On Windows, users might sometimes get capitalized hashes from the system. To avoid false hash mismatches, convert stored and computed hashes to lowercase before doing comparisons. 
Convert hashes to lowercase when reading from the registry to make sure stored hashes are always lowercase. (`#214 `__) New features: * Add option to retry downloads if they fail. The new ``retry_if_failed`` option to ``pooch.create`` and ``pooch.Pooch`` allows retrying the download the specified number of times in case of failures due to hash mismatches (coming from Pooch) or network issues (coming from ``requests``). This is useful for running downloads on CI that tend to fail sporadically. Waits a period of time between consecutive downloads starting with 1s and increasing up to 10s in 1s increments. (`#215 `__) * Allow user defined decompressed file names. Introduce new ``name`` argument to ``pooch.Decompress`` to allow user defined file names. Defaults to the previous naming convention for backward compatibility. (`#203 `__) Documentation: * Add seaborn-image to list of packages using Pooch (`#218 `__) Maintenance: * Add support for Python 3.9. (`#220 `__) * Drop support for Python 3.5. (`#204 `__) * Use pip instead of conda to speed up Actions (`#216 `__) * Add license and copyright notice to every .py file (`#213 `__) This release contains contributions from: * Leonardo Uieda * Danilo Horta * Hugo van Kemenade * SarthakJariwala Version 1.2.0 ------------- Released on: 2020/09/10 DOI: https://doi.org/10.5281/zenodo.4022246 .. warning:: **Pooch v1.2.0 is the last release that is compatible with Python 3.5.** Bug fixes: * Fix FTP availability check when the file is in a directory. If the data file is not in the base directory, the ``Pooch.is_available`` test was broken since we were checking for the full path in ``ftp.nlst`` instead of just the file name. (`#191 `__) New features: * Add the SFTPDownloader class for secure FTP downloads (`#165 `__) * Expose Pooch version as ``pooch.__version__`` (`#179 `__) * Allow line comments in registry files with ``#`` (`#180 `__) Enhancements: * Point to Unzip/tar from Decompress docs and errors (`#200 `__) Documentation: * Re-factor the documentation into separate pages (`#202 `__) * Add warning to the docs about dropping Python 3.5 (`#201 `__) * Add `histolab `__ to the Pooch-powered projects (`#189 `__) Maintenance: * Push documentation to GitHub Pages using Actions (`#198 `__) * Add GitHub Actions workflow for publishing to PyPI (`#196 `__) * Set up GitHub Actions for testing and linting (`#194 `__) * Test FTP downloads using a local test server (`#192 `__) This release contains contributions from: * Leonardo Uieda * Hugo van Kemenade * Alessia Marcolini * Luke Gregor * Mathias Hauser Version 1.1.1 ------------- Released on: 2020/05/14 DOI: https://doi.org/10.5281/zenodo.3826458 Bug fixes: * Delay data cache folder creation until the first download is attempted. As seen in `recent issues in scikit-image `__, creating the data folder in ``pooch.create`` can cause problems since this function is called at import time. This means that importing the package in parallel can cause race conditions and crashes. To prevent that from happening, delay the creation of the cache folder until ``Pooch.fetch`` or ``retrieve`` are called. (`#173 `__) * Allow the data folder to already exist when creating it. This can help cope with parallel execution as well. (`#171 `__) Documentation: * Added scikit-image to list of Pooch users. (`#168 `__) * Fix typo in README and front page contributing section. 
(`#166 `__) This release contains contributions from: * Leonardo Uieda * Egor Panfilov * Rowan Cockett Version 1.1.0 ------------- Released on: 2020/04/13 DOI: https://doi.org/10.5281/zenodo.3747184 New features: * New function ``pooch.retrieve`` to fetch single files. This is much more convenient than setting up a ``Pooch`` while retaining the hash checks and use of downloaders and processors. It automatically selects a unique file name and saves files to a cache folder. (`#152 `__) * Allow the use of different hashing algorithms (other than SHA256). Optionally specify the hash as ``alg:hash`` and allow ``pooch.Pooch`` to recognize the algorithm when comparing hashes. Setting an algorithm is optional and omitting it defaults to SHA256. This is particularly useful when data are coming from external sources and published hashes are already available. (`#133 `__) Documentation: * Add example for fetching datasets that change on the server, for which the hash check would always fail. (`#144 `__) * Fix path examples in docstring of ``pooch.os_cache``. The docstring mentioned the data path as examples instead of the cache path. (`#140 `__) * Add example of creating a registry when you don't have the data files locally and would have to download them manually. The example uses the ``pooch.retrieve`` function to automate the process. The example covers two cases: when all remote files share the same base URL and when every file has its own URL. (`#161 `__) Maintenance: * A lot of general refactoring of the internals of Pooch to facilitate development of the new ``pooch.retrieve`` function (`#159 `__ `#157 `__ `#156 `__ `#151 `__ `#149 `__) This release contains contributions from: * Leonardo Uieda * Santiago Soler * Kacper Kowalik * Lucas Martin-King * Zac Flamig Version 1.0.0 ------------- Released on: 2020/01/28 DOI: https://doi.org/10.5281/zenodo.3629329 This release marks the stabilization of the Pooch API. Further changes to the 1.* line will be fully backwards compatible (meaning that updating Pooch should not break existing code). If there is great need to make backwards incompatible changes, we will release a 2.* line. In that case, bug fixes will still be ported to the 1.* line for a period of time. Improvements: * Allow blank lines in registry files. Previously, they would cause an error. (`#138 `__) **Backwards incompatible changes**: * Using Python's ``logging`` module instead of ``warnings`` to inform users of download, update, and decompression/unpacking actions. This allows messages to be logged with different priorities and the user to filter out log messages or silence Pooch entirely. Introduces the function ``pooch.get_logger`` to access the ``logging`` object used by Pooch. **Users who relied on Pooch issuing warnings will need to update to capturing logs instead.** All other parts of the API remain unchanged. (`#115 `__) This release contains contributions from: * Daniel Shapero Version 0.7.2 ------------- Released on: 2020/01/17 🚨 **Announcement:** 🚨 We now have a `JOSS paper about Pooch `__! Please :ref:`cite it ` when you use Pooch for your research. (`#116 `__ with reviews in `#132 `__ and `#134 `__) This is a minor release which only updates the citation information to the new JOSS paper. No DOI was issued for this release since there are no code or documentation changes. Version 0.7.1 ------------- Released on: 2020/01/17 DOI: https://doi.org/10.5281/zenodo.3611376 Improvements: * Better error messages when hashes don't match. 
Include the file name in the exception for a hash mismatch between a downloaded file and the registry. Before, we included the name of the temporary file, which wasn't very informative. (`#128 `__) * Better error message for malformed registry files. When loading a registry file, inform the name of the file and include the offending content in the error message instead of just the line number. (`#129 `__) Maintenance: * Change development status flag in ``setup.py`` to "stable" instead of "alpha". (`#127 `__) This release was reviewed at the `Journal of Open Source Software `__. The code and software paper contain contributions from: * Anderson Banihirwe * Martin Durant * Mark Harfouche * Hugo van Kemenade * John Leeman * Rémi Rampin * Daniel Shapero * Santiago Rubén Soler * Matthew Turk * Leonardo Uieda Version 0.7.0 ------------- Released on: 2019/11/19 DOI: https://doi.org/10.5281/zenodo.3547640 New features: * New ``pooch.FTPDownloader`` class for downloading files over FTP. Uses the standard library ``ftplib``. The appropriate downloader is automatically selected by ``pooch.Pooch.fetch`` based on the URL (for anonymous FTP only), so no configuration is required. If authentication is required, ``pooch.FTPDownloader`` provides the needed support. Ported from `NCAR/aletheia-data `__ by the author. (`#118 `__) * Support for file-like objects to ``Pooch.load_registry`` (opened either in binary or text mode). (`#117 `__) Maintenance: * Testing and official support for Python 3.8. (`#113 `__) * 🚨 **Drop support for Python 2.7.** 🚨 Remove conditional dependencies and CI jobs. (`#100 `__) Documentation: * In the tutorial, use ``pkg_resources.resource_stream()`` from setuptools to load the ``registry.txt`` file. It's less error-prone than using ``os.path`` and ``__file__`` and allows the package to work from zip files. (`#120 `__) * Docstrings formatted to 79 characters (instead of 88) for better rendering in Jupyter notebooks and IPython. These displays are limited to 80 chars so the longer lines made the docstring unreadable. (`#123 `__) This release contains contributions from: * Anderson Banihirwe * Hugo van Kemenade * Remi Rampin * Leonardo Uieda Version 0.6.0 ------------- Released on: 2019/10/22 DOI: https://doi.org/10.5281/zenodo.3515031 🚨 **Pooch v0.6.0 is the last release to support Python 2.7** 🚨 New features: * Add optional download progress bar to ``pooch.HTTPDownloader`` (`#97 `__) Maintenance: * Warn that 0.6.0 is the last version to support Python 2.7 (`#108 `__) Documentation: * Update contact information to point to our Slack channel (`#107 `__) * Add icepack to list of projects using Pooch (`#98 `__) This release contains contributions from: * Daniel Shapero * Leonardo Uieda Version 0.5.2 ------------- Released on: 2019/06/24 Maintenance: * Add back support for Python 3.5 with continuous integration tests. No code changes were needed, only removing the restriction from ``setup.py``. (`#93 `__) This release contains contributions from: * Leonardo Uieda Version 0.5.1 ------------- Released on: 2019/05/21 Documentation fixes: * Fix formatting error in ``pooch.Decompress`` docstring. (`#81 `__) * Fix wrong imports in the usage guide for post-processing hooks. (`#84 `__) * Add section to the usage guide explaining when to use ``pooch.Decompress``. (`#85 `__) This release contains contributions from: * Santiago Soler * Leonardo Uieda Version 0.5.0 ------------- Released on: 2019/05/20 New features: * New processor ``pooch.Decompress`` saves a decompressed version of the downloaded file. 
Supports gzip, lzma/xz, and bzip2 compression. **Note**: Under Python 2.7, lzma and bzip2 require the ``backports.lzma`` and ``bz2file`` packages as well. These are soft dependencies and not required to use Pooch. See :ref:`install`. (`#78 `__) * New processor ``pooch.Untar`` unpacks files contained in a downloaded tar archive (with or without compression). (`#77 `__) This release contains contributions from: * Matthew Turk * Leonardo Uieda Version 0.4.0 ------------- Released on: 2019/05/01 New features: * Add customizable downloaders. Delegate file download into separate classes that can be passed to ``Pooch.fetch``. Created the ``HTTPDownloader`` class (used by default) which can also be used to download files that require authentication/login. (`#66 `__) * Add post-download processor hooks to ``Pooch.fetch``. Allows users to pass in a function that is executed right before returning and can overwrite the file path that is returned by ``fetch``. Use this, for example, to perform unpacking/decompression operations on larger files that can be time consuming and we only want to do once. (`#59 `__) * Add the ``Unzip`` post-download processor to extract files from a downloaded zip archive. Unpacks files into a directory in the local store and returns a list of all unzipped files. (`#72 `__) * Make the ``check_version`` function public. It's used internally but will be useful in examples that want to download things from the pooch repository. (`#69 `__) Maintenance: * Pin sphinx to version 1.8.5. New versions of Sphinx (2.0.*) are messing up the numpydoc style docstrings. (`#64 `__) This release contains contributions from: * Santiago Soler * Leonardo Uieda Version 0.3.1 ------------- Released on: 2019/03/28 Minor patches: * Add a project logo (`#57 `__) * Replace ``http`` with ``https`` in the ``README.rst`` to avoid mixed content warnings in some browsers (`#56 `__) Version 0.3.0 ------------- Released on: 2019/03/27 New features: * Use the ``appdirs`` library to get the cache directory. **Could change the default data location on all platforms**. Locations are compatible with the `XDG Base Directory Specification `__ (`#45 `__) * Add method ``Pooch.is_available`` to check remote file availability (`#50 `__) * Add ``Pooch.registry_files`` property to get a name of all files in the registry (`#42 `__) * Make ``Pooch.get_url`` a public method to get the download URL for a given file (`#55 `__) Maintenance: * **Drop support for Python 3.5**. Pooch now requires Python >= 3.6. 
(`#52 `__) * Add a private method to check if a file is in the registry (`#49 `__) * Fix typo in the ``Pooch.load_registry`` docstring (`#41 `__) This release contains contributions from: * Santiago Soler * Rémi Rampin * Leonardo Uieda Version 0.2.1 ------------- Released on: 2018/11/15 Bug fixes: * Fix unwanted ``~`` directory creation when not using a ``version`` in ``pooch.create`` (`#37 `__) Version 0.2.0 ------------- Released on: 2018/10/31 Bug fixes: * Avoid copying of files across the file system (`#33 `__) * Correctly delete temporary downloads on error (`#32 `__) New features: * Allow custom download URLs for individual files (`#30 `__) * Allow dataset versioning to be optional (`#29 `__) Maintenance: * Move URL building to a dedicated method for easy subclassing (`#31 `__) * Add testing and support for Python 3.7 (`#25 `__) Version 0.1.1 ------------- Released on: 2018/08/30 Bug fixes: * Check if the local data folder is writable and warn the user instead of crashing (`#23 `__) Version 0.1 ----------- Released on: 2018/08/20 * First release of Pooch. Manages downloading sample data files over HTTP from a server and storing them in a local directory. Main features: - Download a file only if it's not in the local storage. - Check the SHA256 hash to make sure the file is not corrupted or needs updating. - If the hash is different from the registry, Pooch will download a new version of the file. - If the hash still doesn't match, Pooch will raise an exception warning of possible data corruption. pooch-1.8.2/doc/citing.rst .. _citing: .. include:: ../CITATION.rst pooch-1.8.2/doc/compatibility.rst .. _compatibility: Version compatibility ===================== Pooch backwards incompatible changes ------------------------------------ We try to retain backwards compatibility whenever possible. Major breaking changes to the Pooch API will be marked by a major release and deprecation warnings will be issued in previous releases to give developers ample time to adapt. If there are any backwards incompatible changes, they will be listed below: .. list-table:: :widths: 20 10 70 * - **Version introduced** - **Severity** - **Notes** * - v1.0.0 - Low - We replaced use of ``warning`` with the ``logging`` module for all messages issued by Pooch. This allows messages to be logged with different priorities and the user to filter out log messages or silence Pooch entirely. **Users who relied on Pooch issuing warnings will need to update to capturing logs instead.** The vast majority of users are unaffected. .. _dependency-versions: Supported dependency versions ----------------------------- Pooch follows the recommendations in `NEP29 `__ for setting the minimum required version of our dependencies. In short, we support **all minor releases of our dependencies from the previous 24 months** before a Pooch release with a minimum of 2 minor releases. We follow this guidance conservatively and won't require newer versions if the older ones are still working without causing problems. Whenever support for a version is dropped, we will include a note in the :ref:`changes`. .. note:: This was introduced in Pooch v1.6.0. .. _python-versions: Supported Python versions ------------------------- If you require support for older Python versions, please pin Pooch to the following releases to ensure compatibility: ..
list-table:: :widths: 40 60 * - **Python version** - **Last compatible Pooch release** * - 2.7 - 0.6.0 * - 3.5 - 1.2.0 * - 3.6 - 1.6.0 pooch-1.8.2/doc/conf.py000066400000000000000000000056561463036314300147150ustar00rootroot00000000000000# Copyright (c) 2018 The Pooch Developers. # Distributed under the terms of the BSD 3-Clause License. # SPDX-License-Identifier: BSD-3-Clause # # This code is part of the Fatiando a Terra project (https://www.fatiando.org) # import os import datetime import pooch # Project information # ----------------------------------------------------------------------------- project = "Pooch" copyright = f"{datetime.date.today().year}, The {project} Developers" if len(pooch.__version__.split(".")) > 3: version = "dev" else: version = pooch.__version__ # General configuration # ----------------------------------------------------------------------------- extensions = [ "sphinx.ext.autodoc", "sphinx.ext.autosummary", "sphinx.ext.coverage", "sphinx.ext.mathjax", "sphinx.ext.doctest", "sphinx.ext.viewcode", "sphinx.ext.extlinks", "sphinx.ext.intersphinx", "sphinx.ext.napoleon", "sphinx_design", ] # Configuration to include links to other project docs when referencing # functions/classes intersphinx_mapping = { "python": ("https://docs.python.org/3/", None), "pandas": ("http://pandas.pydata.org/pandas-docs/stable/", None), "requests": ("https://requests.readthedocs.io/en/latest/", None), } # Autosummary pages will be generated by sphinx-autogen instead of sphinx-build autosummary_generate = [] # Otherwise, the Return parameter list looks different from the Parameters list napoleon_use_rtype = False # Always show the source code that generates a plot plot_include_source = True plot_formats = ["png"] # Sphinx project configuration templates_path = ["_templates"] exclude_patterns = ["_build", "**.ipynb_checkpoints"] source_suffix = ".rst" # The encoding of source files. source_encoding = "utf-8" master_doc = "index" pygments_style = "default" add_function_parentheses = False # HTML output configuration # ----------------------------------------------------------------------------- html_title = f'{project} {version}' html_short_title = project # Don't use the logo since it gets in the way of the project name and is # repeated in the front page. # html_logo = "_static/pooch-logo.png" html_favicon = "_static/favicon.png" html_last_updated_fmt = "%b %d, %Y" html_copy_source = True html_static_path = ["_static"] # CSS files are relative to the static path html_css_files = ["style.css"] html_extra_path = [] html_show_sourcelink = False html_show_sphinx = True html_show_copyright = True html_theme = "sphinx_book_theme" html_theme_options = { "repository_url": f"https://github.com/fatiando/{project.lower()}", "repository_branch": "main", "path_to_docs": "doc", "launch_buttons": { "binderhub_url": "https://mybinder.org", "notebook_interface": "jupyterlab", }, "use_edit_page_button": True, "use_issues_button": True, "use_repository_button": True, "use_download_button": True, "home_page_in_toc": True, } pooch-1.8.2/doc/decompressing.rst000066400000000000000000000034521463036314300170020ustar00rootroot00000000000000.. _decompressing: Decompressing ============= If you have a compressed file that is not an archive (zip or tar), you can use :class:`pooch.Decompress` to decompress it after download. For example, large binary files can be compressed with ``gzip`` to reduce download times but will need to be decompressed before loading, which can be slow. 
You can trade storage space for speed by keeping a decompressed copy of the file: .. code:: python from pooch import Decompress def fetch_compressed_file(): """ Load a large binary file that has been gzip compressed. """ # Pass in the processor to decompress the file on download fname = GOODBOY.fetch("large-binary-file.npy.gz", processor=Decompress()) # The file returned is the decompressed version which can be loaded by # numpy data = numpy.load(fname) return data :class:`pooch.Decompress` returns ``"large-binary-file.npy.gz.decomp"`` as the decompressed file name by default. You can change this behaviour by passing a file name instead: .. code:: python import os from pooch import Decompress def fetch_compressed_file(): """ Load a large binary file that has been gzip compressed. """ # Pass in the processor to decompress the file on download fname = GOODBOY.fetch("large-binary-file.npy.gz", processor=Decompress(name="a-different-file-name.npy"), ) # The file returned is now named "a-different-file-name.npy" data = numpy.load(fname) return data .. warning:: Passing in ``name`` can cause existing data to be lost! For example, if a file already exists with the specified name it will be overwritten with the new decompressed file content. **Use this option with caution.** pooch-1.8.2/doc/downloaders.rst000066400000000000000000000105241463036314300164570ustar00rootroot00000000000000.. _downloaders: Downloaders: Customizing the download ===================================== By default, :meth:`pooch.Pooch.fetch` and :meth:`pooch.retrieve` will detect the download protocol from the given URL (HTTP, FTP, SFTP, DOI) and use the appropriate download method. Sometimes this is not enough: some servers require logins, redirections, or other non-standard operations. To get around this, use the ``downloader`` argument of :meth:`~pooch.Pooch.fetch` and :meth:`~pooch.retrieve`. Downloaders are Python *callable objects* (like functions or classes with a ``__call__`` method) and must have the following format: .. code:: python def mydownloader(url, output_file, pooch): ''' Download a file from the given URL to the given local file. The function **must** take the following arguments (in order). Parameters ---------- url : str The URL to the file you want to download. output_file : str or file-like object Path (and file name) to which the file will be downloaded. pooch : pooch.Pooch The instance of the Pooch class that is calling this function. No return value is required. ''' ... Pooch provides downloaders for HTTP, FTP, and SFTP that support authentication and optionally printing progress bars. See :ref:`api` for a list of available downloaders. Common uses of downloaders include: * Passing :ref:`login credentials ` to HTTP and FTP servers * Printing :ref:`progress bars ` Creating your own downloaders ----------------------------- If your use case is not covered by our downloaders, you can implement your own. :meth:`pooch.Pooch.fetch` and :func:`pooch.retrieve` will accept any *callable obejct* that has the signature specified above. As an example, consider the case in which the login credentials need to be provided to a site that is redirected from the original download URL: .. code:: python import requests def redirect_downloader(url, output_file, pooch): """ Download after following a redirection. 
""" # Get the credentials from the user's environment username = os.environ.get("SOMESITE_USERNAME") password = os.environ.get("SOMESITE_PASSWORD") # Make a request that will redirect to the login page login = requests.get(url) # Provide the credentials and download from the new URL download = HTTPDownloader(auth=(username, password)) download(login.url, output_file, mypooch) def fetch_protected_data(): """ Fetch a file from a server that requires authentication """ fname = GOODBOY.fetch("some-data.csv", downloader=redirect_downloader) data = pandas.read_csv(fname) return data Availability checks ------------------- **Optionally**, downloaders can take a ``check_only`` keyword argument (default to ``False``) that makes them only check if a given file is available for download **without** downloading the file. This makes a downloader compatible with :meth:`pooch.Pooch.is_available`. In this case, the downloader should return a boolean: .. code:: python def mydownloader(url, output_file, pooch, check_only=False): ''' Download a file from the given URL to the given local file. The function **must** take the following arguments (in order). Parameters ---------- url : str The URL to the file you want to download. output_file : str or file-like object Path (and file name) to which the file will be downloaded. pooch : pooch.Pooch The instance of the Pooch class that is calling this function. check_only : bool If True, will only check if a file exists on the server and **without downloading the file**. Will return ``True`` if the file exists and ``False`` otherwise. Returns ------- None or availability If ``check_only==True``, returns a boolean indicating if the file is available on the server. Otherwise, returns ``None``. ''' ... If a downloader does not implement an availability check (i.e., doesn't take ``check_only`` as a keyword argument), then :meth:`pooch.Pooch.is_available` will raise a ``NotImplementedError``. pooch-1.8.2/doc/hashes.rst000066400000000000000000000077511463036314300154210ustar00rootroot00000000000000.. _hashes: Hashes: Calculating and bypassing ================================= Pooch uses hashes to check if files are up-to-date or possibly corrupted: * If a file exists in the local folder, Pooch will check that its hash matches the one in the registry. If it doesn't, we'll assume that it needs to be updated. * If a file needs to be updated or doesn't exist, Pooch will download it from the remote source and check the hash. If the hash doesn't match, an exception is raised to warn of possible file corruption. * Cryptographic hashes may be used where users wish to ensure the security of their download. Calculating hashes ------------------ You can generate hashes for your data files using ``openssl`` in the terminal: .. code:: bash $ openssl sha256 data/c137.csv SHA256(data/c137.csv)= baee0894dba14b12085eacb204284b97e362f4f3e5a5807693cc90ef415c1b2d Or using the :func:`pooch.file_hash` function (which is a convenient way of calling Python's :mod:`hashlib`): .. code:: python import pooch print(pooch.file_hash("data/c137.csv")) Specifying the hash algorithm ----------------------------- By default, Pooch uses `SHA256 `__ hashes. Other hash methods that are available in :mod:`hashlib` can also be used: .. code:: python import pooch print(pooch.file_hash("data/c137.csv", alg="sha512")) In this case, you can specify the hash algorithm in the **registry** by prepending it to the hash, for example ``"md5:0hljc7298ndo2"`` or ``"sha512:803o3uh2pecb2p3829d1bwouh9d"``. 
Pooch will understand this and use the appropriate method. Bypassing the hash check ------------------------ Sometimes we might not know the hash of the file or it could change on the server periodically. To bypass the check, we can set the hash value to ``None`` when specifying the ``registry`` argument for :func:`pooch.create` (or the ``known_hash`` in :func:`pooch.retrieve`). In this example, we want to use Pooch to download a list of weather stations around Australia: * The file with the stations is in an FTP server and we want to store it locally in separate folders for each day that the code is run. * The problem is that the ``stations.zip`` file is updated on the server instead of creating a new one, so the hash check would fail. This is how you can solve this problem: .. code:: python import datetime import pooch # Get the current data to store the files in separate folders CURRENT_DATE = datetime.datetime.now().date() GOODBOY = pooch.create( path=pooch.os_cache("bom_daily_stations") / CURRENT_DATE, base_url="ftp://ftp.bom.gov.au/anon2/home/ncc/metadata/sitelists/", registry={ "stations.zip": None, }, ) When running this same code again at a different date, the file will be downloaded again because the local cache folder changed and the file is no longer present in it. If you omit ``CURRENT_DATE`` from the cache path, then Pooch will only fetch the files once, unless they are deleted from the cache. .. attention:: If this script is run over a period of time, your cache directory will increase in size, as the files are stored in daily subdirectories. .. _hashes-other: Other supported hashes ---------------------- Beyond hashing algorithms supported by ``hashlib``, Pooch supports algorithms provided by the `xxhash package `__. If the ``xxhash`` package is available, users may specify to use one of the algorithms provided by the package. .. code:: bash $ xxh128sum data/store.zip 6a71973c93eac6c8839ce751ce10ae48 data/store.zip $ # ^^^^^^^^^^^^^^^^^^^ The hash ^^^^^^^^^^^^^^ The filename .. code:: python import datetime import pooch # Get the current data to store the files in separate folders CURRENT_DATE = datetime.datetime.now().date() GOODBOY = pooch.create( [...], registry={ "store.zip": "xxh128:6a71973c93eac6c8839ce751ce10ae48", }, ) pooch-1.8.2/doc/index.rst000066400000000000000000000077061463036314300152550ustar00rootroot00000000000000.. title:: Home .. raw:: html

Pooch

A friend to fetch your data files

.. raw:: html

Just want to download a file without messing with requests and urllib? Trying to add sample datasets to your Python package? Pooch is here to help!

*Pooch* is a **Python library** that can manage data by **downloading files** from a server (only when needed) and storing them locally in a data **cache** (a folder on your computer). * Pure Python and minimal dependencies. * Download files over HTTP, FTP, and from data repositories like Zenodo and figshare. * Built-in post-processors to unzip/decompress the data after download. * Designed to be extended: create custom downloaders and post-processors. Are you a **scientist** or researcher? Pooch can help you too! * Host your data on a repository and download using the DOI. * Automatically download data using code instead of telling colleagues to do it themselves. * Make sure everyone running the code has the same version of the data files. ---- .. grid:: 1 2 1 2 :margin: 5 5 0 0 :padding: 0 0 0 0 :gutter: 4 .. grid-item-card:: :octicon:`info` Getting started :text-align: center :class-title: sd-fs-5 :class-card: sd-p-3 New to Pooch? Start here! .. button-ref:: about :ref-type: ref :click-parent: :color: primary :outline: :expand: .. grid-item-card:: :octicon:`comment-discussion` Need help? :text-align: center :class-title: sd-fs-5 :class-card: sd-p-3 Ask on our community channels. .. button-link:: https://www.fatiando.org/contact :click-parent: :color: primary :outline: :expand: Join the conversation :octicon:`link-external` .. grid-item-card:: :octicon:`file-badge` Reference documentation :text-align: center :class-title: sd-fs-5 :class-card: sd-p-3 A list of modules and functions. .. button-ref:: api :ref-type: ref :color: primary :outline: :expand: .. grid-item-card:: :octicon:`bookmark` Using Pooch for research? :text-align: center :class-title: sd-fs-5 :class-card: sd-p-3 Citations help support our work! .. button-ref:: citing :ref-type: ref :color: primary :outline: :expand: ---- .. seealso:: Pooch is a part of the `Fatiando a Terra `_ project. .. toctree:: :caption: Getting Started :hidden: :maxdepth: 1 about.rst install.rst retrieve.rst multiple-files.rst sample-data.rst .. toctree:: :caption: Training your Pooch :hidden: :maxdepth: 1 hashes.rst user-defined-cache.rst registry-files.rst multiple-urls.rst protocols.rst logging.rst downloaders.rst processors.rst authentication.rst progressbars.rst unpacking.rst decompressing.rst .. toctree:: :caption: Reference :hidden: :maxdepth: 1 api/index.rst compatibility.rst citing.rst changes.rst versions.rst .. toctree:: :caption: Community :hidden: Join the community Code of Conduct How to contribute Source code on GitHub Authors Fatiando a Terra pooch-1.8.2/doc/install.rst000066400000000000000000000035721463036314300156110ustar00rootroot00000000000000.. _install: Installing ========== There are different ways to install Pooch: .. tab-set:: .. tab-item:: pip Using the `pip `__ package manager: .. code:: bash python -m pip install pooch .. tab-item:: conda/mamba Using the `conda `__ or mamba package manager that comes with the Anaconda/Miniconda/Miniforge distributions: .. code:: bash conda install pooch --channel conda-forge or .. code:: bash mamba install pooch --channel conda-forge .. tab-item:: Development version Using ``pip`` to install the latest **unreleased** version from GitHub (**not recommended** in most situations): .. code:: bash python -m pip install --upgrade git+https://github.com/fatiando/pooch .. note:: The commands above should be executed in a terminal. On Windows, use the ``cmd.exe`` or the "Anaconda Prompt" app if you're using Anaconda. Which Python? ------------- You'll need **Python >= 3.7**. 
See :ref:`python-versions` if you require support for older versions. .. _dependencies: Dependencies ------------ The required dependencies should be installed automatically when you install Pooch using ``conda`` or ``pip``. Optional dependencies have to be installed manually. Required: * `platformdirs `__ * `packaging `__ * `requests `__ Optional: * `tqdm `__: For printing a download progress bar. See :ref:`progressbars`. * `paramiko `__: For SFTP downloads. See :class:`pooch.SFTPDownloader`. * `xxhash `__: For the faster xxHash algorithms. See :ref:`hashes-other`. pooch-1.8.2/doc/logging.rst000066400000000000000000000015551463036314300155700ustar00rootroot00000000000000.. _logging: Logging and verbosity ===================== Pooch uses the :mod:`logging` module to print messages about downloads and :ref:`processor ` execution. Adjusting the logging level --------------------------- Pooch will log events like downloading a new file, updating an existing one, or unpacking an archive by printing to the terminal. You can change how verbose these events are by getting the event logger from pooch and changing the logging level: .. code:: python logger = pooch.get_logger() logger.setLevel("WARNING") Most of the events from Pooch are logged at the info level; this code says that you only care about warnings or errors, like inability to create the data cache. The event logger is a :class:`logging.Logger` object, so you can use that class's methods to handle logging events in more sophisticated ways if you wish. pooch-1.8.2/doc/multiple-files.rst000066400000000000000000000075321463036314300170760ustar00rootroot00000000000000.. _beginner: Fetching files from a registry ============================== If you need to manage the download of multiple files from one or more locations, then this section is for you! Setup ----- In the following example we'll assume that: 1. You have several data files served from the same base URL (for example, ``"https://www.somewebpage.org/science/data"``). 2. You know the file names and their `hashes `__. We will use :func:`pooch.create` to set up our download manager: .. code:: python import pooch odie = pooch.create( # Use the default cache folder for the operating system path=pooch.os_cache("my-project"), base_url="https://www.somewebpage.org/science/data/", # The registry specifies the files that can be fetched registry={ "temperature.csv": "sha256:19uheidhlkjdwhoiwuhc0uhcwljchw9ochwochw89dcgw9dcgwc", "gravity-disturbance.nc": "sha256:1upodh2ioduhw9celdjhlfvhksgdwikdgcowjhcwoduchowjg8w", }, ) The return value (``odie``) is an instance of :class:`pooch.Pooch`. It contains all of the information needed to fetch the data files in our **registry** and store them in the specified cache folder. .. note:: The Pooch **registry** is a mapping of file names and their associated hashes (and optionally download URLs). .. tip:: If you don't know the hash or are otherwise unable to obtain it, it is possible to bypass the check. This is **not recommended** for general use, only if it can't be avoided. See :ref:`hashes`. .. attention:: You can have data files in **subdirectories** of the remote data store (URL). These files will be saved to the same subdirectories in the local storage folder. However, the names of these files in the registry **must use Unix-style separators** (``'/'``) **even on Windows**. Pooch will handle the appropriate conversions. Downloading files ----------------- To download one our data files and load it with `xarray `__: .. 
code:: python import xarray as xr file_path = odie.fetch("gravity-disturbance.nc") # Standard use of xarray to load a netCDF file (.nc) data = xr.open_dataset(file_path) The call to :meth:`pooch.Pooch.fetch` will check if the file already exists in the cache folder. If it doesn't: 1. The file is downloaded and saved to the cache folder. 2. The hash of the downloaded file is compared against the one stored in the registry to make sure the file isn't corrupted. 3. The function returns the absolute path to the file on your computer. If it does: 1. Check if its hash matches the one in the registry. 2. If it does, no download happens and the file path is returned. 3. If it doesn't, the file is downloaded once more to get an updated version on your computer. Why use this method? -------------------- With :class:`pooch.Pooch`, you can centralize the information about the URLs, hashes, and files in a single place. Once the instance is created, it can be used to fetch individual files without repeating the URL and hash everywhere. A good way to use this is to place the call to :func:`pooch.create` in a Python module (a ``.py`` file). Then you can ``import`` the module in ``.py`` scripts or Jupyter notebooks and use the instance to fetch your data. This way, you don't need to define the URLs or hashes in multiple scripts/notebooks. Customizing the download ------------------------ The :meth:`pooch.Pooch.fetch` method supports all of Pooch's :ref:`downloaders ` and :ref:`processors `. You can use HTTP, FTP, and SFTP (even with :ref:`authentication `), :ref:`decompress files `, :ref:`unpack archives `, show :ref:`progress bars `, and more with a bit of configuration. pooch-1.8.2/doc/multiple-urls.rst000066400000000000000000000057001463036314300167540ustar00rootroot00000000000000.. _multipleurls: Multiple download URLs ====================== You can set different download URLs for individual files with the ``urls`` argument of :func:`pooch.create`. It should be a dictionary with the file names as keys and the URLs for downloading the files as values. For example, say we have a ``citadel.csv`` file that we want to download from ``https://www.some-data-hosting-site.com`` instead: .. code:: python # The basic setup is the same POOCH = pooch.create( path=pooch.os_cache("plumbus"), base_url="https://github.com/rick/plumbus/raw/{version}/data/", version=version, version_dev="main", registry={ "c137.csv": "19uheidhlkjdwhoiwuhc0uhcwljchw9ochwochw89dcgw9dcgwc", "cronen.csv": "1upodh2ioduhw9celdjhlfvhksgdwikdgcowjhcwoduchowjg8w", # Still include the file in the registry "citadel.csv": "893yprofwjndcwhx9c0ehp3ue9gcwoscjwdfgh923e0hwhcwiyc", }, # Now specify custom URLs for some of the files in the registry. urls={ "citadel.csv": "https://www.some-data-hosting-site.com/files/citadel.csv", }, ) When ``POOCH.fetch("citadel.csv")`` is called, the download will be from the specified URL instead of the ``base_url``. The file name will not be appended automatically to the URL in case you want to change the file name in local storage. .. attention:: **Versioning of custom URLs is not supported** since they are assumed to be data files independent of your project. The file will **still be placed in a versioned cache folder**. .. tip:: Custom URLs can be used alongside ``base_url`` or you can omit ``base_url`` entirely by setting it to an empty string (``base_url=""``). **Doing so requires setting a custom URL for every file in the registry**, as in the sketch below.
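For example, a setup along these lines would fetch every file from its own custom location (the file names, hashes, and URLs here are placeholders, not real files):

.. code:: python

    # Hypothetical sketch: no base URL, every file gets a custom URL
    POOCH = pooch.create(
        path=pooch.os_cache("plumbus"),
        # An empty base_url is allowed because every file has a custom URL
        base_url="",
        registry={
            "citadel.csv": "893yprofwjndcwhx9c0ehp3ue9gcwoscjwdfgh923e0hwhcwiyc",
            "cronen.csv": "1upodh2ioduhw9celdjhlfvhksgdwikdgcowjhcwoduchowjg8w",
        },
        urls={
            "citadel.csv": "https://www.some-data-hosting-site.com/files/citadel.csv",
            "cronen.csv": "https://www.some-data-hosting-site.com/files/cronen.csv",
        },
    )
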
Usage with registry files ------------------------- You can also include custom URLs in a :ref:`registry file ` by adding the URL for a file to end of the line (separated by a space): .. code-block:: none c137.csv 19uheidhlkjdwhoiwuhc0uhcwljchw9ochwochw89dcgw9dcgwc cronen.csv 1upodh2ioduhw9celdjhlfvhksgdwikdgcowjhcwoduchowjg8w citadel.csv 893yprofwjndcwhx9c0ehp3ue9gcwoscjwdfgh923e0hwhcwiyc https://www.some-data-hosting-site.com/files/citadel.csv :meth:`pooch.Pooch.load_registry` will automatically populate the ``urls`` attribute. This way, custom URLs don't need to be set in the code. In fact, the module code doesn't change at all: .. code:: python # Define the Pooch exactly the same (urls is None by default) POOCH = pooch.create( path=pooch.os_cache("plumbus"), base_url="https://github.com/rick/plumbus/raw/{version}/data/", version=version, version_dev="main", registry=None, ) # If custom URLs are present in the registry file, they will be set # automatically. POOCH.load_registry(os.path.join(os.path.dirname(__file__), "registry.txt")) pooch-1.8.2/doc/processors.rst000066400000000000000000000121741463036314300163430ustar00rootroot00000000000000.. _processors: Processors: Post-download actions ================================= Post-download actions sometimes need to be taken on downloaded files (unzipping, conversion to a more efficient format, etc). If these actions are time or memory consuming, it might be worth doing them only once after the file is downloaded. This is a way of trading disk space for computation time. :meth:`pooch.Pooch.fetch` and :func:`pooch.retrieve` accept the ``processor`` argument to handle these situations. Processors are Python *callable objects* (like functions or classes with a ``__call__`` method) that are executed after a file is downloaded to perform these actions. They must have the following format: .. code:: python def myprocessor(fname, action, pooch): ''' Processes the downloaded file and returns a new file name. The function **must** take as arguments (in order): fname : str The full path of the file in the local data storage action : str Either: "download" (file doesn't exist and will be downloaded), "update" (file is outdated and will be downloaded), or "fetch" (file exists and is updated so no download is necessary). pooch : pooch.Pooch The instance of the Pooch class that is calling this function. The return value can be anything but is usually a full path to a file (or list of files). This is what will be returned by Pooch.fetch and pooch.retrieve in place of the original file path. ''' ... return full_path The processor is executed after a file downloaded attempted (whether the download actually happens or not) and before returning the path to the downloaded file. The processor lets us intercept the returned path, perform actions, and possibly return a different path. Pooch provides built-in processors for common tasks, like decompressing files and unpacking tar and zip archives. See the :ref:`api` for a full list. Common uses cases for processors include: * :ref:`Unpacking archives ` to load individual members * :ref:`Decompressing ` files Creating your own processors ---------------------------- Let's say we want to implement the :class:`pooch.Unzip` processor ourselves to extract a single file from the archive. We could do that with the following function: .. code:: python import os from zipfile import ZipFile def unpack(fname, action, pup): """ Post-processing hook to unzip a file and return the unzipped file name. 
Parameters ---------- fname : str Full path of the zipped file in local storage action : str One of "download" (file doesn't exist and will download), "update" (file is outdated and will download), and "fetch" (file exists and is updated so no download). pup : Pooch The instance of Pooch that called the processor function. Returns ------- fname : str The full path to the unzipped file. (Return the same fname if your processor doesn't modify the file). """ # Create a new name for the unzipped file. Appending something to the # name is a relatively safe way of making sure there are no clashes # with other files in the registry. unzipped = fname + ".unzipped" # Don't unzip if file already exists and is not being downloaded if action in ("update", "download") or not os.path.exists(unzipped): with ZipFile(fname, "r") as zip_file: # Extract the data file from within the archive with zip_file.open("actual-data-file.txt") as data_file: # Save it to our desired file name with open(unzipped, "wb") as output: output.write(data_file.read()) # Return the path of the unzipped file return unzipped def fetch_zipped_file(): """ Load a large zipped sample data as a pandas.DataFrame. """ # Pass in the processor to unzip the data file fname = GOODBOY.fetch("zipped-data-file.zip", processor=unpack) # fname is now the path of the unzipped file which can be loaded by # pandas directly data = pandas.read_csv(fname) return data Similarly, you could build any custom processor function so long as it receives the ``fname, action, pup`` arguments. Example use cases for this would be: * Converting data from a download-friendly format (compressed and minimal file size) to a more user-friendly format (easy to open and fast to load into memory). * Adding missing metadata to data from public servers. You might be using public data that has known issues (poorly formatted entries, missing metadata, etc) which can be fixed when the file is downloaded. The main advantage of using a processor for these actions is that they are performed only when the file is downloaded. A modified version of the file can be kept on disk so that loading the file is easier. This is particularly convenient if the processor task takes a long time to run. pooch-1.8.2/doc/progressbars.rst000066400000000000000000000063761463036314300166620ustar00rootroot00000000000000.. _progressbars: Printing progress bars ====================== .. _tqdm-progressbar: Using ``tqdm`` progress bars ---------------------------- Pooch uses `tqdm `__ to print a download progress bar. This is turned off by default but can be enabled using ``progressbar=True`` in :func:`pooch.retrieve`: .. code:: python fname = retrieve( url="https://some-data-server.org/a-data-file.nc", known_hash="md5:70e2afd3fd7e336ae478b1e740a5f08e", progressbar=True, ) The resulting progress bar will be printed to the standard error stream (STDERR) and should look something like this: .. code:: 100%|█████████████████████████████████████████| 336/336 [...] You can also do the same with :meth:`pooch.Pooch.fetch`: .. code:: python POOCH = pooch.create( ... ) fname = POOCH.fetch( "large-data-file.h5", progressbar=True, ) Alternatively, you can pass ``progressbar=True`` directly into one of our :ref:`downloaders `: ..
code:: python # Using fetch fname = POOCH.fetch( "large-data-file.h5", downloader=pooch.HTTPDownloader(progressbar=True), ) # Using retrieve fname = retrieve( url="https://some-data-server.org/a-data-file.nc", known_hash="md5:70e2afd3fd7e336ae478b1e740a5f08e", downloader=pooch.HTTPDownloader(progressbar=True), ) .. note:: ``tqdm`` is not installed by default with Pooch. You will have to install it separately in order to use this feature. .. _custom-progressbar: Using custom progress bars -------------------------- .. note:: At the moment, this feature is only available for :class:`pooch.HTTPDownloader`. Alternatively, you can pass an arbitrary object that behaves like a progress that implements the ``update``, ``reset``, and ``close`` methods: * ``update`` should accept a single integer positional argument representing the current completion (in bytes). * ``reset`` and ``close`` do not take any argument beside ``self``. The object must also have a ``total`` attribute that can be set from outside the class. In other words, the custom progress bar needs to behave like a ``tqdm`` progress bar. Here's a minimal working example of such a custom "progress display" class: .. code:: python import sys class MinimalProgressDisplay: def __init__(self, total): self.count = 0 self.total = total def __repr__(self): return str(self.count) + "/" + str(self.total) def render(self): print(f"\r{self}", file=sys.stderr, end="") def update(self, i): self.count = i self.render() def reset(self): self.count = 0 def close(self): print("", file=sys.stderr) An instance of this class can now be passed to an ``HTTPDownloader`` as: .. code:: python # Assuming you have a pooch.Pooch instance setup POOCH = pooch.create( ... ) minimal_progress = MinimalProgressDisplay(total=None) fname = POOCH.fetch( "large-data-file.h5", downloader=pooch.HTTPDownloader(progressbar=minimal_progress), ) pooch-1.8.2/doc/protocols.rst000066400000000000000000000076321463036314300161700ustar00rootroot00000000000000.. _protocols: Download protocols ================== Pooch supports the HTTP, FTP, and SFTP protocols by default. It also includes a custom protocol for Digital Object Identifiers (DOI) from providers like `figshare `__ and `Zenodo `__ (see :ref:`below `). It will **automatically detect** the correct protocol from the URL and use the appropriate download method. .. note:: To download files over SFTP, `paramiko `__ needs to be installed. For example, if our data were hosted on an FTP server, we could use the following setup: .. code:: python POOCH = pooch.create( path=pooch.os_cache("plumbus"), # Use an FTP server instead of HTTP. The rest is all the same. base_url="ftp://garage-basement.org/{version}/", version=version, version_dev="main", registry={ "c137.csv": "19uheidhlkjdwhoiwuhc0uhcwljchw9ochwochw89dcgw9dcgwc", "cronen.csv": "1upodh2ioduhw9celdjhlfvhksgdwikdgcowjhcwoduchowjg8w", }, ) def fetch_c137(): """ Load the C-137 sample data as a pandas.DataFrame (over FTP this time). """ fname = POOCH.fetch("c137.csv") data = pandas.read_csv(fname) return data You can even specify custom functions for the download or login credentials for **authentication**. See :ref:`downloaders` for more information. .. _doidownloads: Digital Object Identifiers (DOIs) --------------------------------- Pooch can download files stored in data repositories from the DOI by formatting the URL as ``doi:{DOI}/{file name}``. Notice that there are no ``//`` like in HTTP/FTP and you must specify a file name after the DOI (separated by a ``/``). .. 
seealso:: For a list of supported data repositories, see :class:`pooch.DOIDownloader`. For example, one of our test files (``"tiny-data.txt"``) is stored in the figshare dataset doi:`10.6084/m9.figshare.14763051.v1 `__. We could use :func:`pooch.retrieve` to download it like so: .. code-block:: python file_path = pooch.retrieve( url="doi:10.6084/m9.figshare.14763051.v1/tiny-data.txt", known_hash="md5:70e2afd3fd7e336ae478b1e740a5f08e", ) We can also make a :class:`pooch.Pooch` with a registry stored entirely on a figshare dataset: .. code-block:: python POOCH = pooch.create( path=pooch.os_cache("plumbus"), # Use the figshare DOI base_url="doi:10.6084/m9.figshare.14763051.v1/", registry={ "tiny-data.txt": "md5:70e2afd3fd7e336ae478b1e740a5f08e", "store.zip": "md5:7008231125631739b64720d1526619ae", }, ) def fetch_tiny_data(): """ Load the tiny data as a numpy array. """ fname = POOCH.fetch("tiny-data.txt") data = numpy.loadtxt(fname) return data .. warning:: A figshare DOI must point to a figshare *dataset*, not a figshare *collection*. Collection DOIs have a ``.c.`` in them, e.g. ``doi:10.6084/m9.figshare.c.4362224.v1``. Attempting to download files from a figshare collection will raise an error. See `issue #274 `__ for details. Since these repositories store information about the files they contain, we can avoid having to manually type the registry with the file names and their hashes. Instead, we can use the :meth:`pooch.Pooch.load_registry_from_doi` method to automatically populate the registry: .. code-block:: python POOCH = pooch.create( path=pooch.os_cache("plumbus"), # Use the figshare DOI base_url="doi:10.6084/m9.figshare.14763051.v1/", registry=None, ) # Automatically populate the registry POOCH.load_registry_from_doi() # Fetch one of the files in the repository fname = POOCH.fetch("tiny-data.txt") pooch-1.8.2/doc/registry-files.rst000066400000000000000000000133741463036314300171140ustar00rootroot00000000000000.. _registryfiles: Registry files ============== Usage ----- If your project has a large number of data files, it can be tedious to list them in a dictionary. In these cases, it's better to store the file names and hashes in a file and use :meth:`pooch.Pooch.load_registry` to read them. .. code:: python import os import pkg_resources POOCH = pooch.create( path=pooch.os_cache("plumbus"), base_url="https://github.com/rick/plumbus/raw/{version}/data/", version=version, version_dev="main", # We'll load it from a file later registry=None, ) # Get registry file from package_data registry_file = pkg_resources.resource_stream("plumbus", "registry.txt") # Load this registry file POOCH.load_registry(registry_file) In this case, the ``registry.txt`` file is in the ``plumbus/`` package directory and should be shipped with the package (see below for instructions). We use `pkg_resources `__ to access the ``registry.txt``, giving it the name of our Python package. Registry file format -------------------- Registry files are light-weight text files that specify a file's name and hash. In our example, the contents of ``registry.txt`` are: .. code-block:: none c137.csv 19uheidhlkjdwhoiwuhc0uhcwljchw9ochwochw89dcgw9dcgwc cronen.csv 1upodh2ioduhw9celdjhlfvhksgdwikdgcowjhcwoduchowjg8w A specific hashing algorithm can be enforced if a checksum for a file is prefixed with ``alg:``: ..
code-block:: none c137.csv sha1:e32b18dab23935bc091c353b308f724f18edcb5e cronen.csv md5:b53c08d3570b82665784cedde591a8b0 From Pooch v1.2.0 the registry file can also contain line comments, prepended with a ``#``: .. code-block:: none # C-137 sample data c137.csv 19uheidhlkjdwhoiwuhc0uhcwljchw9ochwochw89dcgw9dcgwc # Cronenberg sample data cronen.csv 1upodh2ioduhw9celdjhlfvhksgdwikdgcowjhcwoduchowjg8w .. attention:: Make sure you set the Pooch version in your ``setup.py`` to >=1.2.0 when using comments as earlier versions cannot handle them: ``install_requires = [..., "pooch>=1.2.0", ...]`` Packaging registry files ------------------------ To make sure the registry file is shipped with your package, include the following in your ``MANIFEST.in`` file: .. code-block:: none include plumbus/registry.txt And the following entry in the ``setup`` function of your ``setup.py`` file: .. code:: python setup( ... package_data={"plumbus": ["registry.txt"]}, ... ) Creating a registry file ------------------------ If you have many data files, creating the registry and keeping it updated can be a challenge. Function :func:`pooch.make_registry` will create a registry file with all contents of a directory. For example, we can generate the registry file for our fictitious project from the command-line: .. code:: bash $ python -c "import pooch; pooch.make_registry('data', 'plumbus/registry.txt')" Create registry file from remote files -------------------------------------- If you want to create a registry file for a large number of data files that are available for download but you don't have their hashes or any local copies, you must download them first. Manually downloading each file can be tedious. However, we can automate the process using :func:`pooch.retrieve`. Below, we'll explore two different scenarios. If the data files share the same base url, we can use :func:`pooch.retrieve` to download them and then use :func:`pooch.make_registry` to create the registry: .. code:: python import os # Names of the data files filenames = ["c137.csv", "cronen.csv", "citadel.csv"] # Base url from which the data files can be downloaded from base_url = "https://www.some-data-hosting-site.com/files/" # Create a new directory where all files will be downloaded directory = "data_files" os.makedirs(directory) # Download each data file to data_files for fname in filenames: path = pooch.retrieve( url=base_url + fname, known_hash=None, fname=fname, path=directory ) # Create the registry file from the downloaded data files pooch.make_registry("data_files", "registry.txt") If each data file has its own url, the registry file can be manually created after downloading each data file through :func:`pooch.retrieve`: .. code:: python import os # Names and urls of the data files. The file names are used for naming the # downloaded files. These are the names that will be included in the registry. 
fnames_and_urls = { "c137.csv": "https://www.some-data-hosting-site.com/c137/data.csv", "cronen.csv": "https://www.some-data-hosting-site.com/cronen/data.csv", "citadel.csv": "https://www.some-data-hosting-site.com/citadel/data.csv", } # Create a new directory where all files will be downloaded directory = "data_files" os.makedirs(directory) # Create a new registry file with open("registry.txt", "w") as registry: for fname, url in fnames_and_urls.items(): # Download each data file to the specified directory path = pooch.retrieve( url=url, known_hash=None, fname=fname, path=directory ) # Add the name, hash, and url of the file to the new registry file registry.write( f"{fname} {pooch.file_hash(path)} {url}\n" ) .. warning:: Notice that there are **no checks for download integrity** (since we don't know the file hashes before hand). Only do this for trusted data sources and over a secure connection. If you have access to file hashes/checksums, **we highly recommend using them** to set the ``known_hash`` argument. pooch-1.8.2/doc/retrieve.rst000066400000000000000000000064741463036314300157740ustar00rootroot00000000000000.. _retrieve: Retrieving a single data file ============================= Basic usage ----------- If you only want to download one or two data files, use the :func:`pooch.retrieve` function: .. code-block:: python import pooch file_path = pooch.retrieve( # URL to one of Pooch's test files url="https://github.com/fatiando/pooch/raw/v1.0.0/data/tiny-data.txt", known_hash="md5:70e2afd3fd7e336ae478b1e740a5f08e", ) The code above will: 1. Check if the file from this URL already exists in Pooch's default cache folder (see :func:`pooch.os_cache`). 2. If it doesn't, the file is downloaded and saved to the cache folder. 3. The MD5 `hash `__ is compared against the ``known_hash`` to make sure the file isn't corrupted. 4. The function returns the absolute path to the file on your computer. If the file already existed on your machine, Pooch will check if it's MD5 hash matches the ``known_hash``: * If it does, no download happens and the file path is returned. * If it doesn't, the file is downloaded once more to get an updated version on your computer. Since the download happens only once, you can place this function call at the start of your script or Jupyter notebook without having to worry about repeat downloads. Anyone getting a copy of your code should also get the correct data file the first time they run it. .. seealso:: Pooch can handle multiple download protocols like HTTP, FTP, SFTP, and even download from repositories like `figshare `__ and `Zenodo `__ by using the DOI instead of a URL. See :ref:`protocols`. .. seealso:: You can use **different hashes** by specifying different algorithm names: ``sha256:XXXXXX``, ``sha1:XXXXXX``, etc. See :ref:`hashes`. Unknown file hash ----------------- If you don't know the hash of the file, you can set ``known_hash=None`` to bypass the check. :func:`~pooch.retrieve` will print a log message with the SHA256 hash of the downloaded file. **It's highly recommended that you copy and paste this hash into your code and use it as the** ``known_hash``. .. tip:: Setting the ``known_hash`` guarantees that the next time your code is run (by you or someone else) the exact same file is downloaded. This helps make the results of your code **reproducible**. Customizing the download ------------------------ The :func:`pooch.retrieve` function supports for all of Pooch's :ref:`downloaders ` and :ref:`processors `. 
You can use HTTP, FTP, and SFTP (even with :ref:`authentication `), :ref:`decompress files `, :ref:`unpack archives `, show :ref:`progress bars `, and more with a bit of configuration. When not to use ``retrieve`` ---------------------------- If you need to manage the download and caching of several files from one or more sources, then you should start using the full capabilities of the :class:`pooch.Pooch` class. It can handle sandboxing data for different package versions, allow users to set the download locations, and more. The classic example is a **Python package that contains several sample datasets** for use in testing and documentation. See :ref:`beginner` and :ref:`intermediate` to get started. pooch-1.8.2/doc/sample-data.rst000066400000000000000000000165011463036314300163270ustar00rootroot00000000000000.. _intermediate: Manage a package's sample data ============================== In this section, we'll use Pooch to manage the download of a Python package's sample datasets. .. note:: The setup will be very similar to what we saw in :ref:`beginner`. It may be helpful to read that first. The problem ----------- In this example, we'll work with the follow assumptions: * You develop a Python library called ``plumbus`` for analysing data emitted by interdimensional portals. * You want to distribute sample data so that your users can easily try out the library by copying and pasting from the documentation. * You want to have a ``plumbus.datasets`` module that defines functions like ``fetch_c137()`` that will return the data loaded as a :class:`pandas.DataFrame` for convenient access. * Your sample data are in a folder of your GitHub repository but you don't want to include the data files with your source and wheel distributions because of their size. * You use git tags to mark releases of your project. * Your project has a variable that defines the version string. * The version string contains an indicator that the current commit is not a release (like ``'v1.2.3+12.d908jdl'`` or ``'v0.1+dev'``). For now, let's say that this is the layout of your repository on GitHub: .. code-block:: none doc/ ... data/ README.md c137.csv cronen.csv plumbus/ __init__.py ... datasets.py setup.py ... The sample data are stored in the ``data`` folder of your repository. .. seealso:: Pooch can handle different use cases as well, like: FTP/SFTP, authenticated HTTP, multiple URLs, decompressing and unpacking archives, etc. See the tutorials under "Training your Pooch" and the documentation for :func:`pooch.create` and :func:`pooch.Pooch` for more options. Basic setup ----------- This is what the ``plumbus/datasets.py`` file would look like: .. code:: python """ Load sample data. """ import pandas import pooch from . import version # The version string of your project BRIAN = pooch.create( # Use the default cache folder for the operating system path=pooch.os_cache("plumbus"), # The remote data is on Github base_url="https://github.com/rick/plumbus/raw/{version}/data/", version=version, # If this is a development version, get the data from the "main" branch version_dev="main", registry={ "c137.csv": "sha256:19uheidhlkjdwhoiwuhc0uhcwljchw9ochwochw89dcgw9dcgwc", "cronen.csv": "sha256:1upodh2ioduhw9celdjhlfvhksgdwikdgcowjhcwoduchowjg8w", }, ) def fetch_c137(): """ Load the C-137 sample data as a pandas.DataFrame. """ # The file will be downloaded automatically the first time this is run # returns the file path to the downloaded file. Afterwards, Pooch finds # it in the local cache and doesn't repeat the download. 
fname = BRIAN.fetch("c137.csv") # The "fetch" method returns the full path to the downloaded data file. # All we need to do now is load it with our standard Python tools. data = pandas.read_csv(fname) return data def fetch_cronen(): """ Load the Cronenberg sample data as a pandas.DataFrame. """ fname = BRIAN.fetch("cronen.csv") data = pandas.read_csv(fname) return data The ``BRIAN`` variable captures the value returned by :func:`pooch.create`, which is an instance of the :class:`~pooch.Pooch` class. The class contains the data registry (files, URLs, hashes, etc) and handles downloading files from the registry using the :meth:`~pooch.Pooch.fetch` method. When the user calls ``plumbus.datasets.fetch_c137()`` for the first time, the data file will be downloaded and stored in the local storage. .. tip:: We're using :func:`pooch.os_cache` to set the local folder to the default cache location for the user's operating system. You could also provide any other path if you prefer. Versioning ---------- The files from different version of your project will be kept in separate folders to make sure they don't conflict with each other. This way, you can safely update data files while maintaining backward compatibility. For example, if ``path=".plumbus"`` and ``version="v0.1"``, the data folder will be ``.plumbus/v0.1``. When your project updates, Pooch will automatically setup a separate folder for the new data files based on the given version string. The remote URL will also be updated. Notice that there is a format specifier ``{version}`` in the URL that Pooch substitutes for you. **Versioning is optional** and can be ignored by omitting the ``version`` and ``version_dev`` arguments or setting them to ``None``. Retry failed downloads ---------------------- When downloading data repeatedly, like in continuous integration, failures can occur due to sporadic network outages or other factors outside of our control. In these cases, it can be frustrating to have entire jobs fail because a single download was not successful. Pooch allows you to specify a number of times to retry the download in case of failure by setting ``retry_if_failed`` in :func:`pooch.create`. This setting will be valid for all downloads attempted with :meth:`pooch.Pooch.fetch`. The download can fail because the file hash doesn't match the known hash (due to a partial download, for example) or because of network errors coming from :mod:`requests`. Other errors (file system permission errors, etc) will still result in a failed download. .. note:: Requires Pooch >= 1.3.0. Disable file updates for testing -------------------------------- Sometimes we can forget to update the hash of a file in the registry when we change one of the existing data files. If this happens in a pull request or any branch that is not the default, Pooch will detect that there is a mismatch and will update the local file by re-downloading (usually from the default development branch). If your tests don't check the file contents exactly (which is usually not practical), you can have tests that pass on development or continuous integration and then fail once a pull request is merged. In these cases, it is better to temporarily disallow file updates so that Pooch raises an error when the hash doesn't match (indicating that you forgot to update it). To do so, use the ``allow_updates`` argument in :func:`pooch.create`. Setting this to ``False`` will mean that a hash mismatch between local file and the registry always results in an error. .. 
tip:: We **do not recommend setting this permanently to** ``False``. Instead, set it to the name of an environment variable that activates this behaviour, like ``pooch.create(..., allow_updates="MYPROJECT_ALLOW_UPDATES")``. Then you can set ``MYPROJECT_ALLOW_UPDATES=false`` on continuous integration or when running your tests locally. .. note:: Requires Pooch >= 1.6.0. Where to go from here --------------------- Pooch has more features for handling different download protocols, handling large registries, downloading from multiple sources, and more. Check out the tutorials under "Training your Pooch" for more information. Most users will also benefit from reading at least: * :ref:`environmentvariable` * :ref:`hashes` * :ref:`registryfiles` pooch-1.8.2/doc/unpacking.rst000066400000000000000000000050511463036314300161140ustar00rootroot00000000000000.. _unpacking: Unpacking archives ================== Let's say our data file is actually a zip (or tar) archive with a collection of files. We may want to store an unpacked version of the archive or extract just a single file from it. We can do both operations with the :class:`pooch.Unzip` and :class:`pooch.Untar` processors. For example, to extract a single file from a zip archive: .. code:: python from pooch import Unzip def fetch_zipped_file(): """ Load a large zipped sample data as a pandas.DataFrame. """ # Extract the file "actual-data-file.txt" from the archive unpack = Unzip(members=["actual-data-file.txt"]) # Pass in the processor to unzip the data file fnames = GOODBOY.fetch("zipped-data-file.zip", processor=unpack) # Returns the paths of all extracted members (in our case, only one) fname = fnames[0] # fname is now the path of the unzipped file ("actual-data-file.txt") # which can be loaded by pandas directly data = pandas.read_csv(fname) return data By default, the :class:`~pooch.Unzip` processor (and similarly the :class:`~pooch.Untar` processor) will create a new folder in the same location as the downloaded archive file, and give it the same name as the archive file with the suffix ``.unzip`` (or ``.untar``) appended. If you want to change the location of the unpacked files, you can provide a parameter ``extract_dir`` to the processor to tell it where you want to unpack the files: .. code:: python from pooch import Untar def fetch_and_unpack_tar_file(): """ Unpack a file from a tar archive to a custom subdirectory in the cache. """ # Extract a single file from the archive, to a specific location unpack_to_custom_dir = Untar(members=["actual-data-file.txt"], extract_dir="custom_folder") # Pass in the processor to untar the data file fnames = GOODBOY.fetch("tarred-data-file.tar.gz", processor=unpack_to_custom_dir) # Returns the paths of all extracted members (in our case, only one) fname = fnames[0] return fname To extract all files into a folder and return the path to each file, omit the ``members`` parameter: .. code:: python def fetch_zipped_archive(): """ Load all files from a zipped archive. """ fnames = GOODBOY.fetch("zipped-archive.zip", processor=Unzip()) return fnames Use :class:`pooch.Untar` to do the exact same for tar archives (with optional compression). pooch-1.8.2/doc/user-defined-cache.rst000066400000000000000000000022321463036314300175460ustar00rootroot00000000000000.. _environmentvariable: User-defined cache location --------------------------- The location of the local storage cache on the users' computers is usually hard-coded when we call :func:`pooch.create`. There is no way for them to change it to something else.
To avoid being a tyrant, you can allow the user to define the cache location using an environment variable: .. code:: python BRIAN = pooch.create( # This is still the default path=pooch.os_cache("plumbus"), base_url="https://github.com/rick/plumbus/raw/{version}/data/", version=version, version_dev="main", registry={ "c137.csv": "19uheidhlkjdwhoiwuhc0uhcwljchw9ochwochw89dcgw9dcgwc", "cronen.csv": "1upodh2ioduhw9celdjhlfvhksgdwikdgcowjhcwoduchowjg8w", }, # The name of an environment variable that can overwrite the path env="PLUMBUS_DATA_DIR", ) In this case, if the user defines the ``PLUMBUS_DATA_DIR`` environment variable, Pooch use its value instead of ``path``. Pooch will still append the value of ``version`` to the path, so the value of ``PLUMBUS_DATA_DIR`` should not include a version number. pooch-1.8.2/doc/versions.rst000066400000000000000000000033631463036314300160110ustar00rootroot00000000000000Documentation for other versions -------------------------------- Use the links below to access documentation for specific versions (when in doubt, use the **latest release**): * `Latest release `__ * `Development `__ (reflects the current development branch on GitHub) * `v1.8.2 `__ * `v1.8.1 `__ * `v1.8.0 `__ * `v1.7.0 `__ * `v1.6.0 `__ * `v1.5.2 `__ * `v1.5.1 `__ * `v1.5.0 `__ * `v1.4.0 `__ * `v1.3.0 `__ * `v1.2.0 `__ * `v1.1.1 `__ * `v1.1.0 `__ * `v1.0.0 `__ * `v0.7.1 `__ * `v0.7.0 `__ * `v0.6.0 `__ * `v0.5.2 `__ * `v0.5.1 `__ * `v0.5.0 `__ * `v0.4.0 `__ * `v0.3.1 `__ * `v0.3.0 `__ * `v0.2.1 `__ * `v0.2.0 `__ * `v0.1.1 `__ * `v0.1 `__ pooch-1.8.2/env/000077500000000000000000000000001463036314300134255ustar00rootroot00000000000000pooch-1.8.2/env/requirements-build.txt000066400000000000000000000000421463036314300200020ustar00rootroot00000000000000# Requirements for building build pooch-1.8.2/env/requirements-docs.txt000066400000000000000000000001311463036314300176320ustar00rootroot00000000000000# Documentation requirements sphinx==7.2.* sphinx-book-theme==1.1.* sphinx-design==0.5.* pooch-1.8.2/env/requirements-style.txt000066400000000000000000000000731463036314300200470ustar00rootroot00000000000000# Style checks black flake8 pylint>=2.4 pathspec burocrata pooch-1.8.2/env/requirements-test.txt000066400000000000000000000001321463036314300176620ustar00rootroot00000000000000# Testing requirements pytest pytest-cov pytest-localftpserver pytest-httpserver coverage pooch-1.8.2/environment.yml000066400000000000000000000011171463036314300157240ustar00rootroot00000000000000name: pooch channels: - conda-forge - defaults dependencies: - python==3.11 - pip # Run - requests - packaging - platformdirs # Optional dependencies - tqdm>=4.41.0,<5.0.0 - paramiko>=2.7.0 - xxhash>=1.4.3 # Build - build # Test - pytest - pytest-cov - pytest-localftpserver - pytest-httpserver - coverage # Documentation - sphinx==7.2.* - sphinx-book-theme==1.1.* - sphinx-design==0.5.* # Style - pathspec - black>=20.8b1 - flake8 - pylint>=2.4 - pip: - burocrata pooch-1.8.2/paper/000077500000000000000000000000001463036314300137445ustar00rootroot00000000000000pooch-1.8.2/paper/paper.bib000066400000000000000000000061561463036314300155410ustar00rootroot00000000000000@article{scikit-learn, title={Scikit-learn: Machine Learning in {P}ython}, author={Pedregosa, F. and Varoquaux, G. and Gramfort, A. and Michel, V. and Thirion, B. and Grisel, O. and Blondel, M. and Prettenhofer, P. and Weiss, R. and Dubourg, V. and Vanderplas, J. and Passos, A. and Cournapeau, D. and Brucher, M. and Perrot, M. 
and Duchesnay, E.}, journal={Journal of Machine Learning Research}, volume={12}, pages={2825--2830}, year={2011} } @article{scikit-image, title={{scikit-image}: {image} processing in {Python}}, author={Van der Walt, Stefan and Sch{\"o}nberger, Johannes L and Nunez-Iglesias, Juan and Boulogne, Fran{\c{c}}ois and Warner, Joshua D and Yager, Neil and Gouillart, Emmanuelle and Yu, Tony}, journal={PeerJ}, volume={2}, pages={e453}, year={2014}, publisher={PeerJ Inc.}, doi={10.7717/peerj.453} } @software{metpy, title={MetPy: A {Python} Package for Meteorological Data}, author={May, Ryan M. and Arms, Sean C. and Marsh, Patrick and Bruning, Eric and Leeman, John R. and Goebbert, Kevin and Thielen, Jonathan E. and Bruck, Zachary}, organization={Unidata}, year={2008 - 2019}, version={0.11.1}, doi={10.5065/D6WW7G29}, url={https://github.com/Unidata/MetPy}, address={Boulder, Colorado} } @article{verde, title={Verde: Processing and gridding spatial data using {Green's} functions}, doi={10.21105/joss.00957}, url={https://doi.org/10.21105/joss.00957}, year={2018}, month=oct, publisher={The Open Journal}, volume={3}, number={30}, pages={957}, author={Leonardo Uieda}, journal={Journal of Open Source Software} } @misc{rockhound, doi={10.5281/ZENODO.3086002}, url={https://zenodo.org/record/3086002}, author={Uieda, Leonardo and Soler, Santiago R.}, language={en}, title={Rockhound: Download geophysical models/datasets and load them in {Python}}, publisher={Zenodo}, year={2019} } @misc{icepack, doi = {10.5281/ZENODO.3542092}, url = {https://zenodo.org/record/3542092}, author = {Shapero, Daniel and Lilien, David and Ham, David A. and Hoffman, Andrew}, title = {icepack/icepack: icepack: glacier flow modeling with the finite element method in {Python}}, publisher = {Zenodo}, year = {2019} } @misc{predictatops, doi = {10.5281/ZENODO.1450596}, url = {https://zenodo.org/record/1450596}, author = {Gosses, Justin}, title = {JustinGOSSES/predictatops: v0.0.4}, publisher = {Zenodo}, year = {2019} } @manual{cartopy, author = {{Met Office}}, title = {Cartopy: a cartographic {Python} library with a {Matplotlib} interface}, year = {2010 - 2015}, address = {Exeter, Devon}, url = {https://scitools.org.uk/cartopy} } @article{pyvista, doi = {10.21105/joss.01450}, url = {https://doi.org/10.21105/joss.01450}, year = {2019}, month = {may}, publisher = {The Open Journal}, volume = {4}, number = {37}, pages = {1450}, author = {C. 
Bane Sullivan and Alexander Kaszynski}, title = {{PyVista}: {3D} plotting and mesh analysis through a streamlined interface for the {Visualization} {Toolkit} ({VTK})}, journal = {Journal of Open Source Software} } pooch-1.8.2/paper/paper.md000066400000000000000000000165611463036314300154060ustar00rootroot00000000000000--- title: "Pooch: A friend to fetch your data files" tags: - python authors: - name: Leonardo Uieda orcid: 0000-0001-6123-9515 affiliation: 1 - name: Santiago Rubén Soler orcid: 0000-0001-9202-5317 affiliation: "2,3" - name: Rémi Rampin orcid: 0000-0002-0524-2282 affiliation: 4 - name: Hugo van Kemenade orcid: 0000-0001-5715-8632 affiliation: 5 - name: Matthew Turk orcid: 0000-0002-5294-0198 affiliation: 6 - name: Daniel Shapero orcid: 0000-0002-3651-0649 affiliation: 7 - name: Anderson Banihirwe orcid: 0000-0001-6583-571X affiliation: 8 - name: John Leeman orcid: 0000-0002-3624-1821 affiliation: 9 affiliations: - name: Department of Earth, Ocean and Ecological Sciences, School of Environmental Sciences, University of Liverpool, UK index: 1 - name: Instituto Geofísico Sismológico Volponi, Universidad Nacional de San Juan, Argentina index: 2 - name: CONICET, Argentina index: 3 - name: New York University, USA index: 4 - name: Independent (Non-affiliated) index: 5 - name: University of Illinois at Urbana-Champaign, USA index: 6 - name: Polar Science Center, University of Washington Applied Physics Lab, USA index: 7 - name: The US National Center for Atmospheric Research, USA index: 8 - name: Leeman Geophysical, USA index: 9 date: 02 December 2019 bibliography: paper.bib --- # Summary Scientific software is usually created to acquire, analyze, model, and visualize data. As such, many software libraries include sample datasets in their distributions for use in documentation, tests, benchmarks, and workshops. A common approach is to include smaller datasets in the GitHub repository directly and package them with the source and binary distributions (e.g., scikit-learn [@scikit-learn] and scikit-image [@scikit-image] do this). As data files increase in size, it becomes unfeasible to store them in GitHub repositories. Thus, larger datasets require writing code to download the files from a remote server to the user's computer. The same problem is faced by scientists using version control to manage their research projects. While downloading a data file over HTTPS can be done easily with modern Python libraries, it is not trivial to manage a set of files, keep them updated, and check for corruption. For example, scikit-learn [@scikit-learn], Cartopy [@cartopy], and PyVista [@pyvista] all include code dedicated to this particular task. Instead of scientists and library authors recreating the same code, it would be best to have a minimalistic and easy to set up tool for fetching and maintaining data files. Pooch is a Python library that fills this gap. It manages a data *registry* (containing file names, SHA-256 cryptographic hashes, and download URLs) by downloading files from one or more remote servers and storing them in a local data cache. Pooch is written in pure Python and has minimal dependencies. It can be easily installed from the Python Package Index (PyPI) and conda-forge on a wide range of Python versions: 2.7 (up to Pooch 0.6.0) and from 3.5 to 3.8. The integrity of downloads is verified by comparing the file's SHA-256 hash with the one stored in the data registry. This is also the mechanism used to detect if a file needs to be re-downloaded due to an update in the registry. 
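From the user's perspective this check is a single call; a minimal sketch using one of Pooch's own test files (the same example used in the project documentation) looks like:

```python
import pooch

# Download on the first call and reuse the cached copy afterwards.
# The SHA-256/MD5 hash of the local file is compared against the known
# hash on every call, and a mismatch triggers a fresh download.
fname = pooch.retrieve(
    url="https://github.com/fatiando/pooch/raw/v1.0.0/data/tiny-data.txt",
    known_hash="md5:70e2afd3fd7e336ae478b1e740a5f08e",
)
```
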
Pooch is meant to be a drop-in replacement for the custom download code that users have already written (or are planning to write). In the ideal scenario, the end-user of a software package should not need to know that Pooch is being used. Setup is as easy as calling a single function (`pooch.create`), including setting up an environment variable for overwriting the data cache path and versioning the downloads so that multiple versions of the same package can coexist in the same machine. For example, this is the code required to set up a module `datasets.py` that uses Pooch to manage data downloads: ```python import pooch # Get the version string from the project from . import version # Create a new instance of pooch.Pooch GOODBOY = pooch.create( # Cache path using the default for the operating system path=pooch.os_cache("myproject"), # Base URL of the remote data server (for example, on GitHub) base_url="https://github.com/me/myproject/raw/{version}/data/", # PEP 440 compliant version number (added to path and base_url) version=version, # An environment variable that overwrites the path env="MYPROJECT_DATA_DIR", ) # Load the registry from a simple text file. # Each line has: file_name sha256 [url] GOODBOY.load_registry("registry.txt") def fetch_some_data(): # Get the path to the data file in the local cache # If it's not there or needs updating, download it fname = GOODBOY.fetch("some-data.csv") # Load it with NumPy/pandas/xarray/etc. data = pandas.read_csv(fname) return data ``` Pooch is designed to be extended: users can plug in custom download functions and post-download processing functions. For example, a custom download function could fetch files from a password-protected FTP server (the default is HTTP/HTTPS or anonymous FTP) and a processing function could decrypt a file using a user-defined password once the download is completed. We include ready-made download functions for HTTP and FTP (including basic authentication) as well as processing functions for unpacking archives (zip or tar) and decompressing files (gzip, lzma, and bzip2). To the best of the authors' awareness, the only other Python software with some overlapping functionality are [Intake](https://github.com/intake/intake) and [fsspec](https://github.com/intake/filesystem_spec) (which is used by Intake). The fsspec library provides a unified interface for defining file systems and opening files, regardless of where the files are located (local system, HTTPS/FTP servers, Amazon S3, Google Cloud Storage, etc). fsspec implements similar download and caching functionality to the one in Pooch, but has a wider range of download methods available. In the future, fsspec could be used as a backend to expand Pooch's download capabilities beyond HTTPS and FTP. Intake manages data catalogues (with download locations and extensive metadata), data download and caching, data loading, visualization, and browsing. It has built-in capabilities for loading data into standard containers, including NumPy, pandas, and xarray. While Intake and fsspec are powerful and highly configurable tools, we argue that Pooch's strong points are its simplicity, straight-forward documentation, and focus on solving a single problem. The Pooch API is stable and has been field-tested by other projects: MetPy [@metpy], Verde [@verde], RockHound [@rockhound], predictatops [@predictatops], and icepack [@icepack]. 
Pooch is also being implemented as the download manager for scikit-image ([GitHub pull request number 3945](https://github.com/scikit-image/scikit-image/pull/3945)), which will allow the project to use larger sample data while simultaneously reducing the download size of source and binary distributions. # Acknowledgements We would like to thank all of the volunteers who have dedicated their time and energy to build the open-source ecosystem on which our work relies. The order of authors is based on number of commits to the GitHub repository. A full list of all contributors to the project can be found on the [GitHub repository](https://github.com/fatiando/pooch/graphs/contributors). # References pooch-1.8.2/pooch/000077500000000000000000000000001463036314300137455ustar00rootroot00000000000000pooch-1.8.2/pooch/__init__.py000066400000000000000000000035321463036314300160610ustar00rootroot00000000000000# Copyright (c) 2018 The Pooch Developers. # Distributed under the terms of the BSD 3-Clause License. # SPDX-License-Identifier: BSD-3-Clause # # This code is part of the Fatiando a Terra project (https://www.fatiando.org) # # pylint: disable=missing-docstring,import-outside-toplevel,import-self # # Import functions/classes to make the API from .core import Pooch, create, retrieve from .utils import os_cache, check_version, get_logger from .hashes import file_hash, make_registry from .downloaders import ( HTTPDownloader, FTPDownloader, SFTPDownloader, DOIDownloader, ) from .processors import Unzip, Untar, Decompress # This file is generated automatically by setuptools_scm from . import _version # Add a "v" to the version number __version__ = f"v{_version.version}" def test(doctest=True, verbose=True, coverage=False): """ Run the test suite. Uses `py.test `__ to discover and run the tests. Parameters ---------- doctest : bool If ``True``, will run the doctests as well (code examples that start with a ``>>>`` in the docs). verbose : bool If ``True``, will print extra information during the test run. coverage : bool If ``True``, will run test coverage analysis on the code as well. Requires ``pytest-cov``. Raises ------ AssertionError If pytest returns a non-zero error code indicating that some tests have failed. """ import pytest package = __name__ args = [] if verbose: args.append("-vv") if coverage: args.append(f"--cov={package}") args.append("--cov-report=term-missing") if doctest: args.append("--doctest-modules") args.append("--pyargs") args.append(package) status = pytest.main(args) assert status == 0, "Some tests have failed." pooch-1.8.2/pooch/core.py000066400000000000000000000772141463036314300152620ustar00rootroot00000000000000# Copyright (c) 2018 The Pooch Developers. # Distributed under the terms of the BSD 3-Clause License. # SPDX-License-Identifier: BSD-3-Clause # # This code is part of the Fatiando a Terra project (https://www.fatiando.org) # """ The main Pooch class and a factory function for it. """ import os import time import contextlib from pathlib import Path import shlex import shutil from .hashes import hash_matches, file_hash from .utils import ( check_version, get_logger, make_local_storage, cache_location, temporary_file, os_cache, unique_file_name, ) from .downloaders import DOIDownloader, choose_downloader, doi_to_repository def retrieve( url, known_hash, fname=None, path=None, processor=None, downloader=None, progressbar=False, ): """ Download and cache a single file locally. Uses HTTP or FTP by default, depending on the protocol in the given *url*. 
Other download methods can be controlled through the *downloader* argument (see below). The file will be downloaded to a temporary location first and its hash will be compared to the given *known_hash*. This is done to ensure that the download happened correctly and securely. If the hash doesn't match, the file will be deleted and an exception will be raised. If the file already exists locally, its hash will be compared to *known_hash*. If they are not the same, this is interpreted as the file needing to be updated and it will be downloaded again. You can bypass these checks by passing ``known_hash=None``. If this is done, the SHA256 hash of the downloaded file will be logged to the screen. It is highly recommended that you copy and paste this hash as *known_hash* so that future downloads are guaranteed to be the exact same file. This is crucial for reproducible computations. If the file exists in the given *path* with the given *fname* and the hash matches, it will not be downloaded and the absolute path to the file will be returned. .. note:: This function is meant for downloading single files. If you need to manage the download and caching of several files, with versioning, use :func:`pooch.create` and :class:`pooch.Pooch` instead. Parameters ---------- url : str The URL to the file that is to be downloaded. Ideally, the URL should end in a file name. known_hash : str or None A known hash (checksum) of the file. Will be used to verify the download or check if an existing file needs to be updated. By default, will assume it's a SHA256 hash. To specify a different hashing method, prepend the hash with ``algorithm:``, for example ``md5:pw9co2iun29juoh`` or ``sha1:092odwhi2ujdp2du2od2odh2wod2``. If None, will NOT check the hash of the downloaded file or check if an existing file needs to be updated. fname : str or None The name that will be used to save the file. Should NOT include the full path, just the file name (it will be appended to *path*). If None, will create a unique file name using a combination of the last part of the URL (assuming it's the file name) and the MD5 hash of the URL. For example, ``81whdo2d2e928yd1wi22-data-file.csv``. This ensures that files from different URLs never overwrite each other, even if they have the same name. path : str or PathLike or None The location of the cache folder on disk. This is where the file will be saved. If None, will save to a ``pooch`` folder in the default cache location for your operating system (see :func:`pooch.os_cache`). processor : None or callable If not None, then a function (or callable object) that will be called before returning the full path and after the file has been downloaded (if required). See :ref:`processors` for details. downloader : None or callable If not None, then a function (or callable object) that will be called to download a given URL to a provided local file name. See :ref:`downloaders` for details. progressbar : bool or an arbitrary progress bar object If True, will print a progress bar of the download to standard error (stderr). Requires `tqdm `__ to be installed. Alternatively, an arbitrary progress bar object can be passed. See :ref:`custom-progressbar` for details. Returns ------- full_path : str The absolute path (including the file name) of the file in the local storage. 
Examples -------- Download one of the data files from the Pooch repository on GitHub: >>> import os >>> from pooch import __version__, check_version, retrieve >>> # Make a URL for the version of pooch we have installed >>> url = "https://github.com/fatiando/pooch/raw/{}/data/tiny-data.txt" >>> url = url.format(check_version(__version__, fallback="main")) >>> # Download the file and save it locally. Will check the MD5 checksum of >>> # the downloaded file against the given value to make sure it's the >>> # right file. You can use other hashes by specifying different >>> # algorithm names (sha256, sha1, etc). >>> fname = retrieve( ... url, known_hash="md5:70e2afd3fd7e336ae478b1e740a5f08e", ... ) >>> with open(fname) as f: ... print(f.read().strip()) # A tiny data file for test purposes only 1 2 3 4 5 6 >>> # Running again won't trigger a download and only return the path to >>> # the existing file. >>> fname2 = retrieve( ... url, known_hash="md5:70e2afd3fd7e336ae478b1e740a5f08e", ... ) >>> print(fname2 == fname) True >>> os.remove(fname) Files that are compressed with gzip, xz/lzma, or bzip2 can be automatically decompressed by passing using the :class:`pooch.Decompress` processor: >>> from pooch import Decompress >>> # URLs to a gzip compressed version of the data file. >>> url = ("https://github.com/fatiando/pooch/raw/{}/" ... + "pooch/tests/data/tiny-data.txt.gz") >>> url = url.format(check_version(__version__, fallback="main")) >>> # By default, you would have to decompress the file yourself >>> fname = retrieve( ... url, ... known_hash="md5:8812ba10b6c7778014fdae81b03f9def", ... ) >>> print(os.path.splitext(fname)[1]) .gz >>> # Use the processor to decompress after download automatically and >>> # return the path to the decompressed file instead. >>> fname2 = retrieve( ... url, ... known_hash="md5:8812ba10b6c7778014fdae81b03f9def", ... processor=Decompress(), ... ) >>> print(fname2 == fname) False >>> with open(fname2) as f: ... print(f.read().strip()) # A tiny data file for test purposes only 1 2 3 4 5 6 >>> os.remove(fname) >>> os.remove(fname2) When downloading archives (zip or tar), it can be useful to unpack them after download to avoid having to do that yourself. Use the processors :class:`pooch.Unzip` or :class:`pooch.Untar` to do this automatically: >>> from pooch import Unzip >>> # URLs to a zip archive with a single data file. >>> url = ("https://github.com/fatiando/pooch/raw/{}/" ... + "pooch/tests/data/tiny-data.zip") >>> url = url.format(check_version(__version__, fallback="main")) >>> # By default, you would get the path to the archive >>> fname = retrieve( ... url, ... known_hash="md5:e9592cb46cf3514a1079051f8a148148", ... ) >>> print(os.path.splitext(fname)[1]) .zip >>> os.remove(fname) >>> # Using the processor, the archive will be unzipped and a list with the >>> # path to every file will be returned instead of a single path. >>> fnames = retrieve( ... url, ... known_hash="md5:e9592cb46cf3514a1079051f8a148148", ... processor=Unzip(), ... ) >>> # There was only a single file in our archive. >>> print(len(fnames)) 1 >>> with open(fnames[0]) as f: ... print(f.read().strip()) # A tiny data file for test purposes only 1 2 3 4 5 6 >>> for f in fnames: ... os.remove(f) """ if path is None: path = os_cache("pooch") if fname is None: fname = unique_file_name(url) # Make the path absolute. 
path = cache_location(path, env=None, version=None) full_path = path.resolve() / fname action, verb = download_action(full_path, known_hash) if action in ("download", "update"): # We need to write data, so create the local data directory if it # doesn't already exist. make_local_storage(path) get_logger().info( "%s data from '%s' to file '%s'.", verb, url, str(full_path), ) if downloader is None: downloader = choose_downloader(url, progressbar=progressbar) stream_download(url, full_path, known_hash, downloader, pooch=None) if known_hash is None: get_logger().info( "SHA256 hash of downloaded file: %s\n" "Use this value as the 'known_hash' argument of 'pooch.retrieve'" " to ensure that the file hasn't changed if it is downloaded again" " in the future.", file_hash(str(full_path)), ) if processor is not None: return processor(str(full_path), action, None) return str(full_path) def create( path, base_url, version=None, version_dev="master", env=None, registry=None, urls=None, retry_if_failed=0, allow_updates=True, ): """ Create a :class:`~pooch.Pooch` with sensible defaults to fetch data files. If a version string is given, the Pooch will be versioned, meaning that the local storage folder and the base URL depend on the project version. This is necessary if your users have multiple versions of your library installed (using virtual environments) and you updated the data files between versions. Otherwise, every time a user switches environments would trigger a re-download of the data. The version string will be appended to the local storage path (for example, ``~/.mypooch/cache/v0.1``) and inserted into the base URL (for example, ``https://github.com/fatiando/pooch/raw/v0.1/data``). If the version string contains ``+XX.XXXXX``, it will be interpreted as a development version. Does **not** create the local data storage folder. The folder will only be created the first time a download is attempted with :meth:`pooch.Pooch.fetch`. This makes it safe to use this function at the module level (so it's executed on ``import`` and the resulting :class:`~pooch.Pooch` is a global variable). Parameters ---------- path : str, PathLike, list or tuple The path to the local data storage folder. If this is a list or tuple, we'll join the parts with the appropriate separator. The *version* will be appended to the end of this path. Use :func:`pooch.os_cache` for a sensible default. base_url : str Base URL for the remote data source. All requests will be made relative to this URL. The string should have a ``{version}`` formatting mark in it. We will call ``.format(version=version)`` on this string. If the URL does not end in a ``'/'``, a trailing ``'/'`` will be added automatically. version : str or None The version string for your project. Should be PEP440 compatible. If None is given, will not attempt to format *base_url* and no subfolder will be appended to *path*. version_dev : str The name used for the development version of a project. If your data is hosted on Github (and *base_url* is a Github raw link), then ``"master"`` is a good choice (default). Ignored if *version* is None. env : str or None An environment variable that can be used to overwrite *path*. This allows users to control where they want the data to be stored. We'll append *version* to the end of this value as well. registry : dict or None A record of the files that are managed by this Pooch. Keys should be the file names and the values should be their hashes. Only files in the registry can be fetched from the local storage. 
Files in subdirectories of *path* **must use Unix-style separators** (``'/'``) even on Windows. urls : dict or None Custom URLs for downloading individual files in the registry. A dictionary with the file names as keys and the custom URLs as values. Not all files in *registry* need an entry in *urls*. If a file has an entry in *urls*, the *base_url* will be ignored when downloading it in favor of ``urls[fname]``. retry_if_failed : int Retry a file download the specified number of times if it fails because of a bad connection or a hash mismatch. By default, downloads are only attempted once (``retry_if_failed=0``). Initially, will wait for 1s between retries and then increase the wait time by 1s with each retry until a maximum of 10s. allow_updates : bool or str Whether existing files in local storage that have a hash mismatch with the registry are allowed to update from the remote URL. If a string is passed, we will assume it's the name of an environment variable that will be checked for the true/false value. If ``False``, any mismatch with hashes in the registry will result in an error. Defaults to ``True``. Returns ------- pooch : :class:`~pooch.Pooch` The :class:`~pooch.Pooch` initialized with the given arguments. Examples -------- Create a :class:`~pooch.Pooch` for a release (v0.1): >>> pup = create(path="myproject", ... base_url="http://some.link.com/{version}/", ... version="v0.1", ... registry={"data.txt": "9081wo2eb2gc0u..."}) >>> print(pup.path.parts) # The path is a pathlib.Path ('myproject', 'v0.1') >>> # The local folder is only created when a dataset is first downloaded >>> print(pup.path.exists()) False >>> print(pup.base_url) http://some.link.com/v0.1/ >>> print(pup.registry) {'data.txt': '9081wo2eb2gc0u...'} >>> print(pup.registry_files) ['data.txt'] If this is a development version (12 commits ahead of v0.1), then the ``version_dev`` will be used (defaults to ``"master"``): >>> pup = create(path="myproject", ... base_url="http://some.link.com/{version}/", ... version="v0.1+12.do9iwd") >>> print(pup.path.parts) ('myproject', 'master') >>> print(pup.base_url) http://some.link.com/master/ Versioning is optional (but highly encouraged): >>> pup = create(path="myproject", ... base_url="http://some.link.com/", ... registry={"data.txt": "9081wo2eb2gc0u..."}) >>> print(pup.path.parts) # The path is a pathlib.Path ('myproject',) >>> print(pup.base_url) http://some.link.com/ To place the storage folder at a subdirectory, pass in a list and we'll join the path for you using the appropriate separator for your operating system: >>> pup = create(path=["myproject", "cache", "data"], ... base_url="http://some.link.com/{version}/", ... version="v0.1") >>> print(pup.path.parts) ('myproject', 'cache', 'data', 'v0.1') The user can overwrite the storage path by setting an environment variable: >>> # The variable is not set so we'll use *path* >>> pup = create(path=["myproject", "not_from_env"], ... base_url="http://some.link.com/{version}/", ... version="v0.1", ... env="MYPROJECT_DATA_DIR") >>> print(pup.path.parts) ('myproject', 'not_from_env', 'v0.1') >>> # Set the environment variable and try again >>> import os >>> os.environ["MYPROJECT_DATA_DIR"] = os.path.join("myproject", "env") >>> pup = create(path=["myproject", "not_env"], ... base_url="http://some.link.com/{version}/", ... version="v0.1", ... 
env="MYPROJECT_DATA_DIR") >>> print(pup.path.parts) ('myproject', 'env', 'v0.1') """ if version is not None: version = check_version(version, fallback=version_dev) base_url = base_url.format(version=version) # Don't create the cache folder here! This function is usually called in # the module context (at import time), so touching the file system is not # recommended. It could cause crashes when multiple processes/threads try # to import at the same time (which would try to create the folder several # times at once). path = cache_location(path, env, version) if isinstance(allow_updates, str): allow_updates = os.environ.get(allow_updates, "true").lower() != "false" # add trailing "/" base_url = base_url.rstrip("/") + "/" pup = Pooch( path=path, base_url=base_url, registry=registry, urls=urls, retry_if_failed=retry_if_failed, allow_updates=allow_updates, ) return pup class Pooch: """ Manager for a local data storage that can fetch from a remote source. Avoid creating ``Pooch`` instances directly. Use :func:`pooch.create` instead. Parameters ---------- path : str The path to the local data storage folder. The path must exist in the file system. base_url : str Base URL for the remote data source. All requests will be made relative to this URL. registry : dict or None A record of the files that are managed by this good boy. Keys should be the file names and the values should be their hashes. Only files in the registry can be fetched from the local storage. Files in subdirectories of *path* **must use Unix-style separators** (``'/'``) even on Windows. urls : dict or None Custom URLs for downloading individual files in the registry. A dictionary with the file names as keys and the custom URLs as values. Not all files in *registry* need an entry in *urls*. If a file has an entry in *urls*, the *base_url* will be ignored when downloading it in favor of ``urls[fname]``. retry_if_failed : int Retry a file download the specified number of times if it fails because of a bad connection or a hash mismatch. By default, downloads are only attempted once (``retry_if_failed=0``). Initially, will wait for 1s between retries and then increase the wait time by 1s with each retry until a maximum of 10s. allow_updates : bool Whether existing files in local storage that have a hash mismatch with the registry are allowed to update from the remote URL. If ``False``, any mismatch with hashes in the registry will result in an error. Defaults to ``True``. """ def __init__( self, path, base_url, registry=None, urls=None, retry_if_failed=0, allow_updates=True, ): self.path = path self.base_url = base_url if registry is None: registry = {} self.registry = registry if urls is None: urls = {} self.urls = dict(urls) self.retry_if_failed = retry_if_failed self.allow_updates = allow_updates @property def abspath(self): "Absolute path to the local storage" return Path(os.path.abspath(os.path.expanduser(str(self.path)))) @property def registry_files(self): "List of file names on the registry" return list(self.registry) def fetch(self, fname, processor=None, downloader=None, progressbar=False): """ Get the absolute path to a file in the local storage. If it's not in the local storage, it will be downloaded. If the hash of the file in local storage doesn't match the one in the registry, will download a new copy of the file. This is considered a sign that the file was updated in the remote storage. 
If the hash of the downloaded file still doesn't match the one in the registry, will raise an exception to warn of possible file corruption. Post-processing actions sometimes need to be taken on downloaded files (unzipping, conversion to a more efficient format, etc). If these actions are time or memory consuming, it would be best to do this only once right after the file is downloaded. Use the *processor* argument to specify a function that is executed after the download to perform these actions. See :ref:`processors` for details. Custom file downloaders can be provided through the *downloader* argument. By default, Pooch will determine the download protocol from the URL in the registry. If the server for a given file requires authentication (username and password), use a downloader that support these features. Downloaders can also be used to print custom messages (like a progress bar), etc. See :ref:`downloaders` for details. Parameters ---------- fname : str The file name (relative to the *base_url* of the remote data storage) to fetch from the local storage. processor : None or callable If not None, then a function (or callable object) that will be called before returning the full path and after the file has been downloaded. See :ref:`processors` for details. downloader : None or callable If not None, then a function (or callable object) that will be called to download a given URL to a provided local file name. See :ref:`downloaders` for details. progressbar : bool or an arbitrary progress bar object If True, will print a progress bar of the download to standard error (stderr). Requires `tqdm `__ to be installed. Alternatively, an arbitrary progress bar object can be passed. See :ref:`custom-progressbar` for details. Returns ------- full_path : str The absolute path (including the file name) of the file in the local storage. """ self._assert_file_in_registry(fname) url = self.get_url(fname) full_path = self.abspath / fname known_hash = self.registry[fname] action, verb = download_action(full_path, known_hash) if action == "update" and not self.allow_updates: raise ValueError( f"{fname} needs to update {full_path} but updates are disallowed." ) if action in ("download", "update"): # We need to write data, so create the local data directory if it # doesn't already exist. make_local_storage(str(self.abspath)) get_logger().info( "%s file '%s' from '%s' to '%s'.", verb, fname, url, str(self.abspath), ) if downloader is None: downloader = choose_downloader(url, progressbar=progressbar) stream_download( url, full_path, known_hash, downloader, pooch=self, retry_if_failed=self.retry_if_failed, ) if processor is not None: return processor(str(full_path), action, self) return str(full_path) def _assert_file_in_registry(self, fname): """ Check if a file is in the registry and raise :class:`ValueError` if it's not. """ if fname not in self.registry: raise ValueError(f"File '{fname}' is not in the registry.") def get_url(self, fname): """ Get the full URL to download a file in the registry. Parameters ---------- fname : str The file name (relative to the *base_url* of the remote data storage) to fetch from the local storage. """ self._assert_file_in_registry(fname) return self.urls.get(fname, "".join([self.base_url, fname])) def load_registry(self, fname): """ Load entries from a file and add them to the registry. Use this if you are managing many files. Each line of the file should have file name and its hash separated by a space. Hash can specify checksum algorithm using "alg:hash" format. 
In case no algorithm is provided, SHA256 is used by default. Only one file per line is allowed. Custom download URLs for individual files can be specified as a third element on the line. Line comments can be added and must be prepended with ``#``. Parameters ---------- fname : str | fileobj Path (or open file object) to the registry file. """ with contextlib.ExitStack() as stack: if hasattr(fname, "read"): # It's a file object fin = fname else: # It's a file path fin = stack.enter_context(open(fname, encoding="utf-8")) for linenum, line in enumerate(fin): if isinstance(line, bytes): line = line.decode("utf-8") line = line.strip() # skip line comments if line.startswith("#"): continue elements = shlex.split(line) if not len(elements) in [0, 2, 3]: raise OSError( f"Invalid entry in Pooch registry file '{fname}': " f"expected 2 or 3 elements in line {linenum + 1} but got " f"{len(elements)}. Offending entry: '{line}'" ) if elements: file_name = elements[0] file_checksum = elements[1] if len(elements) == 3: file_url = elements[2] self.urls[file_name] = file_url self.registry[file_name] = file_checksum.lower() def load_registry_from_doi(self): """ Populate the registry using the data repository API Fill the registry with all the files available in the data repository, along with their hashes. It will make a request to the data repository API to retrieve this information. No file is downloaded during this process. .. important:: This method is intended to be used only when the ``base_url`` is a DOI. """ # Ensure that this is indeed a DOI-based pooch downloader = choose_downloader(self.base_url) if not isinstance(downloader, DOIDownloader): raise ValueError( f"Invalid base_url '{self.base_url}': " + "Pooch.load_registry_from_doi is only implemented for DOIs" ) # Create a repository instance doi = self.base_url.replace("doi:", "") repository = doi_to_repository(doi) # Call registry population for this repository return repository.populate_registry(self) def is_available(self, fname, downloader=None): """ Check availability of a remote file without downloading it. Use this method when working with large files to check if they are available for download. Parameters ---------- fname : str The file name (relative to the *base_url* of the remote data storage). downloader : None or callable If not None, then a function (or callable object) that will be called to check the availability of the file on the server. See :ref:`downloaders` for details. Returns ------- status : bool True if the file is available for download. False otherwise. """ self._assert_file_in_registry(fname) url = self.get_url(fname) if downloader is None: downloader = choose_downloader(url) try: available = downloader(url, None, self, check_only=True) except TypeError as error: error_msg = ( f"Downloader '{str(downloader)}' does not support availability checks." ) raise NotImplementedError(error_msg) from error return available def download_action(path, known_hash): """ Determine the action that is needed to get the file on disk. Parameters ---------- path : PathLike The path to the file on disk. known_hash : str A known hash (checksum) of the file. Will be used to verify the download or check if an existing file needs to be updated. By default, will assume it's a SHA256 hash. To specify a different hashing method, prepend the hash with ``algorithm:``, for example ``md5:pw9co2iun29juoh`` or ``sha1:092odwhi2ujdp2du2od2odh2wod2``. 
Returns ------- action, verb : str The action that must be taken and the English verb (infinitive form of *action*) used in the log: * ``'download'``: File does not exist locally and must be downloaded. * ``'update'``: File exists locally but needs to be updated. * ``'fetch'``: File exists locally and only need to inform its path. """ if not path.exists(): action = "download" verb = "Downloading" elif not hash_matches(str(path), known_hash): action = "update" verb = "Updating" else: action = "fetch" verb = "Fetching" return action, verb def stream_download(url, fname, known_hash, downloader, pooch=None, retry_if_failed=0): """ Stream the file and check that its hash matches the known one. The file is first downloaded to a temporary file name in the cache folder. It will be moved to the desired file name only if the hash matches the known hash. Otherwise, the temporary file is deleted. If the download fails for either a bad connection or a hash mismatch, we will retry the download the specified number of times in case the failure was due to a network error. """ # Lazy import requests to speed up import time import requests.exceptions # pylint: disable=C0415 # Ensure the parent directory exists in case the file is in a subdirectory. # Otherwise, move will cause an error. if not fname.parent.exists(): os.makedirs(str(fname.parent)) download_attempts = 1 + retry_if_failed max_wait = 10 for i in range(download_attempts): try: # Stream the file to a temporary so that we can safely check its # hash before overwriting the original. with temporary_file(path=str(fname.parent)) as tmp: downloader(url, tmp, pooch) hash_matches(tmp, known_hash, strict=True, source=str(fname.name)) shutil.move(tmp, str(fname)) break except (ValueError, requests.exceptions.RequestException): if i == download_attempts - 1: raise retries_left = download_attempts - (i + 1) get_logger().info( "Failed to download '%s'. " "Will attempt the download again %d more time%s.", str(fname.name), retries_left, "s" if retries_left > 1 else "", ) time.sleep(min(i + 1, max_wait)) pooch-1.8.2/pooch/downloaders.py000066400000000000000000001201271463036314300166430ustar00rootroot00000000000000# Copyright (c) 2018 The Pooch Developers. # Distributed under the terms of the BSD 3-Clause License. # SPDX-License-Identifier: BSD-3-Clause # # This code is part of the Fatiando a Terra project (https://www.fatiando.org) # """ The classes that actually handle the downloads. """ import os import sys import ftplib import warnings from .utils import parse_url try: from tqdm import tqdm except ImportError: tqdm = None try: import paramiko except ImportError: paramiko = None # Set the default timeout in seconds so it can be configured in a pinch for the # methods that don't or can't expose a way set it at runtime. # See https://github.com/fatiando/pooch/issues/409 DEFAULT_TIMEOUT = 30 def choose_downloader(url, progressbar=False): """ Choose the appropriate downloader for the given URL based on the protocol. Parameters ---------- url : str A URL (including protocol). progressbar : bool or an arbitrary progress bar object If True, will print a progress bar of the download to standard error (stderr). Requires `tqdm `__ to be installed. Alternatively, an arbitrary progress bar object can be passed. See :ref:`custom-progressbar` for details. Returns ------- downloader A downloader class, like :class:`pooch.HTTPDownloader`, :class:`pooch.FTPDownloader`, or :class: `pooch.SFTPDownloader`. 
Examples -------- >>> downloader = choose_downloader("http://something.com") >>> print(downloader.__class__.__name__) HTTPDownloader >>> downloader = choose_downloader("https://something.com") >>> print(downloader.__class__.__name__) HTTPDownloader >>> downloader = choose_downloader("ftp://something.com") >>> print(downloader.__class__.__name__) FTPDownloader >>> downloader = choose_downloader("doi:DOI/filename.csv") >>> print(downloader.__class__.__name__) DOIDownloader """ known_downloaders = { "ftp": FTPDownloader, "https": HTTPDownloader, "http": HTTPDownloader, "sftp": SFTPDownloader, "doi": DOIDownloader, } parsed_url = parse_url(url) if parsed_url["protocol"] not in known_downloaders: raise ValueError( f"Unrecognized URL protocol '{parsed_url['protocol']}' in '{url}'. " f"Must be one of {known_downloaders.keys()}." ) downloader = known_downloaders[parsed_url["protocol"]](progressbar=progressbar) return downloader class HTTPDownloader: # pylint: disable=too-few-public-methods """ Download manager for fetching files over HTTP/HTTPS. When called, downloads the given file URL into the specified local file. Uses the :mod:`requests` library to manage downloads. Use with :meth:`pooch.Pooch.fetch` or :func:`pooch.retrieve` to customize the download of files (for example, to use authentication or print a progress bar). Parameters ---------- progressbar : bool or an arbitrary progress bar object If True, will print a progress bar of the download to standard error (stderr). Requires `tqdm `__ to be installed. Alternatively, an arbitrary progress bar object can be passed. See :ref:`custom-progressbar` for details. chunk_size : int Files are streamed *chunk_size* bytes at a time instead of loading everything into memory at one. Usually doesn't need to be changed. **kwargs All keyword arguments given when creating an instance of this class will be passed to :func:`requests.get`. Examples -------- Download one of the data files from the Pooch repository: >>> import os >>> from pooch import __version__, check_version >>> url = "https://github.com/fatiando/pooch/raw/{}/data/tiny-data.txt" >>> url = url.format(check_version(__version__, fallback="main")) >>> downloader = HTTPDownloader() >>> # Not using with Pooch.fetch so no need to pass an instance of Pooch >>> downloader(url=url, output_file="tiny-data.txt", pooch=None) >>> os.path.exists("tiny-data.txt") True >>> with open("tiny-data.txt") as f: ... print(f.read().strip()) # A tiny data file for test purposes only 1 2 3 4 5 6 >>> os.remove("tiny-data.txt") Authentication can be handled by passing a user name and password to :func:`requests.get`. All arguments provided when creating an instance of the class are forwarded to :func:`requests.get`. We'll use ``auth=(username, password)`` to use basic HTTPS authentication. The https://httpbin.org website allows us to make a fake a login request using whatever username and password we provide to it: >>> user = "doggo" >>> password = "goodboy" >>> # httpbin will ask for the user and password we provide in the URL >>> url = f"https://httpbin.org/basic-auth/{user}/{password}" >>> # Trying without the login credentials causes an error >>> downloader = HTTPDownloader() >>> try: ... downloader(url=url, output_file="tiny-data.txt", pooch=None) ... except Exception: ... print("There was an error!") There was an error! 
>>> # Pass in the credentials to HTTPDownloader >>> downloader = HTTPDownloader(auth=(user, password)) >>> downloader(url=url, output_file="tiny-data.txt", pooch=None) >>> with open("tiny-data.txt") as f: ... for line in f: ... print(line.rstrip()) { "authenticated": true, "user": "doggo" } >>> os.remove("tiny-data.txt") """ def __init__(self, progressbar=False, chunk_size=1024, **kwargs): self.kwargs = kwargs self.progressbar = progressbar self.chunk_size = chunk_size if self.progressbar is True and tqdm is None: raise ValueError("Missing package 'tqdm' required for progress bars.") def __call__( self, url, output_file, pooch, check_only=False ): # pylint: disable=R0914 """ Download the given URL over HTTP to the given output file. Uses :func:`requests.get`. Parameters ---------- url : str The URL to the file you want to download. output_file : str or file-like object Path (and file name) to which the file will be downloaded. pooch : :class:`~pooch.Pooch` The instance of :class:`~pooch.Pooch` that is calling this method. check_only : bool If True, will only check if a file exists on the server and **without downloading the file**. Will return ``True`` if the file exists and ``False`` otherwise. Returns ------- availability : bool or None If ``check_only==True``, returns a boolean indicating if the file is available on the server. Otherwise, returns ``None``. """ # Lazy import requests to speed up import time import requests # pylint: disable=C0415 if check_only: timeout = self.kwargs.get("timeout", DEFAULT_TIMEOUT) response = requests.head(url, timeout=timeout, allow_redirects=True) available = bool(response.status_code == 200) return available kwargs = self.kwargs.copy() timeout = kwargs.pop("timeout", DEFAULT_TIMEOUT) kwargs.setdefault("stream", True) ispath = not hasattr(output_file, "write") if ispath: # pylint: disable=consider-using-with output_file = open(output_file, "w+b") # pylint: enable=consider-using-with try: response = requests.get(url, timeout=timeout, **kwargs) response.raise_for_status() content = response.iter_content(chunk_size=self.chunk_size) total = int(response.headers.get("content-length", 0)) if self.progressbar is True: # Need to use ascii characters on Windows because there isn't # always full unicode support # (see https://github.com/tqdm/tqdm/issues/454) use_ascii = bool(sys.platform == "win32") progress = tqdm( total=total, ncols=79, ascii=use_ascii, unit="B", unit_scale=True, leave=True, ) elif self.progressbar: progress = self.progressbar progress.total = total for chunk in content: if chunk: output_file.write(chunk) output_file.flush() if self.progressbar: # Use the chunk size here because chunk may be much # larger if the data are decompressed by requests after # reading (happens with text files). progress.update(self.chunk_size) # Make sure the progress bar gets filled even if the actual number # is chunks is smaller than expected. This happens when streaming # text files that are compressed by the server when sending (gzip). # Binary files don't experience this. if self.progressbar: progress.reset() progress.update(total) progress.close() finally: if ispath: output_file.close() return None class FTPDownloader: # pylint: disable=too-few-public-methods """ Download manager for fetching files over FTP. When called, downloads the given file URL into the specified local file. Uses the :mod:`ftplib` module to manage downloads. 
Use with :meth:`pooch.Pooch.fetch` or :func:`pooch.retrieve` to customize the download of files (for example, to use authentication or print a progress bar). Parameters ---------- port : int Port used for the FTP connection. username : str User name used to login to the server. Only needed if the server requires authentication (i.e., no anonymous FTP). password : str Password used to login to the server. Only needed if the server requires authentication (i.e., no anonymous FTP). Use the empty string to indicate no password is required. account : str Some servers also require an "account" name for authentication. timeout : int Timeout in seconds for ftp socket operations, use None to mean no timeout. progressbar : bool If True, will print a progress bar of the download to standard error (stderr). Requires `tqdm `__ to be installed. **Custom progress bars are not yet supported.** chunk_size : int Files are streamed *chunk_size* bytes at a time instead of loading everything into memory at one. Usually doesn't need to be changed. """ def __init__( self, port=21, username="anonymous", password="", account="", timeout=None, progressbar=False, chunk_size=1024, ): self.port = port self.username = username self.password = password self.account = account self.timeout = timeout self.progressbar = progressbar self.chunk_size = chunk_size if self.progressbar is True and tqdm is None: raise ValueError("Missing package 'tqdm' required for progress bars.") def __call__(self, url, output_file, pooch, check_only=False): """ Download the given URL over FTP to the given output file. Parameters ---------- url : str The URL to the file you want to download. output_file : str or file-like object Path (and file name) to which the file will be downloaded. pooch : :class:`~pooch.Pooch` The instance of :class:`~pooch.Pooch` that is calling this method. check_only : bool If True, will only check if a file exists on the server and **without downloading the file**. Will return ``True`` if the file exists and ``False`` otherwise. Returns ------- availability : bool or None If ``check_only==True``, returns a boolean indicating if the file is available on the server. Otherwise, returns ``None``. """ parsed_url = parse_url(url) ftp = ftplib.FTP(timeout=self.timeout) ftp.connect(host=parsed_url["netloc"], port=self.port) if check_only: directory, file_name = os.path.split(parsed_url["path"]) try: ftp.login(user=self.username, passwd=self.password, acct=self.account) available = file_name in ftp.nlst(directory) finally: ftp.close() return available ispath = not hasattr(output_file, "write") if ispath: # pylint: disable=consider-using-with output_file = open(output_file, "w+b") # pylint: enable=consider-using-with try: ftp.login(user=self.username, passwd=self.password, acct=self.account) command = f"RETR {parsed_url['path']}" if self.progressbar: # Make sure the file is set to binary mode, otherwise we can't # get the file size. 
See: https://stackoverflow.com/a/22093848 ftp.voidcmd("TYPE I") use_ascii = bool(sys.platform == "win32") progress = tqdm( total=int(ftp.size(parsed_url["path"])), ncols=79, ascii=use_ascii, unit="B", unit_scale=True, leave=True, ) with progress: def callback(data): "Update the progress bar and write to output" progress.update(len(data)) output_file.write(data) ftp.retrbinary(command, callback, blocksize=self.chunk_size) else: ftp.retrbinary(command, output_file.write, blocksize=self.chunk_size) finally: ftp.quit() if ispath: output_file.close() return None class SFTPDownloader: # pylint: disable=too-few-public-methods """ Download manager for fetching files over SFTP. When called, downloads the given file URL into the specified local file. Requires `paramiko `__ to be installed. Use with :meth:`pooch.Pooch.fetch` or :func:`pooch.retrieve` to customize the download of files (for example, to use authentication or print a progress bar). Parameters ---------- port : int Port used for the SFTP connection. username : str User name used to login to the server. Only needed if the server requires authentication (i.e., no anonymous SFTP). password : str Password used to login to the server. Only needed if the server requires authentication (i.e., no anonymous SFTP). Use the empty string to indicate no password is required. timeout : int Timeout in seconds for sftp socket operations, use None to mean no timeout. progressbar : bool or an arbitrary progress bar object If True, will print a progress bar of the download to standard error (stderr). Requires `tqdm `__ to be installed. """ def __init__( self, port=22, username="anonymous", password="", account="", timeout=None, progressbar=False, ): self.port = port self.username = username self.password = password self.account = account self.timeout = timeout self.progressbar = progressbar # Collect errors and raise only once so that both missing packages are # captured. Otherwise, the user is only warned of one of them at a # time (and we can't test properly when they are both missing). errors = [] if self.progressbar and tqdm is None: errors.append("Missing package 'tqdm' required for progress bars.") if paramiko is None: errors.append("Missing package 'paramiko' required for SFTP downloads.") if errors: raise ValueError(" ".join(errors)) def __call__(self, url, output_file, pooch): """ Download the given URL over SFTP to the given output file. The output file must be given as a string (file name/path) and not an open file object! Otherwise, paramiko cannot save to that file. Parameters ---------- url : str The URL to the file you want to download. output_file : str Path (and file name) to which the file will be downloaded. **Cannot be a file object**. pooch : :class:`~pooch.Pooch` The instance of :class:`~pooch.Pooch` that is calling this method. 
""" parsed_url = parse_url(url) connection = paramiko.Transport(sock=(parsed_url["netloc"], self.port)) sftp = None try: connection.connect(username=self.username, password=self.password) sftp = paramiko.SFTPClient.from_transport(connection) sftp.get_channel().settimeout = self.timeout if self.progressbar: size = int(sftp.stat(parsed_url["path"]).st_size) use_ascii = bool(sys.platform == "win32") progress = tqdm( total=size, ncols=79, ascii=use_ascii, unit="B", unit_scale=True, leave=True, ) if self.progressbar: with progress: def callback(current, total): "Update the progress bar and write to output" progress.total = int(total) progress.update(int(current - progress.n)) sftp.get(parsed_url["path"], output_file, callback=callback) else: sftp.get(parsed_url["path"], output_file) finally: connection.close() if sftp is not None: sftp.close() class DOIDownloader: # pylint: disable=too-few-public-methods """ Download manager for fetching files from Digital Object Identifiers (DOIs). Open-access data repositories often issue Digital Object Identifiers (DOIs) for data which provide a stable link and citation point. The trick is finding out the download URL for a file given the DOI. When called, this downloader uses the repository's public API to find out the download URL from the DOI and file name. It then uses :class:`pooch.HTTPDownloader` to download the URL into the specified local file. Allowing "URL"s to be specified with the DOI instead of the actual HTTP download link. Uses the :mod:`requests` library to manage downloads and interact with the APIs. The **format of the "URL"** is: ``doi:{DOI}/{file name}``. Notice that there are no ``//`` like in HTTP/FTP and you must specify a file name after the DOI (separated by a ``/``). Use with :meth:`pooch.Pooch.fetch` or :func:`pooch.retrieve` to be able to download files given the DOI instead of an HTTP link. Supported repositories: * `figshare `__ * `Zenodo `__ * `Dataverse `__ instances .. attention:: DOIs from other repositories **will not work** since we need to access their particular APIs to find the download links. We welcome suggestions and contributions adding new repositories. Parameters ---------- progressbar : bool or an arbitrary progress bar object If True, will print a progress bar of the download to standard error (stderr). Requires `tqdm `__ to be installed. Alternatively, an arbitrary progress bar object can be passed. See :ref:`custom-progressbar` for details. chunk_size : int Files are streamed *chunk_size* bytes at a time instead of loading everything into memory at one. Usually doesn't need to be changed. **kwargs All keyword arguments given when creating an instance of this class will be passed to :func:`requests.get`. Examples -------- Download one of the data files from the figshare archive of Pooch test data: >>> import os >>> downloader = DOIDownloader() >>> url = "doi:10.6084/m9.figshare.14763051.v1/tiny-data.txt" >>> # Not using with Pooch.fetch so no need to pass an instance of Pooch >>> downloader(url=url, output_file="tiny-data.txt", pooch=None) >>> os.path.exists("tiny-data.txt") True >>> with open("tiny-data.txt") as f: ... print(f.read().strip()) # A tiny data file for test purposes only 1 2 3 4 5 6 >>> os.remove("tiny-data.txt") Same thing but for our Zenodo archive: >>> url = "doi:10.5281/zenodo.4924875/tiny-data.txt" >>> downloader(url=url, output_file="tiny-data.txt", pooch=None) >>> os.path.exists("tiny-data.txt") True >>> with open("tiny-data.txt") as f: ... 
print(f.read().strip()) # A tiny data file for test purposes only 1 2 3 4 5 6 >>> os.remove("tiny-data.txt") """ def __init__(self, progressbar=False, chunk_size=1024, **kwargs): self.kwargs = kwargs self.progressbar = progressbar self.chunk_size = chunk_size def __call__(self, url, output_file, pooch): """ Download the given DOI URL over HTTP to the given output file. Uses the repository's API to determine the actual HTTP download URL from the given DOI. Uses :func:`requests.get`. Parameters ---------- url : str The URL to the file you want to download. output_file : str or file-like object Path (and file name) to which the file will be downloaded. pooch : :class:`~pooch.Pooch` The instance of :class:`~pooch.Pooch` that is calling this method. """ parsed_url = parse_url(url) data_repository = doi_to_repository(parsed_url["netloc"]) # Resolve the URL file_name = parsed_url["path"] # remove the leading slash in the path if file_name[0] == "/": file_name = file_name[1:] download_url = data_repository.download_url(file_name) # Instantiate the downloader object downloader = HTTPDownloader( progressbar=self.progressbar, chunk_size=self.chunk_size, **self.kwargs ) downloader(download_url, output_file, pooch) def doi_to_url(doi): """ Follow a DOI link to resolve the URL of the archive. Parameters ---------- doi : str The DOI of the archive. Returns ------- url : str The URL of the archive in the data repository. """ # Lazy import requests to speed up import time import requests # pylint: disable=C0415 # Use doi.org to resolve the DOI to the repository website. response = requests.get(f"https://doi.org/{doi}", timeout=DEFAULT_TIMEOUT) url = response.url if 400 <= response.status_code < 600: raise ValueError( f"Archive with doi:{doi} not found (see {url}). Is the DOI correct?" ) return url def doi_to_repository(doi): """ Instantiate a data repository instance from a given DOI. This function implements the chain of responsibility dispatch to the correct data repository class. Parameters ---------- doi : str The DOI of the archive. Returns ------- data_repository : DataRepository The data repository object """ # This should go away in a separate issue: DOI handling should # not rely on the (non-)existence of trailing slashes. The issue # is documented in https://github.com/fatiando/pooch/issues/324 if doi[-1] == "/": doi = doi[:-1] repositories = [ FigshareRepository, ZenodoRepository, DataverseRepository, ] # Extract the DOI and the repository information archive_url = doi_to_url(doi) # Try the converters one by one until one of them returned a URL data_repository = None for repo in repositories: if data_repository is None: data_repository = repo.initialize( archive_url=archive_url, doi=doi, ) if data_repository is None: repository = parse_url(archive_url)["netloc"] raise ValueError( f"Invalid data repository '{repository}'. " "To request or contribute support for this repository, " "please open an issue at https://github.com/fatiando/pooch/issues" ) return data_repository class DataRepository: # pylint: disable=too-few-public-methods, missing-class-docstring @classmethod def initialize(cls, doi, archive_url): # pylint: disable=unused-argument """ Initialize the data repository if the given URL points to a corresponding repository. Initializes a data repository object. This is done as part of a chain of responsibility. If the class cannot handle the given repository URL, it returns `None`. Otherwise a `DataRepository` instance is returned. 
Parameters ---------- doi : str The DOI that identifies the repository archive_url : str The resolved URL for the DOI """ return None # pragma: no cover def download_url(self, file_name): """ Use the repository API to get the download URL for a file given the archive URL. Parameters ---------- file_name : str The name of the file in the archive that will be downloaded. Returns ------- download_url : str The HTTP URL that can be used to download the file. """ raise NotImplementedError # pragma: no cover def populate_registry(self, pooch): """ Populate the registry using the data repository's API Parameters ---------- pooch : Pooch The pooch instance that the registry will be added to. """ raise NotImplementedError # pragma: no cover class ZenodoRepository(DataRepository): # pylint: disable=missing-class-docstring base_api_url = "https://zenodo.org/api/records" def __init__(self, doi, archive_url): self.archive_url = archive_url self.doi = doi self._api_response = None self._api_version = None @classmethod def initialize(cls, doi, archive_url): """ Initialize the data repository if the given URL points to a corresponding repository. Initializes a data repository object. This is done as part of a chain of responsibility. If the class cannot handle the given repository URL, it returns `None`. Otherwise a `DataRepository` instance is returned. Parameters ---------- doi : str The DOI that identifies the repository archive_url : str The resolved URL for the DOI """ # Check whether this is a Zenodo URL parsed_archive_url = parse_url(archive_url) if parsed_archive_url["netloc"] != "zenodo.org": return None return cls(doi, archive_url) @property def api_response(self): """Cached API response from Zenodo""" if self._api_response is None: # Lazy import requests to speed up import time import requests # pylint: disable=C0415 article_id = self.archive_url.split("/")[-1] self._api_response = requests.get( f"{self.base_api_url}/{article_id}", timeout=DEFAULT_TIMEOUT, ).json() return self._api_response @property def api_version(self): """ Version of the Zenodo API we are interacting with The versions can either be : - ``"legacy"``: corresponds to the Zenodo API that was supported until 2023-10-12 (before the migration to InvenioRDM). - ``"new"``: corresponds to the new API that went online on 2023-10-13 after the migration to InvenioRDM. The ``"new"`` API breaks backward compatibility with the ``"legacy"`` one and could probably be replaced by an updated version that restores the behaviour of the ``"legacy"`` one. Returns ------- str """ if self._api_version is None: if all("key" in file for file in self.api_response["files"]): self._api_version = "legacy" elif all("filename" in file for file in self.api_response["files"]): self._api_version = "new" else: raise ValueError( "Couldn't determine the version of the Zenodo API for " f"{self.archive_url} (doi:{self.doi})." ) return self._api_version def download_url(self, file_name): """ Use the repository API to get the download URL for a file given the archive URL. Parameters ---------- file_name : str The name of the file in the archive that will be downloaded. Returns ------- download_url : str The HTTP URL that can be used to download the file. Notes ----- After Zenodo migrated to InvenioRDM on Oct 2023, their API changed. The link to the desired files that appears in the API response leads to 404 errors (by 2023-10-17). The files are available in the following url: ``https://zenodo.org/records/{article_id}/files/{file_name}?download=1``. 
This method supports both the legacy and the new API. """ # Create list of files in the repository if self.api_version == "legacy": files = {item["key"]: item for item in self.api_response["files"]} else: files = [item["filename"] for item in self.api_response["files"]] # Check if file exists in the repository if file_name not in files: raise ValueError( f"File '{file_name}' not found in data archive " f"{self.archive_url} (doi:{self.doi})." ) # Build download url if self.api_version == "legacy": download_url = files[file_name]["links"]["self"] else: article_id = self.api_response["id"] download_url = ( f"https://zenodo.org/records/{article_id}/files/{file_name}?download=1" ) return download_url def populate_registry(self, pooch): """ Populate the registry using the data repository's API Parameters ---------- pooch : Pooch The pooch instance that the registry will be added to. Notes ----- After Zenodo migrated to InvenioRDM on Oct 2023, their API changed. The checksums for each file listed in the API reference is now an md5 sum. This method supports both the legacy and the new API. """ for filedata in self.api_response["files"]: checksum = filedata["checksum"] if self.api_version == "legacy": key = "key" else: key = "filename" checksum = f"md5:{checksum}" pooch.registry[filedata[key]] = checksum class FigshareRepository(DataRepository): # pylint: disable=missing-class-docstring def __init__(self, doi, archive_url): self.archive_url = archive_url self.doi = doi self._api_response = None @classmethod def initialize(cls, doi, archive_url): """ Initialize the data repository if the given URL points to a corresponding repository. Initializes a data repository object. This is done as part of a chain of responsibility. If the class cannot handle the given repository URL, it returns `None`. Otherwise a `DataRepository` instance is returned. Parameters ---------- doi : str The DOI that identifies the repository archive_url : str The resolved URL for the DOI """ # Check whether this is a Figshare URL parsed_archive_url = parse_url(archive_url) if parsed_archive_url["netloc"] != "figshare.com": return None return cls(doi, archive_url) def _parse_version_from_doi(self): """ Parse version from the doi Return None if version is not available in the doi. """ # Get suffix of the doi _, suffix = self.doi.split("/") # Split the suffix by dots and keep the last part last_part = suffix.split(".")[-1] # Parse the version from the last part if last_part[0] != "v": return None version = int(last_part[1:]) return version @property def api_response(self): """Cached API response from Figshare""" if self._api_response is None: # Lazy import requests to speed up import time import requests # pylint: disable=C0415 # Use the figshare API to find the article ID from the DOI article = requests.get( f"https://api.figshare.com/v2/articles?doi={self.doi}", timeout=DEFAULT_TIMEOUT, ).json()[0] article_id = article["id"] # Parse desired version from the doi version = self._parse_version_from_doi() # With the ID and version, we can get a list of files and their # download links if version is None: # Figshare returns the latest version available when no version # is specified through the DOI. warnings.warn( f"The Figshare DOI '{self.doi}' doesn't specify which version of " "the repository should be used. 
" "Figshare will point to the latest version available.", UserWarning, ) # Define API url using only the article id # (figshare will resolve the latest version) api_url = f"https://api.figshare.com/v2/articles/{article_id}" else: # Define API url using article id and the desired version # Get list of files using article id and the version api_url = ( "https://api.figshare.com/v2/articles/" f"{article_id}/versions/{version}" ) # Make the request and return the files in the figshare repository response = requests.get(api_url, timeout=DEFAULT_TIMEOUT) response.raise_for_status() self._api_response = response.json()["files"] return self._api_response def download_url(self, file_name): """ Use the repository API to get the download URL for a file given the archive URL. Parameters ---------- file_name : str The name of the file in the archive that will be downloaded. Returns ------- download_url : str The HTTP URL that can be used to download the file. """ files = {item["name"]: item for item in self.api_response} if file_name not in files: raise ValueError( f"File '{file_name}' not found in data archive {self.archive_url} (doi:{self.doi})." ) download_url = files[file_name]["download_url"] return download_url def populate_registry(self, pooch): """ Populate the registry using the data repository's API Parameters ---------- pooch : Pooch The pooch instance that the registry will be added to. """ for filedata in self.api_response: pooch.registry[filedata["name"]] = f"md5:{filedata['computed_md5']}" class DataverseRepository(DataRepository): # pylint: disable=missing-class-docstring def __init__(self, doi, archive_url): self.archive_url = archive_url self.doi = doi self._api_response = None @classmethod def initialize(cls, doi, archive_url): """ Initialize the data repository if the given URL points to a corresponding repository. Initializes a data repository object. This is done as part of a chain of responsibility. If the class cannot handle the given repository URL, it returns `None`. Otherwise a `DataRepository` instance is returned. Parameters ---------- doi : str The DOI that identifies the repository archive_url : str The resolved URL for the DOI """ # Access the DOI as if this was a DataVerse instance response = cls._get_api_response(doi, archive_url) # If we failed, this is probably not a DataVerse instance if 400 <= response.status_code < 600: return None # Initialize the repository and overwrite the api response repository = cls(doi, archive_url) repository.api_response = response return repository @classmethod def _get_api_response(cls, doi, archive_url): """ Perform the actual API request This has been separated into a separate ``classmethod``, as it can be used prior and after the initialization. """ # Lazy import requests to speed up import time import requests # pylint: disable=C0415 parsed = parse_url(archive_url) response = requests.get( f"{parsed['protocol']}://{parsed['netloc']}/api/datasets/" f":persistentId?persistentId=doi:{doi}", timeout=DEFAULT_TIMEOUT, ) return response @property def api_response(self): """Cached API response from a DataVerse instance""" if self._api_response is None: self._api_response = self._get_api_response( self.doi, self.archive_url ) # pragma: no cover return self._api_response @api_response.setter def api_response(self, response): """Update the cached API response""" self._api_response = response def download_url(self, file_name): """ Use the repository API to get the download URL for a file given the archive URL. 
Parameters ---------- file_name : str The name of the file in the archive that will be downloaded. Returns ------- download_url : str The HTTP URL that can be used to download the file. """ parsed = parse_url(self.archive_url) response = self.api_response.json() files = { file["dataFile"]["filename"]: file["dataFile"] for file in response["data"]["latestVersion"]["files"] } if file_name not in files: raise ValueError( f"File '{file_name}' not found in data archive " f"{self.archive_url} (doi:{self.doi})." ) # Generate download_url using the file id download_url = ( f"{parsed['protocol']}://{parsed['netloc']}/api/access/datafile/" f"{files[file_name]['id']}" ) return download_url def populate_registry(self, pooch): """ Populate the registry using the data repository's API Parameters ---------- pooch : Pooch The pooch instance that the registry will be added to. """ for filedata in self.api_response.json()["data"]["latestVersion"]["files"]: pooch.registry[filedata["dataFile"]["filename"]] = ( f"md5:{filedata['dataFile']['md5']}" ) pooch-1.8.2/pooch/hashes.py000066400000000000000000000152211463036314300155730ustar00rootroot00000000000000# Copyright (c) 2018 The Pooch Developers. # Distributed under the terms of the BSD 3-Clause License. # SPDX-License-Identifier: BSD-3-Clause # # This code is part of the Fatiando a Terra project (https://www.fatiando.org) # """ Calculating and checking file hashes. """ import hashlib import functools from pathlib import Path # From the docs: https://docs.python.org/3/library/hashlib.html#hashlib.new # The named constructors are much faster than new() and should be # preferred. # Need to fallback on new() for some algorithms. ALGORITHMS_AVAILABLE = { alg: getattr(hashlib, alg, functools.partial(hashlib.new, alg)) for alg in hashlib.algorithms_available } try: import xxhash # xxhash doesn't have a list of available algorithms yet. # https://github.com/ifduyue/python-xxhash/issues/48 ALGORITHMS_AVAILABLE.update( { alg: getattr(xxhash, alg, None) for alg in ["xxh128", "xxh64", "xxh32", "xxh3_128", "xxh3_64"] } ) # The xxh3 algorithms are only available for version>=2.0. Set to None and # remove to ensure backwards compatibility. ALGORITHMS_AVAILABLE = { alg: func for alg, func in ALGORITHMS_AVAILABLE.items() if func is not None } except ImportError: pass def file_hash(fname, alg="sha256"): """ Calculate the hash of a given file. Useful for checking if a file has changed or been corrupted. Parameters ---------- fname : str The name of the file. alg : str The type of the hashing algorithm Returns ------- hash : str The hash of the file. Examples -------- >>> fname = "test-file-for-hash.txt" >>> with open(fname, "w") as f: ... __ = f.write("content of the file") >>> print(file_hash(fname)) 0fc74468e6a9a829f103d069aeb2bb4f8646bad58bf146bb0e3379b759ec4a00 >>> import os >>> os.remove(fname) """ if alg not in ALGORITHMS_AVAILABLE: raise ValueError( f"Algorithm '{alg}' not available to the pooch library. " "Only the following algorithms are available " f"{list(ALGORITHMS_AVAILABLE.keys())}." ) # Calculate the hash in chunks to avoid overloading the memory chunksize = 65536 hasher = ALGORITHMS_AVAILABLE[alg]() with open(fname, "rb") as fin: buff = fin.read(chunksize) while buff: hasher.update(buff) buff = fin.read(chunksize) return hasher.hexdigest() def hash_algorithm(hash_string): """ Parse the name of the hash method from the hash string. 
The hash string should have the following form ``algorithm:hash``, where algorithm can be the name of any algorithm known to :mod:`hashlib`. If the algorithm is omitted or the hash string is None, will default to ``"sha256"``. Parameters ---------- hash_string : str The hash string with optional algorithm prepended. Returns ------- hash_algorithm : str The name of the algorithm. Examples -------- >>> print(hash_algorithm("qouuwhwd2j192y1lb1iwgowdj2898wd2d9")) sha256 >>> print(hash_algorithm("md5:qouuwhwd2j192y1lb1iwgowdj2898wd2d9")) md5 >>> print(hash_algorithm("sha256:qouuwhwd2j192y1lb1iwgowdj2898wd2d9")) sha256 >>> print(hash_algorithm("SHA256:qouuwhwd2j192y1lb1iwgowdj2898wd2d9")) sha256 >>> print(hash_algorithm("xxh3_64:qouuwhwd2j192y1lb1iwgowdj2898wd2d9")) xxh3_64 >>> print(hash_algorithm(None)) sha256 """ default = "sha256" if hash_string is None: algorithm = default elif ":" not in hash_string: algorithm = default else: algorithm = hash_string.split(":")[0] return algorithm.lower() def hash_matches(fname, known_hash, strict=False, source=None): """ Check if the hash of a file matches a known hash. If the *known_hash* is None, will always return True. Coverts hashes to lowercase before comparison to avoid system specific mismatches between hashes in the registry and computed hashes. Parameters ---------- fname : str or PathLike The path to the file. known_hash : str The known hash. Optionally, prepend ``alg:`` to the hash to specify the hashing algorithm. Default is SHA256. strict : bool If True, will raise a :class:`ValueError` if the hash does not match informing the user that the file may be corrupted. source : str The source of the downloaded file (name or URL, for example). Will be used in the error message if *strict* is True. Has no other use other than reporting to the user where the file came from in case of hash mismatch. If None, will default to *fname*. Returns ------- is_same : bool True if the hash matches, False otherwise. """ if known_hash is None: return True algorithm = hash_algorithm(known_hash) new_hash = file_hash(fname, alg=algorithm) matches = new_hash.lower() == known_hash.split(":")[-1].lower() if strict and not matches: if source is None: source = str(fname) raise ValueError( f"{algorithm.upper()} hash of downloaded file ({source}) does not match" f" the known hash: expected {known_hash} but got {new_hash}. Deleted" " download for safety. The downloaded file may have been corrupted or" " the known hash may be outdated." ) return matches def make_registry(directory, output, recursive=True): """ Make a registry of files and hashes for the given directory. This is helpful if you have many files in your test dataset as it keeps you from needing to manually update the registry. Parameters ---------- directory : str Directory of the test data to put in the registry. All file names in the registry will be relative to this directory. output : str Name of the output registry file. recursive : bool If True, will recursively look for files in subdirectories of *directory*. """ directory = Path(directory) if recursive: pattern = "**/*" else: pattern = "*" files = sorted( str(path.relative_to(directory)) for path in directory.glob(pattern) if path.is_file() ) hashes = [file_hash(str(directory / fname)) for fname in files] with open(output, "w", encoding="utf-8") as outfile: for fname, fhash in zip(files, hashes): # Only use Unix separators for the registry so that we don't go # insane dealing with file paths. 
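            # For illustration (hypothetical entry, not a real hash): each line
            # written below has the form "<name> <hash>", for example
            # "subdir/tiny-data.txt 70e2afd3..." -- the same plain-text registry
            # format that pooch reads back in when loading a registry file.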
outfile.write("{} {}\n".format(fname.replace("\\", "/"), fhash)) pooch-1.8.2/pooch/processors.py000066400000000000000000000367641463036314300165410ustar00rootroot00000000000000# Copyright (c) 2018 The Pooch Developers. # Distributed under the terms of the BSD 3-Clause License. # SPDX-License-Identifier: BSD-3-Clause # # This code is part of the Fatiando a Terra project (https://www.fatiando.org) # # pylint: disable=line-too-long """ Post-processing hooks """ import abc import os import bz2 import gzip import lzma import shutil from zipfile import ZipFile from tarfile import TarFile from .utils import get_logger class ExtractorProcessor(abc.ABC): # pylint: disable=too-few-public-methods """ Abstract base class for extractions from compressed archives. Subclasses can be used with :meth:`pooch.Pooch.fetch` and :func:`pooch.retrieve` to unzip a downloaded data file into a folder in the local data store. :meth:`~pooch.Pooch.fetch` will return a list with the names of the extracted files instead of the archive. Parameters ---------- members : list or None If None, will unpack all files in the archive. Otherwise, *members* must be a list of file names to unpack from the archive. Only these files will be unpacked. extract_dir : str or None If None, files will be unpacked to the default location (a folder in the same location as the downloaded zip file, with a suffix added). Otherwise, files will be unpacked to ``extract_dir``, which is interpreted as a *relative path* (relative to the cache location provided by :func:`pooch.retrieve` or :meth:`pooch.Pooch.fetch`). """ def __init__(self, members=None, extract_dir=None): self.members = members self.extract_dir = extract_dir @property @abc.abstractmethod def suffix(self): """ String appended to unpacked archive folder name. Only used if extract_dir is None. MUST BE IMPLEMENTED BY CHILD CLASSES. """ @abc.abstractmethod def _all_members(self, fname): """ Return all the members in the archive. MUST BE IMPLEMENTED BY CHILD CLASSES. """ @abc.abstractmethod def _extract_file(self, fname, extract_dir): """ This method receives an argument for the archive to extract and the destination path. MUST BE IMPLEMENTED BY CHILD CLASSES. """ def __call__(self, fname, action, pooch): """ Extract all files from the given archive. Parameters ---------- fname : str Full path of the zipped file in local storage. action : str Indicates what action was taken by :meth:`pooch.Pooch.fetch` or :func:`pooch.retrieve`: * ``"download"``: File didn't exist locally and was downloaded * ``"update"``: Local file was outdated and was re-download * ``"fetch"``: File exists and is updated so it wasn't downloaded pooch : :class:`pooch.Pooch` The instance of :class:`pooch.Pooch` that is calling this. Returns ------- fnames : list of str A list of the full path to all files in the extracted archive. """ if self.extract_dir is None: self.extract_dir = fname + self.suffix else: archive_dir = fname.rsplit(os.path.sep, maxsplit=1)[0] self.extract_dir = os.path.join(archive_dir, self.extract_dir) # Get a list of everyone who is supposed to be in the unpacked folder # so we can check if they are all there or if we need to extract new # files. 
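        # NOTE: an empty ``members`` list is treated the same as ``None`` below,
        # so the full archive listing is used when no specific members are given.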
if self.members is None or not self.members: members = self._all_members(fname) else: members = self.members if ( (action in ("update", "download")) or (not os.path.exists(self.extract_dir)) or not all( os.path.exists(os.path.join(self.extract_dir, m)) for m in members ) ): # Make sure that the folder with the extracted files exists os.makedirs(self.extract_dir, exist_ok=True) self._extract_file(fname, self.extract_dir) # Get a list of all file names (including subdirectories) in our folder # of unzipped files, filtered by the given members list fnames = [] for path, _, files in os.walk(self.extract_dir): for filename in files: relpath = os.path.normpath( os.path.join(os.path.relpath(path, self.extract_dir), filename) ) if self.members is None or any( relpath.startswith(os.path.normpath(m)) for m in self.members ): fnames.append(os.path.join(path, filename)) return fnames class Unzip(ExtractorProcessor): # pylint: disable=too-few-public-methods """ Processor that unpacks a zip archive and returns a list of all files. Use with :meth:`pooch.Pooch.fetch` or :func:`pooch.retrieve` to unzip a downloaded data file into a folder in the local data store. The method/function will return a list with the names of the unzipped files instead of the zip archive. The output folder is ``{fname}.unzip``. Parameters ---------- members : list or None If None, will unpack all files in the zip archive. Otherwise, *members* must be a list of file names to unpack from the archive. Only these files will be unpacked. extract_dir : str or None If None, files will be unpacked to the default location (a folder in the same location as the downloaded zip file, with the suffix ``.unzip`` added). Otherwise, files will be unpacked to ``extract_dir``, which is interpreted as a *relative path* (relative to the cache location provided by :func:`pooch.retrieve` or :meth:`pooch.Pooch.fetch`). """ @property def suffix(self): """ String appended to unpacked archive folder name. Only used if extract_dir is None. """ return ".unzip" def _all_members(self, fname): """Return all members from a given archive.""" with ZipFile(fname, "r") as zip_file: return zip_file.namelist() def _extract_file(self, fname, extract_dir): """ This method receives an argument for the archive to extract and the destination path. """ with ZipFile(fname, "r") as zip_file: if self.members is None: get_logger().info( "Unzipping contents of '%s' to '%s'", fname, extract_dir ) # Unpack all files from the archive into our new folder zip_file.extractall(path=extract_dir) else: for member in self.members: get_logger().info( "Extracting '%s' from '%s' to '%s'", member, fname, extract_dir ) # If the member is a dir, we need to get the names of the # elements it contains for extraction (ZipFile does not # support dirs on .extract). If it's not a dir, this will # only include the member itself. # Based on: # https://stackoverflow.com/questions/8008829/extract-only-a-single-directory-from-tar subdir_members = [ name for name in zip_file.namelist() if os.path.normpath(name).startswith(os.path.normpath(member)) ] # Extract the data file from within the archive zip_file.extractall(members=subdir_members, path=extract_dir) class Untar(ExtractorProcessor): # pylint: disable=too-few-public-methods """ Processor that unpacks a tar archive and returns a list of all files. Use with :meth:`pooch.Pooch.fetch` or :func:`pooch.retrieve` to untar a downloaded data file into a folder in the local data store. 
The method/function will return a list with the names of the extracted files instead of the archive. The output folder is ``{fname}.untar``. Parameters ---------- members : list or None If None, will unpack all files in the archive. Otherwise, *members* must be a list of file names to unpack from the archive. Only these files will be unpacked. extract_dir : str or None If None, files will be unpacked to the default location (a folder in the same location as the downloaded tar file, with the suffix ``.untar`` added). Otherwise, files will be unpacked to ``extract_dir``, which is interpreted as a *relative path* (relative to the cache location provided by :func:`pooch.retrieve` or :meth:`pooch.Pooch.fetch`). """ @property def suffix(self): """ String appended to unpacked archive folder name. Only used if extract_dir is None. """ return ".untar" def _all_members(self, fname): """Return all members from a given archive.""" with TarFile.open(fname, "r") as tar_file: return [info.name for info in tar_file.getmembers()] def _extract_file(self, fname, extract_dir): """ This method receives an argument for the archive to extract and the destination path. """ with TarFile.open(fname, "r") as tar_file: if self.members is None: get_logger().info( "Untarring contents of '%s' to '%s'", fname, extract_dir ) # Unpack all files from the archive into our new folder tar_file.extractall(path=extract_dir) else: for member in self.members: get_logger().info( "Extracting '%s' from '%s' to '%s'", member, fname, extract_dir ) # If the member is a dir, we need to get the names of the # elements it contains for extraction (TarFile does not # support dirs on .extract). If it's not a dir, this will # only include the member itself. # Based on: # https://stackoverflow.com/questions/8008829/extract-only-a-single-directory-from-tar # Can't use .getnames because extractall expects TarInfo # objects. subdir_members = [ info for info in tar_file.getmembers() if os.path.normpath(info.name).startswith( os.path.normpath(member) ) ] # Extract the data file from within the archive tar_file.extractall(members=subdir_members, path=extract_dir) class Decompress: # pylint: disable=too-few-public-methods """ Processor that decompress a file and returns the decompressed version. Use with :meth:`pooch.Pooch.fetch` or :func:`pooch.retrieve` to decompress a downloaded data file so that it can be easily opened. Useful for data files that take a long time to decompress (exchanging disk space for speed). Supported decompression methods are LZMA (``.xz``), bzip2 (``.bz2``), and gzip (``.gz``). File names with the standard extensions (see above) can use ``method="auto"`` to automatically determine the compression method. This can be overwritten by setting the *method* argument. .. note:: To unpack zip and tar archives with one or more files, use :class:`pooch.Unzip` and :class:`pooch.Untar` instead. The output file is ``{fname}.decomp`` by default but it can be changed by setting the ``name`` parameter. .. warning:: Passing in ``name`` can cause existing data to be lost! For example, if a file already exists with the specified name it will be overwritten with the new decompressed file content. **Use this option with caution.** Parameters ---------- method : str Name of the compression method. Can be "auto", "lzma", "xz", "bzip2", or "gzip". name : None or str Defines the decompressed file name. The file name will be ``{fname}.decomp`` if ``None`` (default) or the given name otherwise. 
Note that the name should **not** include the full (or relative) path, it should be just the file name itself. """ modules = {"auto": None, "lzma": lzma, "xz": lzma, "gzip": gzip, "bzip2": bz2} extensions = {".xz": "lzma", ".gz": "gzip", ".bz2": "bzip2"} def __init__(self, method="auto", name=None): self.method = method self.name = name def __call__(self, fname, action, pooch): """ Decompress the given file. The output file will be either ``{fname}.decomp`` or the given *name* class attribute. Parameters ---------- fname : str Full path of the compressed file in local storage. action : str Indicates what action was taken by :meth:`pooch.Pooch.fetch` or :func:`pooch.retrieve`: - ``"download"``: File didn't exist locally and was downloaded - ``"update"``: Local file was outdated and was re-download - ``"fetch"``: File exists and is updated so it wasn't downloaded pooch : :class:`pooch.Pooch` The instance of :class:`pooch.Pooch` that is calling this. Returns ------- fname : str The full path to the decompressed file. """ if self.name is None: decompressed = fname + ".decomp" else: decompressed = os.path.join(os.path.dirname(fname), self.name) if action in ("update", "download") or not os.path.exists(decompressed): get_logger().info( "Decompressing '%s' to '%s' using method '%s'.", fname, decompressed, self.method, ) module = self._compression_module(fname) with open(decompressed, "w+b") as output: with module.open(fname) as compressed: shutil.copyfileobj(compressed, output) return decompressed def _compression_module(self, fname): """ Get the Python module compatible with fname and the chosen method. If the *method* attribute is "auto", will select a method based on the extension. If no recognized extension is in the file name, will raise a ValueError. """ error_archives = "To unpack zip/tar archives, use pooch.Unzip/Untar instead." if self.method not in self.modules: message = ( f"Invalid compression method '{self.method}'. " f"Must be one of '{list(self.modules.keys())}'." ) if self.method in {"zip", "tar"}: message = " ".join([message, error_archives]) raise ValueError(message) if self.method == "auto": ext = os.path.splitext(fname)[-1] if ext not in self.extensions: message = ( f"Unrecognized file extension '{ext}'. " f"Must be one of '{list(self.extensions.keys())}'." ) if ext in {".zip", ".tar"}: message = " ".join([message, error_archives]) raise ValueError(message) return self.modules[self.extensions[ext]] return self.modules[self.method] pooch-1.8.2/pooch/tests/000077500000000000000000000000001463036314300151075ustar00rootroot00000000000000pooch-1.8.2/pooch/tests/__init__.py000066400000000000000000000003411463036314300172160ustar00rootroot00000000000000# Copyright (c) 2018 The Pooch Developers. # Distributed under the terms of the BSD 3-Clause License. 
# SPDX-License-Identifier: BSD-3-Clause # # This code is part of the Fatiando a Terra project (https://www.fatiando.org) # pooch-1.8.2/pooch/tests/data/000077500000000000000000000000001463036314300160205ustar00rootroot00000000000000pooch-1.8.2/pooch/tests/data/large-data.txt000066400000000000000000003072751463036314300206000ustar00rootroot00000000000000# A larer data file for test purposes only 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 
1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 
2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 
3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 
4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 
5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 
6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 
1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 2 3 4 5 6 1 
[... the repeating "1 2 3 4 5 6" rows of the large test data file continue; remainder omitted ...]
pooch-1.8.2/pooch/tests/data/registry-custom-url.txt
subdir/tiny-data.txt baee0894dba14b12085eacb204284b97e362f4f3e5a5807693cc90ef415c1b2d
tiny-data.zip 0d49e94f07bc1866ec57e7fd1b93a351fba36842ec9b13dd50bf94e8dfa35cbb
large-data.txt 98de171fb320da82982e6bf0f3994189fff4b42b23328769afce12bdd340444a
store.zip 0498d2a001e71051bbd2acd2346f38da7cbd345a633cb7bf0f8a20938714b51a
tiny-data.txt baee0894dba14b12085eacb204284b97e362f4f3e5a5807693cc90ef415c1b2d https://some-site/tiny-data.txt
tiny-data.tar.gz 41503f083814f43a01a8e9a30c28d7a9fe96839a99727a7fdd0acf7cd5bab63b
store.tar.gz 088c7f4e0f1859b1c769bb6065de24376f366374817ede8691a6ac2e49f29511
tiny-data.txt.bz2 753663687a4040c90c8578061867d1df623e6aa8011c870a5dbd88ee3c82e306
tiny-data.txt.gz 2e2da6161291657617c32192dba95635706af80c6e7335750812907b58fd4b52
tiny-data.txt.xz 99dcb5c32a6e916344bacb4badcbc2f2b6ee196977d1d8187610c21e7e607765

pooch-1.8.2/pooch/tests/data/registry-invalid.txt
tiny-data.txt baee0894dba14b12085eacb204284b97e362f4f3e5a5807693cc90ef415c1b2d
some-file.txt second_element third_element forth_element

pooch-1.8.2/pooch/tests/data/registry-spaces.txt
"file with spaces.txt" baee0894dba14b12085eacb204284b97e362f4f3e5a5807693cc90ef415c1b2d
other\ with\ spaces.txt baee0894dba14b12085eacb204284b97e362f4f3e5a5807693cc90ef415c1b2d

pooch-1.8.2/pooch/tests/data/registry.txt
subdir/tiny-data.txt baee0894dba14b12085eacb204284b97e362f4f3e5a5807693cc90ef415c1b2d
tiny-data.txt baee0894dba14b12085eacb204284b97e362f4f3e5a5807693cc90ef415c1b2d
large-data.txt 98de171fb320da82982e6bf0f3994189fff4b42b23328769afce12bdd340444a
tiny-data.zip 0d49e94f07bc1866ec57e7fd1b93a351fba36842ec9b13dd50bf94e8dfa35cbb
store.zip 0498D2A001E71051BBD2ACD2346F38DA7CBD345A633CB7BF0F8A20938714B51A
tiny-data.tar.gz 41503f083814f43a01a8e9a30c28d7a9fe96839a99727a7fdd0acf7cd5bab63b
store.tar.gz 088c7f4e0f1859b1c769bb6065de24376f366374817ede8691a6ac2e49f29511
tiny-data.txt.bz2 753663687a4040c90c8578061867d1df623e6aa8011c870a5dbd88ee3c82e306
tiny-data.txt.gz 2e2da6161291657617c32192dba95635706af80c6e7335750812907b58fd4b52
tiny-data.txt.xz 99dcb5c32a6e916344bacb4badcbc2f2b6ee196977d1d8187610c21e7e607765

pooch-1.8.2/pooch/tests/data/registry_comments.txt
# a comment
subdir/tiny-data.txt baee0894dba14b12085eacb204284b97e362f4f3e5a5807693cc90ef415c1b2d
tiny-data.txt baee0894dba14b12085eacb204284b97e362f4f3e5a5807693cc90ef415c1b2d
large-data.txt 98de171fb320da82982e6bf0f3994189fff4b42b23328769afce12bdd340444a
tiny-data.zip 0d49e94f07bc1866ec57e7fd1b93a351fba36842ec9b13dd50bf94e8dfa35cbb
 # a comment with a starting space
store.zip 0498d2a001e71051bbd2acd2346f38da7cbd345a633cb7bf0f8a20938714b51a
tiny-data.tar.gz 41503f083814f43a01a8e9a30c28d7a9fe96839a99727a7fdd0acf7cd5bab63b
store.tar.gz 088c7f4e0f1859b1c769bb6065de24376f366374817ede8691a6ac2e49f29511
tiny-data.txt.bz2 753663687a4040c90c8578061867d1df623e6aa8011c870a5dbd88ee3c82e306
tiny-data.txt.gz 2e2da6161291657617c32192dba95635706af80c6e7335750812907b58fd4b52
tiny-data.txt.xz 99dcb5c32a6e916344bacb4badcbc2f2b6ee196977d1d8187610c21e7e607765

pooch-1.8.2/pooch/tests/data/store.tar.gz [binary gzip archive; content omitted]

pooch-1.8.2/pooch/tests/data/store.zip [binary zip archive; content omitted]

pooch-1.8.2/pooch/tests/data/store/subdir/tiny-data.txt
# A tiny data file for test purposes only
1
2
3
4
5
6

pooch-1.8.2/pooch/tests/data/store/tiny-data.txt
# A tiny data file for test purposes only
1
2
3
4
5
6

pooch-1.8.2/pooch/tests/data/tiny-data.tar.gz [binary gzip archive; content omitted]

pooch-1.8.2/pooch/tests/data/tiny-data.txt
# A tiny data file for test purposes only
1
2
3
4
5
6

pooch-1.8.2/pooch/tests/data/tiny-data.txt.bz2 [binary bzip2 file; content omitted]

pooch-1.8.2/pooch/tests/data/tiny-data.txt.gz [binary gzip file; content omitted]

pooch-1.8.2/pooch/tests/data/tiny-data.txt.xz [binary xz file; content omitted]

pooch-1.8.2/pooch/tests/data/tiny-data.zip [binary zip archive; content omitted]

pooch-1.8.2/pooch/tests/test_core.py
# Copyright (c) 2018 The Pooch Developers.
# Distributed under the terms of the BSD 3-Clause License.
# SPDX-License-Identifier: BSD-3-Clause
#
# This code is part of the Fatiando a Terra project (https://www.fatiando.org)
#
# pylint: disable=redefined-outer-name
"""
Test the core class and factory function.
"""
import hashlib
import os
from pathlib import Path
from tempfile import TemporaryDirectory

import pytest

from ..core import create, Pooch, retrieve, download_action, stream_download
from ..utils import get_logger, temporary_file, os_cache
from ..hashes import file_hash, hash_matches

# Import the core module so that we can monkeypatch some functions
from .. import core
from ..downloaders import HTTPDownloader, FTPDownloader
from .utils import (
    pooch_test_url,
    data_over_ftp,
    pooch_test_figshare_url,
    pooch_test_zenodo_url,
    pooch_test_zenodo_with_slash_url,
    pooch_test_dataverse_url,
    pooch_test_registry,
    check_tiny_data,
    check_large_data,
    capture_log,
    mirror_directory,
)

DATA_DIR = str(Path(__file__).parent / "data")
REGISTRY = pooch_test_registry()
BASEURL = pooch_test_url()
FIGSHAREURL = pooch_test_figshare_url()
ZENODOURL = pooch_test_zenodo_url()
ZENODOURL_W_SLASH = pooch_test_zenodo_with_slash_url()
DATAVERSEURL = pooch_test_dataverse_url()
REGISTRY_CORRUPTED = {
    # The same data file but I changed the hash manually to a wrong one
    "tiny-data.txt": "098h0894dba14b12085eacb204284b97e362f4f3e5a5807693cc90ef415c1b2d"
}


@pytest.fixture
def data_dir_mirror(tmp_path):
    """
    Mirror the test data folder on a temporary directory.

    Needed to avoid permission errors when pooch is installed on a
    non-writable path.
""" return mirror_directory(DATA_DIR, tmp_path) @pytest.mark.network def test_retrieve(): "Try downloading some data with retrieve" with TemporaryDirectory() as local_store: data_file = "tiny-data.txt" url = BASEURL + data_file # Check that the logs say that the file is being downloaded with capture_log() as log_file: fname = retrieve(url, known_hash=None, path=local_store) logs = log_file.getvalue() assert logs.split()[0] == "Downloading" assert "SHA256 hash of downloaded file:" in logs assert REGISTRY[data_file] in logs # Check that the downloaded file has the right content assert data_file == fname[-len(data_file) :] check_tiny_data(fname) assert file_hash(fname) == REGISTRY[data_file] # Check that no logging happens when not downloading with capture_log() as log_file: fname = retrieve(url, known_hash=None, path=local_store) assert log_file.getvalue() == "" with capture_log() as log_file: fname = retrieve(url, known_hash=REGISTRY[data_file], path=local_store) assert log_file.getvalue() == "" @pytest.mark.network def test_retrieve_fname(): "Try downloading some data with retrieve and setting the file name" with TemporaryDirectory() as local_store: data_file = "tiny-data.txt" url = BASEURL + data_file # Check that the logs say that the file is being downloaded with capture_log() as log_file: fname = retrieve(url, known_hash=None, path=local_store, fname=data_file) logs = log_file.getvalue() assert logs.split()[0] == "Downloading" assert "SHA256 hash of downloaded file:" in logs assert REGISTRY[data_file] in logs # Check that the downloaded file has the right name and content assert data_file == os.path.split(fname)[1] check_tiny_data(fname) assert file_hash(fname) == REGISTRY[data_file] @pytest.mark.network def test_retrieve_default_path(): "Try downloading some data with retrieve to the default cache location" data_file = "tiny-data.txt" url = BASEURL + data_file expected_location = os_cache("pooch") / data_file try: # Check that the logs say that the file is being downloaded with capture_log() as log_file: fname = retrieve(url, known_hash=None, fname=data_file) logs = log_file.getvalue() assert logs.split()[0] == "Downloading" assert str(os_cache("pooch").resolve()) in logs assert "SHA256 hash of downloaded file" in logs assert REGISTRY[data_file] in logs # Check that the downloaded file has the right content assert fname == str(expected_location.resolve()) check_tiny_data(fname) assert file_hash(fname) == REGISTRY[data_file] finally: if os.path.exists(str(expected_location)): os.remove(str(expected_location)) def test_pooch_local(data_dir_mirror): "Setup a pooch that already has the local data and test the fetch." 
    pup = Pooch(path=data_dir_mirror, base_url="some bogus URL", registry=REGISTRY)
    true = str(data_dir_mirror / "tiny-data.txt")
    fname = pup.fetch("tiny-data.txt")
    assert true == fname
    check_tiny_data(fname)


@pytest.mark.network
@pytest.mark.parametrize(
    "url",
    [BASEURL, FIGSHAREURL, ZENODOURL, DATAVERSEURL],
    ids=["https", "figshare", "zenodo", "dataverse"],
)
def test_pooch_custom_url(url):
    "Have pooch download the file from URL that is not base_url"
    with TemporaryDirectory() as local_store:
        path = Path(local_store)
        urls = {"tiny-data.txt": url + "tiny-data.txt"}
        # Setup a pooch in a temp dir
        pup = Pooch(path=path, base_url="", registry=REGISTRY, urls=urls)
        # Check that the logs say that the file is being downloaded
        with capture_log() as log_file:
            fname = pup.fetch("tiny-data.txt")
            logs = log_file.getvalue()
            assert logs.split()[0] == "Downloading"
            assert logs.split()[-1] == f"'{path}'."
        check_tiny_data(fname)
        # Check that no logging happens when there are no events
        with capture_log() as log_file:
            fname = pup.fetch("tiny-data.txt")
            assert log_file.getvalue() == ""


@pytest.mark.network
@pytest.mark.parametrize(
    "url",
    [BASEURL, FIGSHAREURL, ZENODOURL, DATAVERSEURL],
    ids=["https", "figshare", "zenodo", "dataverse"],
)
def test_pooch_download(url):
    "Setup a pooch that has no local data and needs to download"
    with TemporaryDirectory() as local_store:
        path = Path(local_store)
        true_path = str(path / "tiny-data.txt")
        # Setup a pooch in a temp dir
        pup = Pooch(path=path, base_url=url, registry=REGISTRY)
        # Check that the logs say that the file is being downloaded
        with capture_log() as log_file:
            fname = pup.fetch("tiny-data.txt")
            logs = log_file.getvalue()
            assert logs.split()[0] == "Downloading"
            assert logs.split()[-1] == f"'{path}'."
        # Check that the downloaded file has the right content
        assert true_path == fname
        check_tiny_data(fname)
        assert file_hash(fname) == REGISTRY["tiny-data.txt"]
        # Check that no logging happens when not downloading
        with capture_log() as log_file:
            fname = pup.fetch("tiny-data.txt")
            assert log_file.getvalue() == ""


class FakeHashMatches:  # pylint: disable=too-few-public-methods
    "Create a fake version of hash_matches that fails n times"

    def __init__(self, nfailures):
        self.nfailures = nfailures
        self.failed = 0

    def hash_matches(self, *args, **kwargs):
        "Fail n times before finally passing"
        if self.failed < self.nfailures:
            self.failed += 1
            # Give it an invalid hash to force a failure
            return hash_matches(args[0], "bla", **kwargs)
        return hash_matches(*args, **kwargs)


@pytest.mark.network
def test_pooch_download_retry_off_by_default(monkeypatch):
    "Check that retrying the download is off by default"
    with TemporaryDirectory() as local_store:
        monkeypatch.setattr(core, "hash_matches", FakeHashMatches(3).hash_matches)
        # Setup a pooch without download retrying
        path = Path(local_store)
        pup = Pooch(path=path, base_url=BASEURL, registry=REGISTRY)
        # Make sure it fails with no retries
        with pytest.raises(ValueError) as error:
            with capture_log() as log_file:
                pup.fetch("tiny-data.txt")
        assert "does not match the known hash" in str(error)
        # Check that the log doesn't have the download retry message
        logs = log_file.getvalue().strip().split("\n")
        assert len(logs) == 1
        assert logs[0].startswith("Downloading")
        assert logs[0].endswith(f"'{path}'.")


class FakeSleep:  # pylint: disable=too-few-public-methods
    "Create a fake version of sleep that logs the specified times"

    def __init__(self):
        self.times = []

    def sleep(self, secs):
        "Store the time and doesn't sleep"
        self.times.append(secs)


@pytest.mark.network
def test_pooch_download_retry(monkeypatch):
    "Check that retrying the download works if the hash is different"
    with TemporaryDirectory() as local_store:
        monkeypatch.setattr(core, "hash_matches", FakeHashMatches(11).hash_matches)
        fakesleep = FakeSleep()
        monkeypatch.setattr(core.time, "sleep", fakesleep.sleep)
        # Setup a pooch with download retrying
        path = Path(local_store)
        true_path = str(path / "tiny-data.txt")
        retries = 11
        pup = Pooch(
            path=path, base_url=BASEURL, registry=REGISTRY, retry_if_failed=retries
        )
        # Check that the logs say that the download failed n times
        with capture_log() as log_file:
            fname = pup.fetch("tiny-data.txt")
            logs = log_file.getvalue().strip().split("\n")
            assert len(logs) == 1 + retries
            assert logs[0].startswith("Downloading")
            assert logs[0].endswith(f"'{path}'.")
            for i, line in zip(range(retries, 0, -1), logs[1:]):
                assert "Failed to download" in line
                plural = "s" if i > 1 else ""
                assert f"download again {i} more time{plural}." in line
        # Check that the sleep time increases but stops at 10s
        assert fakesleep.times == [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 10]
        # Check that the downloaded file has the right content
        assert true_path == fname
        check_tiny_data(fname)
        assert file_hash(fname) == REGISTRY["tiny-data.txt"]


@pytest.mark.network
def test_pooch_download_retry_fails_eventually(monkeypatch):
    "Check that retrying the download fails after the set amount of retries"
    with TemporaryDirectory() as local_store:
        monkeypatch.setattr(core, "hash_matches", FakeHashMatches(3).hash_matches)
        # Setup a pooch with insufficient retry attempts
        path = Path(local_store)
        pup = Pooch(path=path, base_url=BASEURL, registry=REGISTRY, retry_if_failed=1)
        # Make sure it fails with no retries
        with pytest.raises(ValueError) as error:
            # Check that the logs say that the download failed n times
            with capture_log() as log_file:
                pup.fetch("tiny-data.txt")
        logs = log_file.getvalue().strip().split("\n")
        assert len(logs) == 2
        assert logs[0].startswith("Downloading")
        assert logs[0].endswith(f"'{path}'.")
        assert "Failed to download" in logs[1]
        assert "download again 1 more time." in logs[1]
        assert "does not match the known hash" in str(error)


@pytest.mark.network
def test_pooch_logging_level():
    "Setup a pooch and check that no logging happens when the level is raised"
    with TemporaryDirectory() as local_store:
        path = Path(local_store)
        urls = {"tiny-data.txt": BASEURL + "tiny-data.txt"}
        # Setup a pooch in a temp dir
        pup = Pooch(path=path, base_url="", registry=REGISTRY, urls=urls)
        # Capture only critical logging events
        with capture_log("CRITICAL") as log_file:
            fname = pup.fetch("tiny-data.txt")
            assert log_file.getvalue() == ""
        check_tiny_data(fname)


@pytest.mark.network
def test_pooch_update():
    "Setup a pooch that already has the local data but the file is outdated"
    with TemporaryDirectory() as local_store:
        path = Path(local_store)
        # Create a dummy version of tiny-data.txt that is different from the
        # one in the remote storage
        true_path = str(path / "tiny-data.txt")
        with open(true_path, "w", encoding="utf-8") as fin:
            fin.write("different data")
        # Setup a pooch in a temp dir
        pup = Pooch(path=path, base_url=BASEURL, registry=REGISTRY)
        # Check that the logs say that the file is being updated
        with capture_log() as log_file:
            fname = pup.fetch("tiny-data.txt")
            logs = log_file.getvalue()
            assert logs.split()[0] == "Updating"
            assert logs.split()[-1] == f"'{path}'."
# Check that the updated file has the right content assert true_path == fname check_tiny_data(fname) assert file_hash(fname) == REGISTRY["tiny-data.txt"] # Check that no logging happens when not downloading with capture_log() as log_file: fname = pup.fetch("tiny-data.txt") assert log_file.getvalue() == "" def test_pooch_update_disallowed(): "Test that disallowing updates works." with TemporaryDirectory() as local_store: path = Path(local_store) # Create a dummy version of tiny-data.txt that is different from the # one in the remote storage true_path = str(path / "tiny-data.txt") with open(true_path, "w", encoding="utf-8") as fin: fin.write("different data") # Setup a pooch in a temp dir pup = Pooch( path=path, base_url=BASEURL, registry=REGISTRY, allow_updates=False, ) with pytest.raises(ValueError): pup.fetch("tiny-data.txt") def test_pooch_update_disallowed_environment(): "Test that disallowing updates works through an environment variable." variable_name = "MYPROJECT_DISALLOW_UPDATES" try: os.environ[variable_name] = "False" with TemporaryDirectory() as local_store: path = Path(local_store) # Create a dummy version of tiny-data.txt that is different from # the one in the remote storage true_path = str(path / "tiny-data.txt") with open(true_path, "w", encoding="utf-8") as fin: fin.write("different data") # Setup a pooch in a temp dir pup = create( path=path, base_url=BASEURL, registry=REGISTRY, allow_updates=variable_name, ) with pytest.raises(ValueError): pup.fetch("tiny-data.txt") finally: os.environ.pop(variable_name) def test_pooch_create_base_url_no_trailing_slash(): """ Test if pooch.create appends a trailing slash to the base url if missing """ base_url = "https://mybase.url" pup = create(base_url=base_url, registry=None, path=DATA_DIR) assert pup.base_url == base_url + "/" @pytest.mark.network def test_pooch_corrupted(data_dir_mirror): "Raise an exception if the file hash doesn't match the registry" # Test the case where the file wasn't in the directory with TemporaryDirectory() as local_store: path = os.path.abspath(local_store) pup = Pooch(path=path, base_url=BASEURL, registry=REGISTRY_CORRUPTED) with capture_log() as log_file: with pytest.raises(ValueError) as error: pup.fetch("tiny-data.txt") assert "(tiny-data.txt)" in str(error.value) logs = log_file.getvalue() assert logs.split()[0] == "Downloading" assert logs.split()[-1] == f"'{path}'." # and the case where the file exists but hash doesn't match pup = Pooch(path=data_dir_mirror, base_url=BASEURL, registry=REGISTRY_CORRUPTED) with capture_log() as log_file: with pytest.raises(ValueError) as error: pup.fetch("tiny-data.txt") assert "(tiny-data.txt)" in str(error.value) logs = log_file.getvalue() assert logs.split()[0] == "Updating" assert logs.split()[-1] == f"'{data_dir_mirror}'." def test_pooch_file_not_in_registry(): "Should raise an exception if the file is not in the registry." 
pup = Pooch( path="it shouldn't matter", base_url="this shouldn't either", registry=REGISTRY ) with pytest.raises(ValueError): pup.fetch("this-file-does-not-exist.csv") def test_pooch_load_registry(): "Loading the registry from a file should work" pup = Pooch(path="", base_url="") pup.load_registry(os.path.join(DATA_DIR, "registry.txt")) assert pup.registry == REGISTRY assert sorted(pup.registry_files) == sorted(REGISTRY) def test_pooch_load_registry_comments(): "Loading the registry from a file and stripping line comments" pup = Pooch(path="", base_url="") pup.load_registry(os.path.join(DATA_DIR, "registry_comments.txt")) assert pup.registry == REGISTRY assert sorted(pup.registry_files) == sorted(REGISTRY) def test_pooch_load_registry_fileobj(): "Loading the registry from a file object" path = os.path.join(DATA_DIR, "registry.txt") # Binary mode pup = Pooch(path="", base_url="") with open(path, "rb") as fin: pup.load_registry(fin) assert pup.registry == REGISTRY assert sorted(pup.registry_files) == sorted(REGISTRY) # Text mode pup = Pooch(path="", base_url="") with open(path, "r", encoding="utf-8") as fin: pup.load_registry(fin) assert pup.registry == REGISTRY assert sorted(pup.registry_files) == sorted(REGISTRY) def test_pooch_load_registry_custom_url(): "Load the registry from a file with a custom URL inserted" pup = Pooch(path="", base_url="") pup.load_registry(os.path.join(DATA_DIR, "registry-custom-url.txt")) assert pup.registry == REGISTRY assert pup.urls == {"tiny-data.txt": "https://some-site/tiny-data.txt"} def test_pooch_load_registry_invalid_line(): "Should raise an exception when a line doesn't have two elements" pup = Pooch(path="", base_url="", registry={}) with pytest.raises(IOError): pup.load_registry(os.path.join(DATA_DIR, "registry-invalid.txt")) def test_pooch_load_registry_with_spaces(): "Should check that spaces in filenames are allowed in registry files" pup = Pooch(path="", base_url="") pup.load_registry(os.path.join(DATA_DIR, "registry-spaces.txt")) assert "file with spaces.txt" in pup.registry assert "other with spaces.txt" in pup.registry @pytest.mark.network def test_check_availability(): "Should correctly check availability of existing and non-existing files" # Check available remote file pup = Pooch(path=DATA_DIR, base_url=BASEURL, registry=REGISTRY) assert pup.is_available("tiny-data.txt") # Check non-available remote file pup = Pooch(path=DATA_DIR, base_url=BASEURL + "wrong-url/", registry=REGISTRY) assert not pup.is_available("tiny-data.txt") # Wrong file name registry = {"not-a-real-data-file.txt": "notarealhash"} registry.update(REGISTRY) pup = Pooch(path=DATA_DIR, base_url=BASEURL, registry=registry) assert not pup.is_available("not-a-real-data-file.txt") def test_check_availability_on_ftp(ftpserver): "Should correctly check availability of existing and non-existing files" with data_over_ftp(ftpserver, "tiny-data.txt") as url: # Check available remote file on FTP server pup = Pooch( path=DATA_DIR, base_url=url.replace("tiny-data.txt", ""), registry={ "tiny-data.txt": "baee0894dba14b12085eacb204284b97e362f4f3e5a5807693cc90ef415c1b2d", "doesnot_exist.zip": "jdjdjdjdflld", }, ) downloader = FTPDownloader(port=ftpserver.server_port) assert pup.is_available("tiny-data.txt", downloader=downloader) # Check non-available remote file assert not pup.is_available("doesnot_exist.zip", downloader=downloader) def test_check_availability_invalid_downloader(): "Should raise an exception if the downloader doesn't support availability checks" def downloader(url, output, 
pooch): # pylint: disable=unused-argument "A downloader that doesn't support check_only" return None pup = Pooch(path=DATA_DIR, base_url=BASEURL, registry=REGISTRY) msg = "does not support availability checks." with pytest.raises(NotImplementedError, match=msg): pup.is_available("tiny-data.txt", downloader=downloader) @pytest.mark.network def test_fetch_with_downloader(capsys): "Setup a downloader function for fetch" def download(url, output_file, pup): # pylint: disable=unused-argument "Download through HTTP and warn that we're doing it" get_logger().info("downloader executed") HTTPDownloader()(url, output_file, pup) with TemporaryDirectory() as local_store: path = Path(local_store) # Setup a pooch in a temp dir pup = Pooch(path=path, base_url=BASEURL, registry=REGISTRY) # Check that the logs say that the file is being downloaded with capture_log() as log_file: fname = pup.fetch("large-data.txt", downloader=download) logs = log_file.getvalue() lines = logs.splitlines() assert len(lines) == 2 assert lines[0].split()[0] == "Downloading" assert lines[1] == "downloader executed" # Read stderr and make sure no progress bar was printed by default assert not capsys.readouterr().err # Check that the downloaded file has the right content check_large_data(fname) # Check that no logging happens when not downloading with capture_log() as log_file: fname = pup.fetch("large-data.txt") assert log_file.getvalue() == "" def test_invalid_hash_alg(data_dir_mirror): "Test an invalid hashing algorithm" pup = Pooch( path=data_dir_mirror, base_url=BASEURL, registry={"tiny-data.txt": "blah:1234"} ) with pytest.raises(ValueError) as exc: pup.fetch("tiny-data.txt") assert "'blah'" in str(exc.value) def test_alternative_hashing_algorithms(data_dir_mirror): "Test different hashing algorithms using local data" fname = str(data_dir_mirror / "tiny-data.txt") check_tiny_data(fname) with open(fname, "rb") as fin: data = fin.read() for alg in ("sha512", "md5"): hasher = hashlib.new(alg) hasher.update(data) registry = {"tiny-data.txt": f"{alg}:{hasher.hexdigest()}"} pup = Pooch(path=data_dir_mirror, base_url="some bogus URL", registry=registry) assert fname == pup.fetch("tiny-data.txt") check_tiny_data(fname) def test_download_action(): "Test that the right action is performed based on file existing" action, verb = download_action( Path("this_file_does_not_exist.txt"), known_hash=None ) assert action == "download" assert verb == "Downloading" with temporary_file() as tmp: action, verb = download_action(Path(tmp), known_hash="not the correct hash") assert action == "update" assert verb == "Updating" with temporary_file() as tmp: with open(tmp, "w", encoding="utf-8") as output: output.write("some data") action, verb = download_action(Path(tmp), known_hash=file_hash(tmp)) assert action == "fetch" assert verb == "Fetching" @pytest.mark.network @pytest.mark.parametrize("fname", ["tiny-data.txt", "subdir/tiny-data.txt"]) def test_stream_download(fname): "Check that downloading a file over HTTP works as expected" # Use the data in store/ because the subdir is in there for some reason url = BASEURL + "store/" + fname known_hash = REGISTRY[fname] downloader = HTTPDownloader() with TemporaryDirectory() as local_store: destination = Path(local_store) / fname assert not destination.exists() stream_download(url, destination, known_hash, downloader, pooch=None) assert destination.exists() check_tiny_data(str(destination)) @pytest.mark.network @pytest.mark.parametrize( "url", [FIGSHAREURL, ZENODOURL, DATAVERSEURL], ids=["figshare", 
"zenodo", "dataverse"], ) def test_load_registry_from_doi(url): """Check that the registry is correctly populated from the API""" with TemporaryDirectory() as local_store: path = os.path.abspath(local_store) pup = Pooch(path=path, base_url=url) pup.load_registry_from_doi() # Check the existence of all files in the registry assert len(pup.registry) == 2 assert "tiny-data.txt" in pup.registry assert "store.zip" in pup.registry # Ensure that all files have correct checksums by fetching them for filename in pup.registry: pup.fetch(filename) @pytest.mark.network def test_load_registry_from_doi_zenodo_with_slash(): """ Check that the registry is correctly populated from the Zenodo API when the filename contains a slash """ url = ZENODOURL_W_SLASH with TemporaryDirectory() as local_store: path = os.path.abspath(local_store) pup = Pooch(path=path, base_url=url) pup.load_registry_from_doi() # Check the existence of all files in the registry assert len(pup.registry) == 1 assert "santisoler/pooch-test-data-v1.zip" in pup.registry # Ensure that all files have correct checksums by fetching them for filename in pup.registry: pup.fetch(filename) def test_wrong_load_registry_from_doi(): """Check that non-DOI URLs produce an error""" pup = Pooch(path="", base_url=BASEURL) with pytest.raises(ValueError) as exc: pup.load_registry_from_doi() assert "only implemented for DOIs" in str(exc.value) pooch-1.8.2/pooch/tests/test_downloaders.py000066400000000000000000000452151463036314300210500ustar00rootroot00000000000000# Copyright (c) 2018 The Pooch Developers. # Distributed under the terms of the BSD 3-Clause License. # SPDX-License-Identifier: BSD-3-Clause # # This code is part of the Fatiando a Terra project (https://www.fatiando.org) # """ Test the downloader classes and functions separately from the Pooch core. """ import os import sys from tempfile import TemporaryDirectory import pytest try: import tqdm except ImportError: tqdm = None try: import paramiko except ImportError: paramiko = None from .. 
import Pooch from ..downloaders import ( HTTPDownloader, FTPDownloader, SFTPDownloader, DOIDownloader, choose_downloader, FigshareRepository, ZenodoRepository, DataverseRepository, doi_to_url, ) from ..processors import Unzip from .utils import ( pooch_test_url, check_large_data, check_tiny_data, data_over_ftp, pooch_test_figshare_url, pooch_test_zenodo_url, pooch_test_zenodo_with_slash_url, pooch_test_dataverse_url, ) BASEURL = pooch_test_url() FIGSHAREURL = pooch_test_figshare_url() ZENODOURL = pooch_test_zenodo_url() ZENODOURL_W_SLASH = pooch_test_zenodo_with_slash_url() DATAVERSEURL = pooch_test_dataverse_url() @pytest.mark.skipif(tqdm is None, reason="requires tqdm") @pytest.mark.parametrize( "url", [ BASEURL + "tiny-data.txt", # HTTPDownloader FIGSHAREURL, # DOIDownloader ], ) def test_progressbar_kwarg_passed(url): """The progressbar keyword argument must pass through choose_downloader""" downloader = choose_downloader(url, progressbar=True) assert downloader.progressbar is True @pytest.mark.skipif(paramiko is None, reason="requires paramiko") def test_progressbar_kwarg_passed_sftp(): """The progressbar keyword argument must pass through choose_downloader""" url = "sftp://test.rebex.net/pub/example/pocketftp.png" downloader = choose_downloader(url, progressbar=True) assert downloader.progressbar is True def test_unsupported_protocol(): "Should raise ValueError when protocol is not supported" with pytest.raises(ValueError): choose_downloader("httpup://some-invalid-url.com") # Simulate the DOI format with pytest.raises(ValueError): choose_downloader("doii:XXX/XXX/file") @pytest.mark.network def test_invalid_doi_repository(): "Should fail if data repository is not supported" with pytest.raises(ValueError) as exc: # Use the DOI of the Pooch paper in JOSS (not a data repository) DOIDownloader()( url="doi:10.21105/joss.01943/file_name.txt", output_file=None, pooch=None ) assert "Invalid data repository 'joss.theoj.org'" in str(exc.value) @pytest.mark.network def test_doi_url_not_found(): "Should fail if the DOI is not found" with pytest.raises(ValueError) as exc: doi_to_url(doi="NOTAREALDOI") assert "Is the DOI correct?" 
in str(exc.value) @pytest.mark.network @pytest.mark.parametrize( "repository,doi", [ (FigshareRepository, "10.6084/m9.figshare.14763051.v1"), (ZenodoRepository, "10.5281/zenodo.4924875"), (DataverseRepository, "10.11588/data/TKCFEF"), ], ids=["figshare", "zenodo", "dataverse"], ) def test_figshare_url_file_not_found(repository, doi): "Should fail if the file is not found in the archive" with pytest.raises(ValueError) as exc: url = doi_to_url(doi) repo = repository.initialize(doi, url) repo.download_url(file_name="bla.txt") assert "File 'bla.txt' not found" in str(exc.value) @pytest.mark.network @pytest.mark.parametrize( "url", [FIGSHAREURL, ZENODOURL, DATAVERSEURL], ids=["figshare", "zenodo", "dataverse"], ) def test_doi_downloader(url): "Test the DOI downloader" # Use the test data we have on the repository with TemporaryDirectory() as local_store: downloader = DOIDownloader() outfile = os.path.join(local_store, "tiny-data.txt") downloader(url + "tiny-data.txt", outfile, None) check_tiny_data(outfile) @pytest.mark.network def test_zenodo_downloader_with_slash_in_fname(): """ Test the Zenodo downloader when the path contains a forward slash Related to issue #336 """ # Use the test data we have on the repository with TemporaryDirectory() as local_store: base_url = ZENODOURL_W_SLASH + "santisoler/pooch-test-data-v1.zip" downloader = DOIDownloader() outfile = os.path.join(local_store, "test-data.zip") downloader(base_url, outfile, None) # unpack the downloaded zip file so we can check the integrity of # tiny-data.txt fnames = Unzip()(outfile, action="download", pooch=None) (fname,) = [f for f in fnames if "tiny-data.txt" in f] check_tiny_data(fname) @pytest.mark.network def test_figshare_unspecified_version(): """ Test if passing a Figshare url without a version warns about it, but still downloads it. 
""" url = FIGSHAREURL # Remove the last bits of the doi, where the version is specified and url = url[: url.rindex(".")] + "/" # Create expected warning message doi = url[4:-1] warning_msg = f"The Figshare DOI '{doi}' doesn't specify which version of " with TemporaryDirectory() as local_store: downloader = DOIDownloader() outfile = os.path.join(local_store, "tiny-data.txt") with pytest.warns(UserWarning, match=warning_msg): downloader(url + "tiny-data.txt", outfile, None) @pytest.mark.network @pytest.mark.parametrize( "version, missing, present", [ ( 1, "LC08_L2SP_218074_20190114_20200829_02_T1-cropped.tar.gz", "cropped-before.tar.gz", ), ( 2, "cropped-before.tar.gz", "LC08_L2SP_218074_20190114_20200829_02_T1-cropped.tar.gz", ), ], ) def test_figshare_data_repository_versions(version, missing, present): """ Test if setting the version in Figshare DOI works as expected """ # Use a Figshare repo as example (we won't download files from it since # they are too big) doi = f"10.6084/m9.figshare.21665630.v{version}" url = f"https://doi.org/{doi}/" figshare = FigshareRepository(doi, url) filenames = [item["name"] for item in figshare.api_response] assert present in filenames assert missing not in filenames @pytest.mark.network def test_ftp_downloader(ftpserver): "Test ftp downloader" with data_over_ftp(ftpserver, "tiny-data.txt") as url: with TemporaryDirectory() as local_store: downloader = FTPDownloader(port=ftpserver.server_port) outfile = os.path.join(local_store, "tiny-data.txt") downloader(url, outfile, None) check_tiny_data(outfile) @pytest.mark.network @pytest.mark.skipif(paramiko is None, reason="requires paramiko to run SFTP") def test_sftp_downloader(): "Test sftp downloader" with TemporaryDirectory() as local_store: downloader = SFTPDownloader(username="demo", password="password") url = "sftp://test.rebex.net/pub/example/pocketftp.png" outfile = os.path.join(local_store, "pocketftp.png") downloader(url, outfile, None) assert os.path.exists(outfile) @pytest.mark.network @pytest.mark.skipif(paramiko is None, reason="requires paramiko to run SFTP") def test_sftp_downloader_fail_if_file_object(): "Downloader should fail when a file object rather than string is passed" with TemporaryDirectory() as local_store: downloader = SFTPDownloader(username="demo", password="password") url = "sftp://test.rebex.net/pub/example/pocketftp.png" outfile = os.path.join(local_store, "pocketftp.png") with open(outfile, "wb") as outfile_obj: with pytest.raises(TypeError): downloader(url, outfile_obj, None) @pytest.mark.skipif(paramiko is not None, reason="paramiko must be missing") def test_sftp_downloader_fail_if_paramiko_missing(): "test must fail if paramiko is not installed" with pytest.raises(ValueError) as exc: SFTPDownloader() assert "'paramiko'" in str(exc.value) @pytest.mark.skipif(tqdm is not None, reason="tqdm must be missing") @pytest.mark.parametrize("downloader", [HTTPDownloader, FTPDownloader, SFTPDownloader]) def test_downloader_progressbar_fails(downloader): "Make sure an error is raised if trying to use progressbar without tqdm" with pytest.raises(ValueError) as exc: downloader(progressbar=True) assert "'tqdm'" in str(exc.value) @pytest.mark.network @pytest.mark.skipif(tqdm is None, reason="requires tqdm") @pytest.mark.parametrize( "url,downloader", [(BASEURL, HTTPDownloader), (FIGSHAREURL, DOIDownloader)], ids=["http", "figshare"], ) def test_downloader_progressbar(url, downloader, capsys): "Setup a downloader function that prints a progress bar for fetch" download = 
downloader(progressbar=True) with TemporaryDirectory() as local_store: fname = "tiny-data.txt" url = url + fname outfile = os.path.join(local_store, fname) download(url, outfile, None) # Read stderr and make sure the progress bar is printed only when told captured = capsys.readouterr() printed = captured.err.split("\r")[-1].strip() assert len(printed) == 79 if sys.platform == "win32": progress = "100%|####################" else: progress = "100%|████████████████████" # Bar size is not always the same so can't reliably test the whole bar. assert printed[:25] == progress # Check that the downloaded file has the right content check_tiny_data(outfile) @pytest.mark.network @pytest.mark.skipif(tqdm is None, reason="requires tqdm") def test_downloader_progressbar_ftp(capsys, ftpserver): "Setup an FTP downloader function that prints a progress bar for fetch" with data_over_ftp(ftpserver, "tiny-data.txt") as url: download = FTPDownloader(progressbar=True, port=ftpserver.server_port) with TemporaryDirectory() as local_store: outfile = os.path.join(local_store, "tiny-data.txt") download(url, outfile, None) # Read stderr and make sure the progress bar is printed only when # told captured = capsys.readouterr() printed = captured.err.split("\r")[-1].strip() assert len(printed) == 79 if sys.platform == "win32": progress = "100%|####################" else: progress = "100%|████████████████████" # Bar size is not always the same so can't reliably test the whole # bar. assert printed[:25] == progress # Check that the file was actually downloaded check_tiny_data(outfile) @pytest.mark.network @pytest.mark.skipif(tqdm is None, reason="requires tqdm") @pytest.mark.skipif(paramiko is None, reason="requires paramiko") def test_downloader_progressbar_sftp(capsys): "Setup an SFTP downloader function that prints a progress bar for fetch" downloader = SFTPDownloader(progressbar=True, username="demo", password="password") with TemporaryDirectory() as local_store: url = "sftp://test.rebex.net/pub/example/pocketftp.png" outfile = os.path.join(local_store, "pocketftp.png") downloader(url, outfile, None) # Read stderr and make sure the progress bar is printed only when told captured = capsys.readouterr() printed = captured.err.split("\r")[-1].strip() assert len(printed) == 79 if sys.platform == "win32": progress = "100%|####################" else: progress = "100%|████████████████████" # Bar size is not always the same so can't reliably test the whole bar. assert printed[:25] == progress # Check that the file was actually downloaded assert os.path.exists(outfile) @pytest.mark.network def test_downloader_arbitrary_progressbar(capsys): "Setup a downloader function with an arbitrary progress bar class." 
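# The progressbar argument doesn't have to be a tqdm instance: any object # exposing the small interface used here (a `total` attribute plus `update`, # `reset`, and `close` methods) can be passed instead, which is what # MinimalProgressDisplay below demonstrates.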
class MinimalProgressDisplay: """A minimalist replacement for tqdm.tqdm""" def __init__(self, total): self.count = 0 self.total = total def __repr__(self): """represent current completion""" return str(self.count) + "/" + str(self.total) def render(self): """print self.__repr__ to stderr""" print(f"\r{self}", file=sys.stderr, end="") def update(self, i): """modify completion and render""" self.count = i self.render() def reset(self): """set counter to 0""" self.count = 0 @staticmethod def close(): """print a new empty line""" print("", file=sys.stderr) pbar = MinimalProgressDisplay(total=None) download = HTTPDownloader(progressbar=pbar) with TemporaryDirectory() as local_store: fname = "large-data.txt" url = BASEURL + fname outfile = os.path.join(local_store, "large-data.txt") download(url, outfile, None) # Read stderr and make sure the progress bar is printed only when told captured = capsys.readouterr() printed = captured.err.split("\r")[-1].strip() progress = "336/336" assert printed == progress # Check that the downloaded file has the right content check_large_data(outfile) class TestZenodoAPISupport: """ Test support for different Zenodo APIs """ article_id = 123456 doi = f"10.0001/zenodo.{article_id}" doi_url = f"https://doi.org/{doi}" file_name = "my-file.zip" file_url = ( "https://zenodo.org/api/files/513d7033-93a2-4eeb-821c-2fb0bbab0012/my-file.zip" ) file_checksum = "2942bfabb3d05332b66eb128e0842cff" legacy_api_response = { "created": "2021-20-19T08:00:00.000000+00:00", "modified": "2021-20-19T08:00:00.000000+00:00", "id": article_id, "doi": doi, "doi_url": doi_url, "files": [ { "id": "513d7033-93a2-4eeb-821c-2fb0bbab0012", "key": file_name, "checksum": f"md5:{file_checksum}", "links": { "self": file_url, }, } ], } new_api_response = { "created": "2021-20-19T08:00:00.000000+00:00", "modified": "2021-20-19T08:00:00.000000+00:00", "id": article_id, "doi": doi, "doi_url": doi_url, "files": [ { "id": "513d7033-93a2-4eeb-821c-2fb0bbab0012", "filename": file_name, "checksum": file_checksum, "links": { "self": file_url, }, } ], } invalid_api_response = { "created": "2021-20-19T08:00:00.000000+00:00", "modified": "2021-20-19T08:00:00.000000+00:00", "id": article_id, "doi": doi, "doi_url": doi_url, "files": [ { "id": "513d7033-93a2-4eeb-821c-2fb0bbab0012", "filename": file_name, "checksum": file_checksum, "links": { "self": file_url, }, }, { "id": "513d7033-93a2-4eeb-821c-2fb0bbab0012", "key": file_name, "checksum": f"md5:{file_checksum}", "links": { "self": file_url, }, }, ], } @pytest.mark.parametrize( "api_version, api_response", [ ("legacy", legacy_api_response), ("new", new_api_response), ("invalid", invalid_api_response), ], ) def test_api_version(self, httpserver, api_version, api_response): """ Test if the API version is correctly detected. 
""" # Create a local http server httpserver.expect_request(f"/zenodo.{self.article_id}").respond_with_json( api_response ) # Create Zenodo downloader downloader = ZenodoRepository(doi=self.doi, archive_url=self.doi_url) # Override base url for the API of the downloader downloader.base_api_url = httpserver.url_for("") # Check if the API version is correctly identified if api_version != "invalid": assert downloader.api_version == api_version else: msg = "Couldn't determine the version of the Zenodo API" with pytest.raises(ValueError, match=msg): api_version = downloader.api_version @pytest.mark.parametrize( "api_version, api_response", [("legacy", legacy_api_response), ("new", new_api_response)], ) def test_download_url(self, httpserver, api_version, api_response): """ Test if the download url is correct for each API version. """ # Create a local http server httpserver.expect_request(f"/zenodo.{self.article_id}").respond_with_json( api_response ) # Create Zenodo downloader downloader = ZenodoRepository(doi=self.doi, archive_url=self.doi_url) # Override base url for the API of the downloader downloader.base_api_url = httpserver.url_for("") # Check if the download url is correct download_url = downloader.download_url(file_name=self.file_name) if api_version == "legacy": assert download_url == self.file_url else: expected_url = ( "https://zenodo.org/records/" f"{self.article_id}/files/{self.file_name}?download=1" ) assert download_url == expected_url @pytest.mark.parametrize( "api_response", [legacy_api_response, new_api_response], ) def test_populate_registry(self, httpserver, tmp_path, api_response): """ Test if population of registry is correctly done for each API version. """ # Create a local http server httpserver.expect_request(f"/zenodo.{self.article_id}").respond_with_json( api_response ) # Create sample pooch object puppy = Pooch(base_url="", path=tmp_path) # Create Zenodo downloader downloader = ZenodoRepository(doi=self.doi, archive_url=self.doi_url) # Override base url for the API of the downloader downloader.base_api_url = httpserver.url_for("") # Populate registry downloader.populate_registry(puppy) assert puppy.registry == {self.file_name: f"md5:{self.file_checksum}"} pooch-1.8.2/pooch/tests/test_hashes.py000066400000000000000000000162261463036314300200020ustar00rootroot00000000000000# Copyright (c) 2018 The Pooch Developers. # Distributed under the terms of the BSD 3-Clause License. # SPDX-License-Identifier: BSD-3-Clause # # This code is part of the Fatiando a Terra project (https://www.fatiando.org) # # pylint: disable=redefined-outer-name """ Test the hash calculation and checking functions. 
""" import os from pathlib import Path from tempfile import NamedTemporaryFile import pytest try: import xxhash XXHASH_MAJOR_VERSION = int(xxhash.VERSION.split(".", maxsplit=1)[0]) except ImportError: xxhash = None XXHASH_MAJOR_VERSION = 0 from ..core import Pooch from ..hashes import ( make_registry, file_hash, hash_matches, ) from .utils import check_tiny_data, mirror_directory DATA_DIR = str(Path(__file__).parent / "data" / "store") REGISTRY = ( "tiny-data.txt baee0894dba14b12085eacb204284b97e362f4f3e5a5807693cc90ef415c1b2d\n" ) REGISTRY_RECURSIVE = ( "subdir/tiny-data.txt baee0894dba14b12085eacb204284b97e362f4f3e5a5807693cc90ef415c1b2d\n" "tiny-data.txt baee0894dba14b12085eacb204284b97e362f4f3e5a5807693cc90ef415c1b2d\n" ) TINY_DATA_HASHES_HASHLIB = { "sha1": "c03148994acd89317915ea2f2d080d6dd127aa09", "sha256": "baee0894dba14b12085eacb204284b97e362f4f3e5a5807693cc90ef415c1b2d", "md5": "70e2afd3fd7e336ae478b1e740a5f08e", } TINY_DATA_HASHES_XXH = { "xxh64": "f843815fe57948fa", "xxh32": "98d6f1a2", # Require xxHash > 2.0 "xxh128": "0267d220db258fffb0c567c0ecd1b689", "xxh3_128": "0267d220db258fffb0c567c0ecd1b689", "xxh3_64": "811e3f2a12aec53f", } TINY_DATA_HASHES = TINY_DATA_HASHES_HASHLIB.copy() TINY_DATA_HASHES.update(TINY_DATA_HASHES_XXH) @pytest.fixture def data_dir_mirror(tmp_path): """ Mirror the test data folder on a temporary directory. Needed to avoid permission errors when pooch is installed on a non-writable path. """ return mirror_directory(DATA_DIR, tmp_path) def test_make_registry(data_dir_mirror): "Check that the registry builder creates the right file names and hashes" outfile = NamedTemporaryFile(delete=False) # pylint: disable=consider-using-with # Need to close the file before writing to it. outfile.close() try: make_registry(data_dir_mirror, outfile.name, recursive=False) with open(outfile.name, encoding="utf-8") as fout: registry = fout.read() assert registry == REGISTRY # Check that the registry can be used. pup = Pooch(path=data_dir_mirror, base_url="some bogus URL", registry={}) pup.load_registry(outfile.name) true = str(data_dir_mirror / "tiny-data.txt") fname = pup.fetch("tiny-data.txt") assert true == fname check_tiny_data(fname) finally: os.remove(outfile.name) def test_make_registry_recursive(data_dir_mirror): "Check that the registry builder works in recursive mode" outfile = NamedTemporaryFile(delete=False) # pylint: disable=consider-using-with # Need to close the file before writing to it. outfile.close() try: make_registry(data_dir_mirror, outfile.name, recursive=True) with open(outfile.name, encoding="utf-8") as fout: registry = fout.read() assert registry == REGISTRY_RECURSIVE # Check that the registry can be used. 
pup = Pooch(path=data_dir_mirror, base_url="some bogus URL", registry={}) pup.load_registry(outfile.name) assert str(data_dir_mirror / "tiny-data.txt") == pup.fetch("tiny-data.txt") check_tiny_data(pup.fetch("tiny-data.txt")) true = str(data_dir_mirror / "subdir" / "tiny-data.txt") assert true == pup.fetch("subdir/tiny-data.txt") check_tiny_data(pup.fetch("subdir/tiny-data.txt")) finally: os.remove(outfile.name) def test_file_hash_invalid_algorithm(): "Test an invalid hashing algorithm" with pytest.raises(ValueError) as exc: file_hash(fname="something", alg="blah") assert "'blah'" in str(exc.value) @pytest.mark.parametrize( "alg,expected_hash", list(TINY_DATA_HASHES.items()), ids=list(TINY_DATA_HASHES.keys()), ) def test_file_hash(alg, expected_hash): "Test the hash calculation using hashlib and xxhash" if alg.startswith("xxh"): if xxhash is None: pytest.skip("requires xxhash") if alg not in ["xxh64", "xxh32"] and XXHASH_MAJOR_VERSION < 2: pytest.skip("requires xxhash > 2.0") fname = os.path.join(DATA_DIR, "tiny-data.txt") check_tiny_data(fname) returned_hash = file_hash(fname, alg) assert returned_hash == expected_hash @pytest.mark.parametrize( "alg,expected_hash", list(TINY_DATA_HASHES.items()), ids=list(TINY_DATA_HASHES.keys()), ) def test_hash_matches(alg, expected_hash): "Make sure the hash checking function works" if alg.startswith("xxh"): if xxhash is None: pytest.skip("requires xxhash") if alg not in ["xxh64", "xxh32"] and XXHASH_MAJOR_VERSION < 2: pytest.skip("requires xxhash > 2.0") fname = os.path.join(DATA_DIR, "tiny-data.txt") check_tiny_data(fname) # Check if the check passes known_hash = f"{alg}:{expected_hash}" assert hash_matches(fname, known_hash) # And also if it fails known_hash = f"{alg}:blablablabla" assert not hash_matches(fname, known_hash) @pytest.mark.parametrize( "alg,expected_hash", list(TINY_DATA_HASHES_HASHLIB.items()), ids=list(TINY_DATA_HASHES_HASHLIB.keys()), ) def test_hash_matches_strict(alg, expected_hash): "Make sure the hash checking function raises an exception if strict" fname = os.path.join(DATA_DIR, "tiny-data.txt") check_tiny_data(fname) # Check if the check passes known_hash = f"{alg}:{expected_hash}" assert hash_matches(fname, known_hash, strict=True) # And also if it fails bad_hash = f"{alg}:blablablabla" with pytest.raises(ValueError) as error: hash_matches(fname, bad_hash, strict=True, source="Neverland") assert "Neverland" in str(error.value) with pytest.raises(ValueError) as error: hash_matches(fname, bad_hash, strict=True, source=None) assert fname in str(error.value) def test_hash_matches_none(): "The hash checking function should always returns True if known_hash=None" fname = os.path.join(DATA_DIR, "tiny-data.txt") assert hash_matches(fname, known_hash=None) # Should work even if the file is invalid assert hash_matches(fname="", known_hash=None) # strict should cause an error if this wasn't working assert hash_matches(fname, known_hash=None, strict=True) @pytest.mark.parametrize( "alg,expected_hash", list(TINY_DATA_HASHES_HASHLIB.items()), ids=list(TINY_DATA_HASHES_HASHLIB.keys()), ) def test_hash_matches_uppercase(alg, expected_hash): "Hash matching should be independent of upper or lower case" fname = os.path.join(DATA_DIR, "tiny-data.txt") check_tiny_data(fname) # Check if the check passes known_hash = f"{alg}:{expected_hash.upper()}" assert hash_matches(fname, known_hash, strict=True) # And also if it fails with pytest.raises(ValueError) as error: hash_matches(fname, known_hash[:-5], strict=True, source="Neverland") assert 
"Neverland" in str(error.value) pooch-1.8.2/pooch/tests/test_integration.py000066400000000000000000000032261463036314300210460ustar00rootroot00000000000000# Copyright (c) 2018 The Pooch Developers. # Distributed under the terms of the BSD 3-Clause License. # SPDX-License-Identifier: BSD-3-Clause # # This code is part of the Fatiando a Terra project (https://www.fatiando.org) # # pylint: disable=redefined-outer-name """ Test the entire process of creating a Pooch and using it. """ import os import shutil from pathlib import Path import pytest from .. import create, os_cache from .. import __version__ as full_version from .utils import check_tiny_data, capture_log @pytest.mark.network def test_create_and_fetch(): "Fetch a data file from the local storage" path = os_cache("pooch-testing") if path.exists(): shutil.rmtree(str(path)) pup = create( path=path, base_url="https://github.com/fatiando/pooch/raw/{version}/data/", version=full_version, version_dev="main", env="POOCH_DATA_DIR", ) # Make sure the storage isn't created until a download is required assert not pup.abspath.exists() pup.load_registry(Path(os.path.dirname(__file__), "data", "registry.txt")) for target in ["tiny-data.txt", "subdir/tiny-data.txt"]: with capture_log() as log_file: fname = pup.fetch(target) assert log_file.getvalue().split()[0] == "Downloading" check_tiny_data(fname) # Now modify the file to trigger an update on the next fetch with open(fname, "w", encoding="utf-8") as fin: fin.write("The data is now different") with capture_log() as log_file: fname = pup.fetch(target) assert log_file.getvalue().split()[0] == "Updating" check_tiny_data(fname) pooch-1.8.2/pooch/tests/test_processors.py000066400000000000000000000255351463036314300207340ustar00rootroot00000000000000# Copyright (c) 2018 The Pooch Developers. # Distributed under the terms of the BSD 3-Clause License. # SPDX-License-Identifier: BSD-3-Clause # # This code is part of the Fatiando a Terra project (https://www.fatiando.org) # """ Test the processor hooks """ from pathlib import Path from tempfile import TemporaryDirectory import warnings import pytest from .. import Pooch from ..processors import Unzip, Untar, Decompress from .utils import pooch_test_url, pooch_test_registry, check_tiny_data, capture_log REGISTRY = pooch_test_registry() BASEURL = pooch_test_url() @pytest.mark.network @pytest.mark.parametrize( "method,ext,name", [ ("auto", "xz", None), ("lzma", "xz", None), ("xz", "xz", None), ("bzip2", "bz2", None), ("gzip", "gz", None), ("gzip", "gz", "different-name.txt"), ], ids=["auto", "lzma", "xz", "bz2", "gz", "name"], ) def test_decompress(method, ext, name): "Check that decompression after download works for all formats" processor = Decompress(method=method, name=name) with TemporaryDirectory() as local_store: path = Path(local_store) if name is None: true_path = str(path / ".".join(["tiny-data.txt", ext, "decomp"])) else: true_path = str(path / name) # Setup a pooch in a temp dir pup = Pooch(path=path, base_url=BASEURL, registry=REGISTRY) # Check the logs when downloading and from the processor with capture_log() as log_file: fname = pup.fetch("tiny-data.txt." + ext, processor=processor) logs = log_file.getvalue() lines = logs.splitlines() assert len(lines) == 2 assert lines[0].split()[0] == "Downloading" assert lines[-1].startswith("Decompressing") assert method in lines[-1] assert fname == true_path check_tiny_data(fname) # Check that processor doesn't execute when not downloading with capture_log() as log_file: fname = pup.fetch("tiny-data.txt." 
+ ext, processor=processor) assert log_file.getvalue() == "" assert fname == true_path check_tiny_data(fname) @pytest.mark.network def test_decompress_fails(): "Should fail if method='auto' and no extension is given in the file name" with TemporaryDirectory() as local_store: path = Path(local_store) pup = Pooch(path=path, base_url=BASEURL, registry=REGISTRY) # Invalid extension with pytest.raises(ValueError) as exception: with warnings.catch_warnings(): pup.fetch("tiny-data.txt", processor=Decompress(method="auto")) assert exception.value.args[0].startswith("Unrecognized file extension '.txt'") assert "pooch.Unzip/Untar" not in exception.value.args[0] # Should also fail for a bad method name with pytest.raises(ValueError) as exception: with warnings.catch_warnings(): pup.fetch("tiny-data.txt", processor=Decompress(method="bla")) assert exception.value.args[0].startswith("Invalid compression method 'bla'") assert "pooch.Unzip/Untar" not in exception.value.args[0] # Point people to Untar and Unzip with pytest.raises(ValueError) as exception: with warnings.catch_warnings(): pup.fetch("tiny-data.txt", processor=Decompress(method="zip")) assert exception.value.args[0].startswith("Invalid compression method 'zip'") assert "pooch.Unzip/Untar" in exception.value.args[0] with pytest.raises(ValueError) as exception: with warnings.catch_warnings(): pup.fetch("store.zip", processor=Decompress(method="auto")) assert exception.value.args[0].startswith("Unrecognized file extension '.zip'") assert "pooch.Unzip/Untar" in exception.value.args[0] @pytest.mark.network @pytest.mark.parametrize( "target_path", [None, "some_custom_path"], ids=["default_path", "custom_path"] ) @pytest.mark.parametrize( "archive,members", [ ("tiny-data", ["tiny-data.txt"]), ("store", None), ("store", ["store/tiny-data.txt"]), ("store", ["store/subdir/tiny-data.txt"]), ("store", ["store/subdir"]), ("store", ["store/tiny-data.txt", "store/subdir"]), ], ids=[ "single_file", "archive_all", "archive_file", "archive_subdir_file", "archive_subdir", "archive_multiple", ], ) @pytest.mark.parametrize( "processor_class,extension", [(Unzip, ".zip"), (Untar, ".tar.gz")], ids=["Unzip", "Untar"], ) def test_unpacking(processor_class, extension, target_path, archive, members): "Tests the behaviour of processors for unpacking archives (Untar, Unzip)" processor = processor_class(members=members, extract_dir=target_path) if target_path is None: target_path = archive + extension + processor.suffix with TemporaryDirectory() as path: path = Path(path) true_paths, expected_log = _unpacking_expected_paths_and_logs( archive, members, path / target_path, processor_class.__name__ ) # Setup a pooch in a temp dir pup = Pooch(path=path, base_url=BASEURL, registry=REGISTRY) # Capture logs and check for the right processor message with capture_log() as log_file: fnames = pup.fetch(archive + extension, processor=processor) assert set(fnames) == true_paths _check_logs(log_file, expected_log) for fname in fnames: check_tiny_data(fname) # Check that processor doesn't execute when not downloading with capture_log() as log_file: fnames = pup.fetch(archive + extension, processor=processor) assert set(fnames) == true_paths _check_logs(log_file, []) for fname in fnames: check_tiny_data(fname) @pytest.mark.network @pytest.mark.parametrize( "processor_class,extension", [(Unzip, ".zip"), (Untar, ".tar.gz")], ) def test_multiple_unpacking(processor_class, extension): "Test that multiple subsequent calls to a processor yield correct results" with TemporaryDirectory() as 
local_store: pup = Pooch(path=Path(local_store), base_url=BASEURL, registry=REGISTRY) # Do a first fetch with the one member only processor1 = processor_class(members=["store/tiny-data.txt"]) filenames1 = pup.fetch("store" + extension, processor=processor1) assert len(filenames1) == 1 check_tiny_data(filenames1[0]) # Do a second fetch with both members processor2 = processor_class( members=["store/tiny-data.txt", "store/subdir/tiny-data.txt"] ) filenames2 = pup.fetch("store" + extension, processor=processor2) assert len(filenames2) == 2 check_tiny_data(filenames2[0]) check_tiny_data(filenames2[1]) # Do a third fetch, again with one member and assert # that only this member was returned filenames3 = pup.fetch("store" + extension, processor=processor1) assert len(filenames3) == 1 check_tiny_data(filenames3[0]) @pytest.mark.network @pytest.mark.parametrize( "processor_class,extension", [(Unzip, ".zip"), (Untar, ".tar.gz")], ) def test_unpack_members_with_leading_dot(processor_class, extension): "Test that unpack members can also be specified with a leading './'" with TemporaryDirectory() as local_store: pup = Pooch(path=Path(local_store), base_url=BASEURL, registry=REGISTRY) # Do a first fetch with the one member only processor1 = processor_class(members=["./store/tiny-data.txt"]) filenames1 = pup.fetch("store" + extension, processor=processor1) assert len(filenames1) == 1 check_tiny_data(filenames1[0]) def _check_logs(log_file, expected_lines): """ Assert that the lines in the log match the expected ones. """ lines = log_file.getvalue().splitlines() assert len(lines) == len(expected_lines) for line, expected_line in zip(lines, expected_lines): assert line.startswith(expected_line) def _unpacking_expected_paths_and_logs(archive, members, path, name): """ Generate the appropriate expected paths and log message depending on the parameters for the test. """ log_lines = ["Downloading"] if archive == "tiny-data": true_paths = {str(path / "tiny-data.txt")} log_lines.append("Extracting 'tiny-data.txt'") elif archive == "store" and members is None: true_paths = { str(path / "store" / "tiny-data.txt"), str(path / "store" / "subdir" / "tiny-data.txt"), } log_lines.append(f"{name}{name[-1]}ing contents") elif archive == "store" and members is not None: true_paths = [] for member in members: true_path = path / Path(*member.split("/")) if not str(true_path).endswith("tiny-data.txt"): true_path = true_path / "tiny-data.txt" true_paths.append(str(true_path)) log_lines.append(f"Extracting '{member}'") true_paths = set(true_paths) return true_paths, log_lines @pytest.mark.network @pytest.mark.parametrize( "processor_class,extension", [(Unzip, ".zip"), (Untar, ".tar.gz")], ) def test_unpacking_members_then_no_members(processor_class, extension): """ Test that calling with valid members then without them works. 
https://github.com/fatiando/pooch/issues/364 """ with TemporaryDirectory() as local_store: pup = Pooch(path=Path(local_store), base_url=BASEURL, registry=REGISTRY) # Do a first fetch with an existing member processor1 = processor_class(members=["store/tiny-data.txt"]) filenames1 = pup.fetch("store" + extension, processor=processor1) assert len(filenames1) == 1 # Do a second fetch with no members processor2 = processor_class() filenames2 = pup.fetch("store" + extension, processor=processor2) assert len(filenames2) > 1 @pytest.mark.network @pytest.mark.parametrize( "processor_class,extension", [(Unzip, ".zip"), (Untar, ".tar.gz")], ) def test_unpacking_wrong_members_then_no_members(processor_class, extension): """ Test that calling with invalid members then without them works. https://github.com/fatiando/pooch/issues/364 """ with TemporaryDirectory() as local_store: pup = Pooch(path=Path(local_store), base_url=BASEURL, registry=REGISTRY) # Do a first fetch with incorrect member processor1 = processor_class(members=["not-a-valid-file.csv"]) filenames1 = pup.fetch("store" + extension, processor=processor1) assert len(filenames1) == 0 # Do a second fetch with no members processor2 = processor_class() filenames2 = pup.fetch("store" + extension, processor=processor2) assert len(filenames2) > 0 pooch-1.8.2/pooch/tests/test_utils.py000066400000000000000000000144641463036314300176710ustar00rootroot00000000000000# Copyright (c) 2018 The Pooch Developers. # Distributed under the terms of the BSD 3-Clause License. # SPDX-License-Identifier: BSD-3-Clause # # This code is part of the Fatiando a Terra project (https://www.fatiando.org) # """ Test the utility functions. """ import os import shutil import time from pathlib import Path import tempfile from tempfile import TemporaryDirectory from concurrent.futures import ThreadPoolExecutor, ProcessPoolExecutor import pytest from ..utils import ( parse_url, make_local_storage, temporary_file, unique_file_name, ) def test_unique_name_long(): "The file name should never be longer than 255 characters" url = f"https://www.something.com/data{'a' * 500}.txt" assert len(url) > 255 fname = unique_file_name(url) assert len(fname) == 255 assert fname[-10:] == "aaaaaa.txt" assert fname.split("-")[1][:10] == "aaaaaaaaaa" @pytest.mark.parametrize( "pool", [ThreadPoolExecutor, ProcessPoolExecutor], ids=["threads", "processes"], ) def test_make_local_storage_parallel(pool, monkeypatch): "Try to create the cache folder in parallel" # Can cause multiple attempts at creating the folder which leads to an # exception. Check that this doesn't happen. # See https://github.com/fatiando/pooch/issues/170 # Monkey path makedirs to make it delay before creating the directory. # Otherwise, the dispatch is too fast and the directory will exist before # another process tries to create it. # Need to keep a reference to the original function to avoid infinite # recursions from the monkey patching. 
makedirs = os.makedirs def mockmakedirs(path, exist_ok=False): # pylint: disable=unused-argument "Delay before calling makedirs" time.sleep(1.5) makedirs(path, exist_ok=exist_ok) monkeypatch.setattr(os, "makedirs", mockmakedirs) data_cache = os.path.join(os.curdir, "test_parallel_cache") assert not os.path.exists(data_cache) try: with pool() as executor: futures = [ executor.submit(make_local_storage, data_cache) for i in range(4) ] for future in futures: future.result() assert os.path.exists(data_cache) finally: if os.path.exists(data_cache): shutil.rmtree(data_cache) def test_local_storage_makedirs_permissionerror(monkeypatch): "Should warn the user when can't create the local data dir" def mockmakedirs(path, exist_ok=False): # pylint: disable=unused-argument "Raise an exception to mimic permission issues" raise PermissionError("Fake error") data_cache = os.path.join(os.curdir, "test_permission") assert not os.path.exists(data_cache) monkeypatch.setattr(os, "makedirs", mockmakedirs) with pytest.raises(PermissionError) as error: make_local_storage( path=data_cache, env="SOME_VARIABLE", ) assert "Pooch could not create data cache" in str(error) assert "'SOME_VARIABLE'" in str(error) def test_local_storage_newfile_permissionerror(monkeypatch): "Should warn the user when can't write to the local data dir" # This is a separate function because there should be a warning if the data # dir already exists but we can't write to it. def mocktempfile(**kwargs): # pylint: disable=unused-argument "Raise an exception to mimic permission issues" raise PermissionError("Fake error") with TemporaryDirectory() as data_cache: os.makedirs(os.path.join(data_cache, "1.0")) assert os.path.exists(data_cache) monkeypatch.setattr(tempfile, "NamedTemporaryFile", mocktempfile) with pytest.raises(PermissionError) as error: make_local_storage( path=data_cache, env="SOME_VARIABLE", ) assert "Pooch could not write to data cache" in str(error) assert "'SOME_VARIABLE'" in str(error) @pytest.mark.parametrize( "url,output", [ ( "http://127.0.0.1:8080/test.nc", {"protocol": "http", "netloc": "127.0.0.1:8080", "path": "/test.nc"}, ), ( "ftp://127.0.0.1:8080/test.nc", {"protocol": "ftp", "netloc": "127.0.0.1:8080", "path": "/test.nc"}, ), ( "doi:10.6084/m9.figshare.923450.v1/dike.json", { "protocol": "doi", "netloc": "10.6084/m9.figshare.923450.v1", "path": "/dike.json", }, ), ( r"doi:10.5281/zenodo.7632643/santisoler/pooch-test-data-v1.zip", { "protocol": "doi", "netloc": "10.5281/zenodo.7632643", "path": "/santisoler/pooch-test-data-v1.zip", }, ), ], ids=["http", "ftp", "doi", "zenodo-doi-with-slash"], ) def test_parse_url(url, output): "Parse URL into 3 components" assert parse_url(url) == output def test_parse_url_invalid_doi(): "Should fail if we forget to not include // in the DOI link" with pytest.raises(ValueError): parse_url("doi://XXX/XXX/fname.txt") def test_temporary_file(): "Make sure the file is writable and cleaned up in the end" with temporary_file() as tmp: assert Path(tmp).exists() with open(tmp, "w", encoding="utf-8") as outfile: outfile.write("Meh") with open(tmp, encoding="utf-8") as infile: assert infile.read().strip() == "Meh" assert not Path(tmp).exists() def test_temporary_file_path(): "Make sure the file is writable and cleaned up in the end when given a dir" with TemporaryDirectory() as path: with temporary_file(path) as tmp: assert Path(tmp).exists() assert path in tmp with open(tmp, "w", encoding="utf-8") as outfile: outfile.write("Meh") with open(tmp, encoding="utf-8") as infile: assert 
infile.read().strip() == "Meh" assert not Path(tmp).exists() def test_temporary_file_exception(): "Make sure the file is writable and cleaned up when there is an exception" try: with temporary_file() as tmp: assert Path(tmp).exists() raise ValueError("Nooooooooo!") except ValueError: assert not Path(tmp).exists() pooch-1.8.2/pooch/tests/test_version.py000066400000000000000000000010411463036314300202010ustar00rootroot00000000000000# Copyright (c) 2018 The Pooch Developers. # Distributed under the terms of the BSD 3-Clause License. # SPDX-License-Identifier: BSD-3-Clause # # This code is part of the Fatiando a Terra project (https://www.fatiando.org) # """ Test the version. """ from packaging.version import Version import pooch def test_version(): "Check there's a usable version number in the usual __version__" assert pooch.__version__.startswith("v") # Check that it's PEP440 compliant (will raise an exception otherwise) Version(pooch.__version__) pooch-1.8.2/pooch/tests/utils.py000066400000000000000000000151611463036314300166250ustar00rootroot00000000000000# Copyright (c) 2018 The Pooch Developers. # Distributed under the terms of the BSD 3-Clause License. # SPDX-License-Identifier: BSD-3-Clause # # This code is part of the Fatiando a Terra project (https://www.fatiando.org) # """ Utilities for testing code. """ import os import io import logging import shutil import stat from pathlib import Path from contextlib import contextmanager from .. import __version__ as full_version from ..utils import check_version, get_logger def check_tiny_data(fname): """ Load the tiny-data.txt file and check that the contents are correct. """ assert os.path.exists(fname) with open(fname, encoding="utf-8") as tinydata: content = tinydata.read() true_content = "\n".join( ["# A tiny data file for test purposes only", "1 2 3 4 5 6"] ) assert content.strip() == true_content def check_large_data(fname): """ Load the large-data.txt file and check that the contents are correct. """ assert os.path.exists(fname) with open(fname, encoding="utf-8") as data: content = data.read() true_content = ["# A larer data file for test purposes only"] true_content.extend(["1 2 3 4 5 6"] * 6002) assert content.strip() == "\n".join(true_content) def pooch_test_url(): """ Get the base URL for the test data used in Pooch itself. The URL is a GitHub raw link to the ``pooch/tests/data`` directory from the `GitHub repository `__. It matches the pooch version specified in ``pooch.version.full_version``. Returns ------- url The versioned URL for pooch's test data. """ version = check_version(full_version, fallback="main") url = f"https://github.com/fatiando/pooch/raw/{version}/pooch/tests/data/" return url def pooch_test_figshare_url(): """ Get the base URL for the test data stored in figshare. The URL contains the DOI for the figshare dataset using the appropriate version for this version of Pooch. Returns ------- url The URL for pooch's test data. """ url = "doi:10.6084/m9.figshare.14763051.v1/" return url def pooch_test_zenodo_url(): """ Get the base URL for the test data stored in Zenodo. The URL contains the DOI for the Zenodo dataset using the appropriate version for this version of Pooch. Returns ------- url The URL for pooch's test data. 
""" url = "doi:10.5281/zenodo.4924875/" return url def pooch_test_zenodo_with_slash_url(): """ Get base URL for test data in Zenodo, where the file name contains a slash The URL contains the DOI for the Zenodo dataset that has a slash in the filename (created with the GitHub-Zenodo integration service), using the appropriate version for this version of Pooch. Returns ------- url The URL for pooch's test data. """ url = "doi:10.5281/zenodo.7632643/" return url def pooch_test_dataverse_url(): """ Get the base URL for the test data stored on a DataVerse instance. Returns ------- url The URL for pooch's test data. """ url = "doi:10.11588/data/TKCFEF/" return url def pooch_test_registry(): """ Get a registry for the test data used in Pooch itself. Returns ------- registry Dictionary with pooch's test data files and their hashes. """ registry = { "tiny-data.txt": "baee0894dba14b12085eacb204284b97e362f4f3e5a5807693cc90ef415c1b2d", "large-data.txt": "98de171fb320da82982e6bf0f3994189fff4b42b23328769afce12bdd340444a", "subdir/tiny-data.txt": "baee0894dba14b12085eacb204284b97e362f4f3e5a5807693cc90ef415c1b2d", "tiny-data.zip": "0d49e94f07bc1866ec57e7fd1b93a351fba36842ec9b13dd50bf94e8dfa35cbb", "store.zip": "0498d2a001e71051bbd2acd2346f38da7cbd345a633cb7bf0f8a20938714b51a", "tiny-data.tar.gz": "41503f083814f43a01a8e9a30c28d7a9fe96839a99727a7fdd0acf7cd5bab63b", "store.tar.gz": "088c7f4e0f1859b1c769bb6065de24376f366374817ede8691a6ac2e49f29511", "tiny-data.txt.bz2": "753663687a4040c90c8578061867d1df623e6aa8011c870a5dbd88ee3c82e306", "tiny-data.txt.gz": "2e2da6161291657617c32192dba95635706af80c6e7335750812907b58fd4b52", "tiny-data.txt.xz": "99dcb5c32a6e916344bacb4badcbc2f2b6ee196977d1d8187610c21e7e607765", } return registry @contextmanager def capture_log(level=logging.DEBUG): """ Create a context manager for reading from the logs. Yields ------ log_file : StringIO a file-like object to which the logs were written """ log_file = io.StringIO() handler = logging.StreamHandler(log_file) handler.setLevel(level) get_logger().addHandler(handler) yield log_file get_logger().removeHandler(handler) @contextmanager def data_over_ftp(server, fname): """ Add a test data file to the test FTP server and clean it up afterwards. Parameters ---------- server The ``ftpserver`` fixture provided by pytest-localftpserver. fname : str The name of a file *relative* to the test data folder of the package (usually just the file name, not the full path). Yields ------ url : str The download URL of the data file from the test FTP server. """ package_path = str(Path(__file__).parent / "data" / fname) server_path = os.path.join(server.anon_root, fname) try: shutil.copyfile(package_path, server_path) url = f"ftp://localhost/{fname}" yield url finally: if os.path.exists(server_path): os.remove(server_path) def _recursive_chmod_directories(root, mode): """ Recursively change the permissions on the child directories using a bitwise OR operation. """ for item in root.iterdir(): if item.is_dir(): item.chmod(item.stat().st_mode | mode) _recursive_chmod_directories(item, mode) def mirror_directory(source, destination): """ Copy contents of the source directory into destination and fix permissions. Parameters ---------- source : str, :class:`pathlib.Path` Source data directory. destination : str, :class:`pathlib.Path` Destination directory that will contain the copy of source. The actual source directory (not just it's contents) is copied. Returns ------- mirror : :class:`pathlib.Path` The path of the mirrored output directory. 
""" source = Path(source) mirror = Path(destination) / source.name shutil.copytree(source, mirror) _recursive_chmod_directories(mirror, mode=stat.S_IWUSR) return mirror pooch-1.8.2/pooch/utils.py000066400000000000000000000244471463036314300154720ustar00rootroot00000000000000# Copyright (c) 2018 The Pooch Developers. # Distributed under the terms of the BSD 3-Clause License. # SPDX-License-Identifier: BSD-3-Clause # # This code is part of the Fatiando a Terra project (https://www.fatiando.org) # """ Misc utilities """ import logging import os import tempfile import hashlib from pathlib import Path from urllib.parse import urlsplit from contextlib import contextmanager import warnings import platformdirs from packaging.version import Version LOGGER = logging.Logger("pooch") LOGGER.addHandler(logging.StreamHandler()) def file_hash(*args, **kwargs): """ WARNING: Importing this function from pooch.utils is DEPRECATED. Please import from the top-level namespace (`from pooch import file_hash`) instead, which is fully backwards compatible with pooch >= 0.1. Examples -------- >>> fname = "test-file-for-hash.txt" >>> with open(fname, "w") as f: ... __ = f.write("content of the file") >>> print(file_hash(fname)) 0fc74468e6a9a829f103d069aeb2bb4f8646bad58bf146bb0e3379b759ec4a00 >>> import os >>> os.remove(fname) """ # pylint: disable=import-outside-toplevel from .hashes import file_hash as new_file_hash message = """ Importing file_hash from pooch.utils is DEPRECATED. Please import from the top-level namespace (`from pooch import file_hash`) instead, which is fully backwards compatible with pooch >= 0.1. """ warnings.warn(message, DeprecationWarning, stacklevel=2) return new_file_hash(*args, **kwargs) def get_logger(): r""" Get the default event logger. The logger records events like downloading files, unzipping archives, etc. Use the method :meth:`logging.Logger.setLevel` of this object to adjust the verbosity level from Pooch. Returns ------- logger : :class:`logging.Logger` The logger object for Pooch """ return LOGGER def os_cache(project): r""" Default cache location based on the operating system. The folder locations are defined by the ``platformdirs`` package using the ``user_cache_dir`` function. Usually, the locations will be following (see the `platformdirs documentation `__): * Mac: ``~/Library/Caches/`` * Unix: ``~/.cache/`` or the value of the ``XDG_CACHE_HOME`` environment variable, if defined. * Windows: ``C:\Users\\AppData\Local\\\Cache`` Parameters ---------- project : str The project name. Returns ------- cache_path : :class:`pathlib.Path` The default location for the data cache. User directories (``'~'``) are not expanded. """ return Path(platformdirs.user_cache_dir(project)) def check_version(version, fallback="master"): """ Check if a version is PEP440 compliant and there are no unreleased changes. For example, ``version = "0.1"`` will be returned as is but ``version = "0.1+10.8dl8dh9"`` will return the fallback. This is the convention used by `versioneer `__ to mark that this version is 10 commits ahead of the last release. Parameters ---------- version : str A version string. fallback : str What to return if the version string has unreleased changes. Returns ------- version : str If *version* is PEP440 compliant and there are unreleased changes, then return *version*. Otherwise, return *fallback*. Raises ------ InvalidVersion If *version* is not PEP440 compliant. 
Examples -------- >>> check_version("0.1") '0.1' >>> check_version("0.1a10") '0.1a10' >>> check_version("0.1+111.9hdg36") 'master' >>> check_version("0.1+111.9hdg36", fallback="dev") 'dev' """ parse = Version(version) if parse.local is not None: return fallback return version def parse_url(url): """ Parse a URL into 3 components: :/// Example URLs: * http://127.0.0.1:8080/test.nc * ftp://127.0.0.1:8080/test.nc * doi:10.6084/m9.figshare.923450.v1/test.nc The DOI is a special case. The protocol will be "doi", the netloc will be the DOI, and the path is what comes after the last "/". The only exception are Zenodo dois: the protocol will be "doi", the netloc will be composed by the "prefix/suffix" and the path is what comes after the second "/". This allows to support special cases of Zenodo dois where the path contains forward slashes "/", created by the GitHub-Zenodo integration service. Parameters ---------- url : str The URL. Returns ------- parsed_url : dict Three components of a URL (e.g., ``{'protocol':'http', 'netloc':'127.0.0.1:8080','path': '/test.nc'}``). """ if url.startswith("doi://"): raise ValueError( f"Invalid DOI link '{url}'. You must not use '//' after 'doi:'." ) if url.startswith("doi:"): protocol = "doi" parts = url[4:].split("/") if "zenodo" in parts[1].lower(): netloc = "/".join(parts[:2]) path = "/" + "/".join(parts[2:]) else: netloc = "/".join(parts[:-1]) path = "/" + parts[-1] else: parsed_url = urlsplit(url) protocol = parsed_url.scheme or "file" netloc = parsed_url.netloc path = parsed_url.path return {"protocol": protocol, "netloc": netloc, "path": path} def cache_location(path, env=None, version=None): """ Location of the cache given a base path and optional configuration. Checks for the environment variable to overwrite the path of the local cache. Optionally add *version* to the path if given. Parameters ---------- path : str, PathLike, list or tuple The path to the local data storage folder. If this is a list or tuple, we'll join the parts with the appropriate separator. Use :func:`pooch.os_cache` for a sensible default. version : str or None The version string for your project. Will be appended to given path if not None. env : str or None An environment variable that can be used to overwrite *path*. This allows users to control where they want the data to be stored. We'll append *version* to the end of this value as well. Returns ------- local_path : PathLike The path to the local directory. """ if env is not None and env in os.environ and os.environ[env]: path = os.environ[env] if isinstance(path, (list, tuple)): path = os.path.join(*path) if version is not None: path = os.path.join(str(path), version) path = os.path.expanduser(str(path)) return Path(path) def make_local_storage(path, env=None): """ Create the local cache directory and make sure it's writable. Parameters ---------- path : str or PathLike The path to the local data storage folder. env : str or None An environment variable that can be used to overwrite *path*. Only used in the error message in case the folder is not writable. """ path = str(path) # Check that the data directory is writable if not os.path.exists(path): action = "create" else: action = "write to" try: if action == "create": # When running in parallel, it's possible that multiple jobs will # try to create the path at the same time. Use exist_ok to avoid # raising an error. 
os.makedirs(path, exist_ok=True) else: with tempfile.NamedTemporaryFile(dir=path): pass except PermissionError as error: message = [ str(error), f"| Pooch could not {action} data cache folder '{path}'.", "Will not be able to download data files.", ] if env is not None: message.append( f"Use environment variable '{env}' to specify a different location." ) raise PermissionError(" ".join(message)) from error @contextmanager def temporary_file(path=None): """ Create a closed and named temporary file and make sure it's cleaned up. Using :class:`tempfile.NamedTemporaryFile` will fail on Windows if trying to open the file a second time (when passing its name to Pooch function, for example). This context manager creates the file, closes it, yields the file path, and makes sure it's deleted in the end. Parameters ---------- path : str or PathLike The directory in which the temporary file will be created. Yields ------ fname : str The path to the temporary file. """ tmp = tempfile.NamedTemporaryFile(delete=False, dir=path) # Close the temp file so that it can be opened elsewhere tmp.close() try: yield tmp.name finally: if os.path.exists(tmp.name): os.remove(tmp.name) def unique_file_name(url): """ Create a unique file name based on the given URL. The file name will be unique to the URL by prepending the name with the MD5 hash (hex digest) of the URL. The name will also include the last portion of the URL. The format will be: ``{md5}-{filename}.{ext}`` The file name will be cropped so that the entire name (including the hash) is less than 255 characters long (the limit on most file systems). Parameters ---------- url : str The URL with a file name at the end. Returns ------- fname : str The file name, unique to this URL. Examples -------- >>> print(unique_file_name("https://www.some-server.org/2020/data.txt")) 02ddee027ce5ebb3d7059fb23d210604-data.txt >>> print(unique_file_name("https://www.some-server.org/2019/data.txt")) 9780092867b497fca6fc87d8308f1025-data.txt >>> print(unique_file_name("https://www.some-server.org/2020/data.txt.gz")) 181a9d52e908219c2076f55145d6a344-data.txt.gz """ md5 = hashlib.md5(url.encode()).hexdigest() fname = parse_url(url)["path"].split("/")[-1] # Crop the start of the file name to fit 255 characters including the hash # and the : fname = fname[-(255 - len(md5) - 1) :] unique_name = f"{md5}-{fname}" return unique_name pooch-1.8.2/pyproject.toml000066400000000000000000000042021463036314300155470ustar00rootroot00000000000000[project] name = "pooch" description = "A friend to fetch your data files" dynamic = ["version"] authors = [ {name="The Pooch Developers", email="fatiandoaterra@protonmail.com"}, ] maintainers = [ {name = "Leonardo Uieda", email = "leo@uieda.com"} ] readme = "README.md" license = {text = "BSD-3-Clause"} keywords = ["data", "download", "caching", "http"] classifiers = [ "Development Status :: 5 - Production/Stable", "Intended Audience :: Science/Research", "Intended Audience :: Developers", "Intended Audience :: Education", "License :: OSI Approved :: BSD License", "Operating System :: OS Independent", "Topic :: Scientific/Engineering", "Topic :: Software Development :: Libraries", "Programming Language :: Python :: 3 :: Only", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", ] requires-python = ">=3.7" dependencies = [ "platformdirs >= 2.5.0", "packaging >= 20.0", "requests >= 2.19.0", ] 
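# The tables below define opt-in extras; for example, `pip install
# pooch[progress]` also installs tqdm (used for optional download progress
# bars), while `pip install pooch[sftp,xxhash]` pulls in paramiko and xxhash.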
[project.optional-dependencies] progress = ["tqdm>=4.41.0,<5.0.0"] sftp = ["paramiko>=2.7.0"] xxhash = ["xxhash>=1.4.3"] [project.urls] "Documentation" = "https://www.fatiando.org/pooch" "Changelog" = "https://www.fatiando.org/pooch/latest/changes.html" "Bug Tracker" = "https://github.com/fatiando/pooch/issues" "Source Code" = "https://github.com/fatiando/pooch" [tool.setuptools.packages] find = {} # Scanning implicit namespaces is active by default [build-system] requires = ["setuptools>=45", "wheel", "setuptools_scm[toml]>=6.2"] build-backend = "setuptools.build_meta" [tool.setuptools_scm] version_scheme = "post-release" local_scheme = "node-and-date" write_to = "pooch/_version.py" [tool.pytest.ini_options] markers = [ "network: test requires network access", ] [tool.burocrata] notice = ''' # Copyright (c) 2018 The Pooch Developers. # Distributed under the terms of the BSD 3-Clause License. # SPDX-License-Identifier: BSD-3-Clause # # This code is part of the Fatiando a Terra project (https://www.fatiando.org) #'''
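# A minimal usage sketch (not part of the files above) showing how the helpers
# defined in pooch/utils.py fit together when deciding where a downloaded file
# would live. The project name "myproject" and the environment variable
# "MYPROJECT_DATA_DIR" are hypothetical.
from pooch.utils import cache_location, os_cache, parse_url, unique_file_name

url = "https://www.some-server.org/2020/data.txt"

# Split the URL into protocol/netloc/path components.
parts = parse_url(url)
print(parts["protocol"], parts["netloc"], parts["path"])

# Build a collision-free local file name: an MD5 prefix plus the original name.
fname = unique_file_name(url)

# Resolve the cache folder, honoring an optional override environment variable.
folder = cache_location(os_cache("myproject"), env="MYPROJECT_DATA_DIR")
print(folder / fname)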