pax_global_header00006660000000000000000000000064145475107410014522gustar00rootroot0000000000000052 comment=e92db9f0811206312edf621372de35ea9cff719f ansible-compat-4.1.11/000077500000000000000000000000001454751074100145045ustar00rootroot00000000000000ansible-compat-4.1.11/.git_archival.txt000066400000000000000000000002211454751074100177520ustar00rootroot00000000000000node: e92db9f0811206312edf621372de35ea9cff719f node-date: 2024-01-10T12:47:29+00:00 describe-name: v4.1.11 ref-names: HEAD -> main, tag: v4.1.11 ansible-compat-4.1.11/.gitattributes000066400000000000000000000002211454751074100173720ustar00rootroot00000000000000# Force LF line endings for text files * text=auto eol=lf *.png binary # Needed for setuptools-scm-git-archive .git_archival.txt export-subst ansible-compat-4.1.11/.github/000077500000000000000000000000001454751074100160445ustar00rootroot00000000000000ansible-compat-4.1.11/.github/CODEOWNERS000066400000000000000000000000321454751074100174320ustar00rootroot00000000000000* @ansible/devtools ansible-compat-4.1.11/.github/CODE_OF_CONDUCT.md000066400000000000000000000002421454751074100206410ustar00rootroot00000000000000# Community Code of Conduct Please see the official [Ansible Community Code of Conduct](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html). ansible-compat-4.1.11/.github/dependabot.yml000066400000000000000000000011211454751074100206670ustar00rootroot00000000000000--- # Until bug below is sorted we will not allow dependabot to run by itself # https://github.com/dependabot/dependabot-core/issues/369 version: 2 updates: - package-ecosystem: pip directory: / schedule: day: sunday interval: weekly labels: - dependabot-deps-updates - skip-changelog versioning-strategy: lockfile-only open-pull-requests-limit: 0 # neutered - package-ecosystem: "github-actions" directory: "/" schedule: interval: weekly open-pull-requests-limit: 3 labels: - "dependencies" - "skip-changelog" ansible-compat-4.1.11/.github/release-drafter.yml000066400000000000000000000001231454751074100216300ustar00rootroot00000000000000--- # see https://github.com/ansible/team-devtools _extends: ansible/team-devtools ansible-compat-4.1.11/.github/workflows/000077500000000000000000000000001454751074100201015ustar00rootroot00000000000000ansible-compat-4.1.11/.github/workflows/ack.yml000066400000000000000000000003601454751074100213610ustar00rootroot00000000000000# See https://github.com/ansible/devtools/blob/main/.github/workflows/ack.yml name: ack on: pull_request_target: types: [opened, labeled, unlabeled, synchronize] jobs: ack: uses: ansible/devtools/.github/workflows/ack.yml@main ansible-compat-4.1.11/.github/workflows/push.yml000066400000000000000000000003641454751074100216060ustar00rootroot00000000000000# See https://github.com/ansible/devtools/blob/main/.github/workflows/push.yml name: push on: push: branches: - main - "releases/**" - "stable/**" jobs: ack: uses: ansible/devtools/.github/workflows/push.yml@main ansible-compat-4.1.11/.github/workflows/release.yml000066400000000000000000000017511454751074100222500ustar00rootroot00000000000000name: release on: release: types: [published] jobs: before-release: uses: ansible/ansible-compat/.github/workflows/tox.yml@main release: name: release ${{ github.event.ref }} needs: before-release # unable to use environment with uses/with, basically cannot reuse release pipelines environment: release runs-on: ubuntu-22.04 permissions: id-token: write env: FORCE_COLOR: 1 PY_COLORS: 1 TOX_PARALLEL_NO_SPINNER: 1 steps: - name: Switch to using 
Python 3.12 by default uses: actions/setup-python@v5 with: python-version: "3.12" - name: Install tox run: python3 -m pip install --user "tox>=4.0.0" - name: Check out src from Git uses: actions/checkout@v4 with: fetch-depth: 0 # needed by setuptools-scm - name: Build dists run: python -m tox -e pkg - name: Publish to pypi.org uses: pypa/gh-action-pypi-publish@unstable/v1 ansible-compat-4.1.11/.github/workflows/tox.yml000066400000000000000000000062271454751074100214450ustar00rootroot00000000000000name: tox on: create: # is used for publishing to PyPI and TestPyPI tags: # any tag regardless of its name, no branches - "**" push: # only publishes pushes to the main branch to TestPyPI branches: # any integration branch but not tag - "main" pull_request: schedule: - cron: 1 0 * * * # Run daily at 0:01 UTC workflow_call: jobs: pre: name: pre runs-on: ubuntu-22.04 outputs: matrix: ${{ steps.generate_matrix.outputs.matrix }} steps: - name: Determine matrix id: generate_matrix uses: coactions/dynamic-matrix@v1 with: min_python: "3.9" max_python: "3.12" default_python: "3.10" other_names: | lint docs pkg py39-ansible212 py39-ansible213 py39-ansible214 py39-ansible215 py310-ansible215 py311-ansible215 py312-ansible216 py312-devel smoke platforms: linux,macos macos: minmax build: name: ${{ matrix.name }} runs-on: ${{ matrix.os || 'ubuntu-22.04' }} needs: pre strategy: fail-fast: false matrix: ${{ fromJson(needs.pre.outputs.matrix) }} env: FORCE_COLOR: 1 steps: - name: Check out src from Git uses: actions/checkout@v4 with: fetch-depth: 0 # needed by setuptools-scm - name: Set up Python ${{ matrix.python_version }} uses: actions/setup-python@v5 with: python-version: ${{ matrix.python_version }} - name: Pre-commit cache uses: actions/cache@v3 with: path: ~/.cache/pre-commit key: ${{ matrix.name }}-pre-commit-${{ hashFiles('setup.cfg', 'tox.ini', 'pyproject.toml', '.pre-commit-config.yaml') }} - name: Pip cache uses: actions/cache@v3 with: path: ~/.cache/pip key: ${{ matrix.name }}-pip-${{ hashFiles('setup.cfg', 'tox.ini', 'pyproject.toml', '.pre-commit-config.yaml') }} - name: Install tox run: python3 -m pip install --upgrade 'tox>=4.0.3' - name: Initialize tox envs run: python -m tox --notest --skip-missing-interpreters false -vv -e ${{ matrix.passed_name }} - name: Test with tox run: python -m tox -e ${{ matrix.passed_name }} - name: Archive logs uses: actions/upload-artifact@v3 with: name: logs.zip path: .tox/**/log/ - name: Upload coverage data if: ${{ startsWith(matrix.passed_name, 'py') }} uses: codecov/codecov-action@v3 with: name: ${{ matrix.passed_name }} fail_ci_if_error: false # see https://github.com/codecov/codecov-action/issues/598 token: ${{ secrets.CODECOV_TOKEN }} verbose: true # optional (default = false) check: # This job does nothing and is only used for the branch protection if: always() needs: - build runs-on: ubuntu-22.04 steps: - name: Decide whether the needed jobs succeeded or failed uses: re-actors/alls-green@release/v1 with: jobs: ${{ toJSON(needs) }} ansible-compat-4.1.11/.gitignore000066400000000000000000000035631454751074100165030ustar00rootroot00000000000000# Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] *$py.class # C extensions *.so # Distribution / packaging .Python build/ develop-eggs/ dist/ downloads/ eggs/ .eggs/ lib/ lib64/ parts/ sdist/ var/ wheels/ pip-wheel-metadata/ share/python-wheels/ *.egg-info/ .installed.cfg *.egg MANIFEST # PyInstaller # Usually these files are written by a python script from a template # before PyInstaller builds the exe, 
so as to inject date/other infos into it. *.manifest *.spec rpm/*.spec *.rpm # Installer logs pip-log.txt pip-delete-this-directory.txt # Unit test / coverage reports htmlcov/ .tox/ .nox/ .coverage .cache nosetests.xml coverage.xml *.cover *.py,cover .hypothesis/ .pytest_cache/ # Translations *.mo *.pot # Django stuff: *.log local_settings.py db.sqlite3 db.sqlite3-journal # Flask stuff: instance/ .webassets-cache # Scrapy stuff: .scrapy # Sphinx documentation docs/_build/ # PyBuilder target/ # Jupyter Notebook .ipynb_checkpoints # IPython profile_default/ ipython_config.py # pyenv .python-version # pipenv # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. # However, in case of collaboration, if having platform-specific dependencies or dependencies # having no cross-platform support, pipenv may install dependencies that don't work, or not # install all needed dependencies. #Pipfile.lock # PEP 582; used by e.g. github.com/David-OConnor/pyflow __pypackages__/ # Celery stuff celerybeat-schedule celerybeat.pid # SageMath parsed files *.sage.py # Environments .env .venv env/ venv/ ENV/ env.bak/ venv.bak/ # Spyder project settings .spyderproject .spyproject # Rope project settings .ropeproject # mkdocs documentation /site # mypy .mypy_cache/ .dmypy.json dmypy.json # Pyre type checker .pyre/ .test-results *.lcov ansible_collections # Generated by setuptools-scm src/ansible_compat/_version.py ansible-compat-4.1.11/.packit.yaml000066400000000000000000000022331454751074100167210ustar00rootroot00000000000000--- # https://packit.dev/docs/configuration/ # Test locally running: packit build locally # spell-checker:ignore packit specfile copr epel specfile_path: dist/python-ansible-compat.spec actions: create-archive: # packit.dev service does have these module pre-installed: - python3 -m build --sdist --outdir dist - sh -c "ls dist/ansible-compat-*.tar.gz" get-current-version: - ./tools/get-version.sh post-upstream-clone: - rm -f dist/*.tar.gz || true - ./tools/update-version.sh srpm_build_deps: - python3-build - python3-setuptools_scm - python3-pytest - python3-pytest-mock jobs: - job: copr_build trigger: commit branch: main targets: - fedora-rawhide-x86_64 - fedora-rawhide-aarch64 - fedora-latest-x86_64 - fedora-latest-aarch64 # Missing python3-build see https://bugzilla.redhat.com/show_bug.cgi?id=2129071 # - centos-stream-9-aarch64 # - centos-stream-9-x86_64 - job: tests trigger: pull_request branch: main targets: - fedora-latest - fedora-rawhide # - job: propose_downstream # trigger: release # metadata: # dist-git-branch: master ansible-compat-4.1.11/.pre-commit-config.yaml000066400000000000000000000102251454751074100207650ustar00rootroot00000000000000--- ci: # format compatible with commitlint autoupdate_commit_msg: "chore: pre-commit autoupdate" autoupdate_schedule: monthly autofix_commit_msg: | chore: auto fixes from pre-commit.com hooks for more information, see https://pre-commit.ci skip: # https://github.com/pre-commit-ci/issues/issues/55 - ccv - pip-compile # No docker on pre-commit.ci - validate-config-in-container default_language_version: # Needed in order to make pip-compile output predictable. 
python: python3.10 exclude: | (?x)^( test/assets/.* )$ repos: - repo: https://github.com/astral-sh/ruff-pre-commit rev: "v0.1.9" hooks: - id: ruff args: [--fix, --exit-non-zero-on-fix] - repo: https://github.com/pappasam/toml-sort rev: v0.23.1 hooks: - id: toml-sort-fix - repo: https://github.com/pre-commit/mirrors-prettier # keep it before yamllint rev: "v3.0.3" hooks: - id: prettier additional_dependencies: - prettier - prettier-plugin-toml - prettier-plugin-sort-json - repo: https://github.com/pre-commit/pre-commit-hooks.git rev: v4.5.0 hooks: - id: end-of-file-fixer - id: trailing-whitespace exclude: > (?x)^( examples/playbooks/(with-skip-tag-id|unicode).yml| examples/playbooks/example.yml )$ - id: mixed-line-ending - id: check-byte-order-marker - id: check-executables-have-shebangs - id: check-merge-conflict - id: debug-statements language_version: python3 - repo: https://github.com/codespell-project/codespell rev: v2.2.6 hooks: - id: codespell - repo: https://github.com/adrienverge/yamllint.git rev: v1.33.0 hooks: - id: yamllint files: \.(yaml|yml)$ types: [file, yaml] entry: yamllint --strict - repo: https://github.com/psf/black rev: 23.12.1 hooks: - id: black language_version: python3 - repo: https://github.com/pre-commit/mirrors-mypy rev: v1.8.0 hooks: - id: mypy # empty args needed in order to match mypy cli behavior args: ["--strict"] additional_dependencies: - ansible-core - cached_property - packaging - pytest - pytest-mock - subprocess-tee>=0.4.1 - "typing-extensions>=4.5.0;python_version<'3.10'" - types-PyYAML - types-pkg_resources - types-jsonschema>=4.4.9 - repo: https://github.com/pycqa/pylint rev: v3.0.3 hooks: - id: pylint additional_dependencies: - PyYAML - pytest - typing_extensions # Keep last due to being considerably slower than the others: - repo: local hooks: - id: pip-compile-upgrade # To run it execute: `pre-commit run pip-compile-upgrade --hook-stage manual` name: Upgrade constraints files and requirements files: ^(pyproject\.toml|requirements\.txt)$ language: python entry: python -m piptools compile --resolver=backtracking --upgrade -q --strip-extras --extra docs --extra test --output-file=requirements.txt pyproject.toml --unsafe-package ansible-core --unsafe-package resolvelib --unsafe-package typing_extensions pass_filenames: false stages: - manual additional_dependencies: - pip-tools>=6.11.0 - id: pip-compile name: Check constraints files and requirements files: ^(pyproject\.toml|requirements\.txt)$ language: python entry: python -m piptools compile --resolver=backtracking -q --strip-extras --extra docs --extra test --output-file=requirements.txt pyproject.toml --unsafe-package ansible-core --unsafe-package resolvelib --unsafe-package typing_extensions pass_filenames: false additional_dependencies: - pip-tools>=6.11.0 - repo: https://github.com/packit/pre-commit-hooks rev: v1.2.0 hooks: - id: validate-config-in-container name: packit alias: packit - repo: https://github.com/mashi/codecov-validator rev: "1.0.1" hooks: - id: ccv name: codecov ansible-compat-4.1.11/.prettierignore000066400000000000000000000001131454751074100175420ustar00rootroot00000000000000test/assets/ # Generated by setuptools-scm src/ansible_compat/_version.py ansible-compat-4.1.11/.prettierrc.yaml000066400000000000000000000005431454751074100176330ustar00rootroot00000000000000--- proseWrap: always jsonRecursiveSort: true # prettier-plugin-sort-json tabWidth: 2 useTabs: false overrides: - files: - "*.md" options: # compatibility with markdownlint proseWrap: always printWidth: 80 - files: - 
"*.yaml" - "*.yml" options: # compatibility with yamllint proseWrap: preserve ansible-compat-4.1.11/.readthedocs.yml000066400000000000000000000020211454751074100175650ustar00rootroot00000000000000# Read the Docs configuration file # See https://docs.readthedocs.io/en/stable/config-file/v2.html # for details --- # Required version: 2 # Build documentation in the docs/ directory with Sphinx sphinx: # keep dirhtml for nice URLs without .html extension builder: dirhtml configuration: docs/conf.py fail_on_warning: true # Build documentation with MkDocs #mkdocs: # configuration: mkdocs.yml # fail_on_warning: true # Optionally build your docs in additional formats # such as PDF and ePub formats: [] submodules: include: all # [] exclude: [] recursive: true build: image: latest # Optionally set the version of Python and requirements required # to build docs python: version: "3.9" install: # On https://readthedocs.org/dashboard/ansible-lint/environmentvariables/ we # do have PIP_CONSTRAINTS=requirements.txt which ensures we install only # pinned requirements that that we know to be working. - method: pip path: . extra_requirements: - docs system_packages: false ansible-compat-4.1.11/.vscode/000077500000000000000000000000001454751074100160455ustar00rootroot00000000000000ansible-compat-4.1.11/.vscode/extensions.json000066400000000000000000000007121454751074100211370ustar00rootroot00000000000000{ "recommendations": [ "Tyriar.sort-lines", "charliermarsh.ruff", "esbenp.prettier-vscode", "hbenl.vscode-test-explorer", "ms-python.isort", "ms-python.python", "ms-python.vscode-pylance", "ms-vscode.live-server", "redhat.ansible", "redhat.vscode-yaml", "ryanluker.vscode-coverage-gutters", "shardulm94.trailing-spaces", "tamasfe.even-better-toml", "timonwong.shellcheck", "znck.grammarly" ] } ansible-compat-4.1.11/.vscode/settings.json000066400000000000000000000025311454751074100206010ustar00rootroot00000000000000{ "[markdown]": { "editor.defaultFormatter": "esbenp.prettier-vscode" }, "[python]": { "editor.codeActionsOnSave": { "source.fixAll": "explicit", "source.fixAll.ruff": "never", "source.organizeImports": "never" } }, "editor.formatOnSave": true, "evenBetterToml.formatter.alignComments": false, "evenBetterToml.formatter.allowedBlankLines": 2, "files.exclude": { "*.egg-info": true, ".pytest_cache": true, ".tox": true, "__pycache__": true, "build": true }, "git.ignoreLimitWarning": true, "grammarly.domain": "technical", "grammarly.files.include": ["**/*.txt", "**/*.md"], "grammarly.hideUnavailablePremiumAlerts": true, "grammarly.showExamples": true, "python.analysis.exclude": ["build"], "python.formatting.provider": "black", "python.linting.flake8Args": ["--ignore=E501,W503"], "python.linting.flake8Enabled": false, "python.linting.mypyCategorySeverity.error": "Warning", "python.linting.mypyEnabled": true, "python.linting.pylintEnabled": true, "python.terminal.activateEnvironment": true, "python.testing.pytestEnabled": true, "python.testing.unittestEnabled": false, "sortLines.filterBlankLines": true, "yaml.completion": true, "yaml.customTags": ["!encrypted/pkcs1-oaep scalar", "!vault scalar"], "yaml.format.enable": false, "yaml.validate": true } ansible-compat-4.1.11/.yamllint000066400000000000000000000002571454751074100163420ustar00rootroot00000000000000rules: document-start: disable indentation: level: error indent-sequences: consistent ignore: | .tox # ignore added because this file includes on-purpose errors 
ansible-compat-4.1.11/LICENSE000066400000000000000000000021071454751074100155110ustar00rootroot00000000000000MIT License Copyright (c) 2021 Community managed Ansible repositories Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. ansible-compat-4.1.11/README.md000066400000000000000000000013741454751074100157700ustar00rootroot00000000000000# ansible-compat [![pypi](https://img.shields.io/pypi/v/ansible-compat.svg)](https://pypi.org/project/ansible-compat/) [![docs](https://readthedocs.org/projects/ansible-compat/badge/?version=latest)](https://ansible-compat.readthedocs.io/) [![gh](https://github.com/ansible/ansible-compat/actions/workflows/tox.yml/badge.svg)](https://github.com/ansible/ansible-compat/actions/workflows/tox.yml) [![codecov.io](https://codecov.io/github/ansible/ansible-compat/coverage.svg?branch=main)](https://codecov.io/github/ansible/ansible-compat?branch=main) A python package contains functions that facilitate working with various versions of Ansible 2.12 and newer. Documentation is available at [ansible-compat.readthedocs.io](https://ansible-compat.readthedocs.io/). ansible-compat-4.1.11/ansible.cfg000066400000000000000000000001321454751074100165760ustar00rootroot00000000000000[defaults] # isolate testing of ansible-compat from user local setup collections_path = . 
ansible-compat-4.1.11/codecov.yml000066400000000000000000000001641454751074100166520ustar00rootroot00000000000000codecov: require_ci_to_pass: true comment: false coverage: status: patch: true # we want github annotations ansible-compat-4.1.11/docs/000077500000000000000000000000001454751074100154345ustar00rootroot00000000000000ansible-compat-4.1.11/docs/api.md000066400000000000000000000002521454751074100165260ustar00rootroot00000000000000# API ::: ansible_compat.config ::: ansible_compat.errors ::: ansible_compat.loaders ::: ansible_compat.prerun ::: ansible_compat.runtime ::: ansible_compat.schema ansible-compat-4.1.11/docs/images/000077500000000000000000000000001454751074100167015ustar00rootroot00000000000000ansible-compat-4.1.11/docs/images/favicon.ico000066400000000000000000000360561454751074100210340ustar00rootroot00000000000000 h6  (ž00 h&Æ(  •0”–”‘Ø“‘ø“‘ø”‘Ø”–•0ŽŽ ”’˜“‘ý“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ý”’˜ŽŽ ŽŽ “’¿“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“’¿ŽŽ ”’˜“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ”’˜•0“‘ý“‘ÿ“‘ÿÔӛÿÅÄxÿ“‘ÿ“‘ÿ“‘ÿ”’ÿÈÇ~ÿââŒÿ“‘ÿ“‘ÿ“‘ý•0”–“‘ÿ“‘ÿ“‘ÿ±¯Gÿòòáÿ”’ÿ“‘ÿ˜– ÿÜÜ­ÿÿÿÿÿÍ̊ÿ“‘ÿ“‘ÿ“‘ÿ”–”‘Ø“‘ÿ“‘ÿ“‘ÿ”’ÿïïÚÿ³±Kÿ¢ $ÿììÓÿÕԝÿùùòÿ¡Ÿ"ÿ“‘ÿ“‘ÿ“‘ÿ”‘Ø“‘ø“‘ÿ“‘ÿ“‘ÿ“‘ÿÃÂrÿêêÏÿóóãÿºž\ÿœŒdÿßߎÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ø“‘ø“‘ÿ“‘ÿ“‘ÿ“‘ÿš™ÿûûõÿ²°Iÿ“‘ÿéèËÿ²±Kÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ø”‘Ø“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿÖ՞ÿÈÇÿ©§4ÿððÜÿ”’ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ”‘Ø”–“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ©§4ÿòòáÿÕ՞ÿÄÃuÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ”–•0“‘ý“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿèçÉÿúúõÿ›™ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ý•0”’˜“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿµŽRÿÌˇÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ”’˜ŽŽ “’¿“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“’¿ŽŽ ŽŽ ”’˜“‘ý“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ý”’˜ŽŽ •0”–”‘Ø“‘ø“‘ø”‘Ø”–•0( @ ’’”‘X”‘™”‘È“‘꒐ù’ù“‘ꔑȔ‘™”‘X’’‘‘%’‘›“‘ó“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ó’‘›‘‘%™™ “‘‰“‘ø“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ø“‘‰™™ ••$“‘Ï“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘Ï••$••0“‘ç“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ç••0••$“‘ç“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ç••$™™ “‘Ï“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘Ï™™ “‘‰“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘‰‘‘%“‘ø“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿÃÂrÿééÍÿÛÛ¬ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿœŒdÿòñàÿ¿Ÿjÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ø‘‘%”‘›“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ§¥/ÿþþþÿÿÿþÿš§3ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ–”ÿÒѕÿÿÿþÿÿÿÿÿÙÙ§ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ’‘›ˆˆ“‘ó“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿåäÂÿÿÿÿÿÓҘÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ›ÿääÀÿÿÿÿÿÿÿÿÿÿÿÿÿ²°Jÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ”‘ò’’”‘X“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ¹·Yÿÿÿÿÿøøðÿ˜– ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ©§4ÿòòàÿÿÿÿÿÿÿÿÿÿÿÿÿïïÛÿ”’ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ”‘X”‘™“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ–”ÿõôæÿÿÿÿÿœ»dÿ“‘ÿ“‘ÿ“‘ÿº¹\ÿûúõÿÿÿÿÿááºÿîîØÿÿÿÿÿÄÃtÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ”‘™”‘È“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿËʄÿÿÿÿÿèçÈÿ“‘ÿ•“ÿÎ͌ÿþþþÿþþýÿÌˈÿšŠ2ÿþþþÿúúõÿ›™ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ”‘È“‘ê“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ žÿýýúÿþþþÿ¯­Cÿáàžÿÿÿÿÿùùñÿ¶µTÿ“‘ÿÑєÿÿÿÿÿÖ՟ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘꒐ù“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿÝܰÿÿÿÿÿýýûÿÿÿÿÿíí×ÿ¥£*ÿ“‘ÿ˜– ÿøøïÿÿÿþÿ©š6ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ’ù’ù“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ°¯FÿÿÿÿÿÿÿÿÿÝܯÿ™—ÿ“‘ÿ“‘ÿœŒdÿÿÿÿÿèçÉÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ’ù“‘ê“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ”’ÿîîØÿÿÿÿÿŸŒeÿ“‘ÿ“‘ÿ“‘ÿèèÊÿÿÿÿÿ»º`ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ꔑȓ‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿÃÁqÿÿÿÿÿèèÊÿ“‘ÿ“‘ÿ©§4ÿÿÿþÿööêÿ—• ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ”‘È”‘™“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ›™ÿúúôÿþþþÿšŠ3ÿ“‘ÿÔӛÿÿÿÿÿÍ̊ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ”‘™”‘X“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿ“‘ÿÕԜÿÿÿÿÿÓҘÿ™—ÿùùòÿýýûÿ¢ 
[binary image payloads omitted: the remainder of the docs/images/favicon.ico data begun on the previous line and the PNG data of docs/images/logo.png cannot be rendered as text; the contents of docs/images/logo.svg did not survive in this dump]
ansible-compat-4.1.11/docs/index.md
# Examples

## Using Ansible runtime

```python title="example.py"
{!../test/test_runtime_example.py!}
```

## Access to Ansible configuration

As you may not want to parse `ansible-config dump` yourself, you can make use of a simple python class that facilitates access to it, using python data types.
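As a minimal, hypothetical sketch (not taken from the project's own docs or test suite), such access could look roughly like the snippet below, using the helpers defined in `src/ansible_compat/config.py` later in this archive; the no-argument `AnsibleConfig()` constructor and the exact dump key names are assumptions:

```python
# Illustrative sketch only -- not part of the upstream documentation or tests.
from ansible_compat.config import AnsibleConfig, ansible_version, parse_ansible_version

# parse_ansible_version() extracts a packaging Version from raw `ansible --version` output.
version = parse_ansible_version("ansible [core 2.16.2]\nconfig file = None")
assert str(version) == "2.16.2"

# Without an argument, ansible_version() runs `ansible --version` itself; with an
# argument it simply wraps the string, which makes version comparisons easy.
if ansible_version() < ansible_version("2.12"):
    raise RuntimeError("ansible-core 2.12 or newer is required")

# AnsibleConfig behaves like a dict over `ansible-config dump`; the key name used
# here is assumed to match the uppercase names produced by that dump.
config = AnsibleConfig()
print(config["COLLECTIONS_PATHS"])
```

The canonical, tested examples remain the two files pulled in by the include directives on this page.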
```python {!../test/test_configuration_example.py!} ``` ansible-compat-4.1.11/examples/000077500000000000000000000000001454751074100163225ustar00rootroot00000000000000ansible-compat-4.1.11/examples/reqs_broken/000077500000000000000000000000001454751074100206345ustar00rootroot00000000000000ansible-compat-4.1.11/examples/reqs_broken/requirements.yml000066400000000000000000000001751454751074100241050ustar00rootroot00000000000000roles: [] collections: [] integration_tests_dependencies: [] # <-- invalid key unit_tests_dependencies: [] # <-- invalid key ansible-compat-4.1.11/examples/reqs_v1/000077500000000000000000000000001454751074100177025ustar00rootroot00000000000000ansible-compat-4.1.11/examples/reqs_v1/requirements.yml000066400000000000000000000002631454751074100231510ustar00rootroot00000000000000# v1 requirements test file # ansible-galaxy role install -r requirements.yml -p roles - src: git+https://github.com/geerlingguy/ansible-role-docker.git name: geerlingguy.mysql ansible-compat-4.1.11/examples/reqs_v2/000077500000000000000000000000001454751074100177035ustar00rootroot00000000000000ansible-compat-4.1.11/examples/reqs_v2/community-molecule-0.1.0.tar.gz000066400000000000000000000166771454751074100253340ustar00rootroot00000000000000‹·—€_ÿcommunity-molecule-0.1.0.tarí=ksÛ¶²ý¬_*wj;G€øÔ«uÎ8¶œhŽ_c+ÍÉM2 H‚kŠÔáÎÆõ¿»øì€N›:§·B;‘„Çb±X,v øxïdt8Œ«¿€qôݟ“4HË⟐Ö?uÛÔŸÓÍ®¥éºÙét¿ÃOÝúŽhß=BÊӌ&„|•AÖ÷I7 Òtã0dnÄÑ$ˆüž9 Kš³tA]Pg>Ï£ [6[EfÏch™‡Lä^±$(X ©ºª‰\šg³8I!÷-ü$͋8 "ráÐ$b”lOƒl–;í4;Mšôž7LõD7ç܃ã¡:÷ČN+p+(@K³ šVP<–ºI°È$b{Q8!#Dz©F_£ŒQJ ˜$]07ð—,Â| %ª@ \¥¬Äáx4®:”…?±F”‡¡DdÁ"EnÀû›[9ÈEœYœ,»Y–-ÒA»-ˆ¢ÕÛT ¬TxŠír6ÔÕ ðb7Ÿ3„mÙ÷,ž³²¯ÐGŠ9ÀïԖ0iÐDBMæ4 |˜»‚l7u6; /žˆ8øÙrÁ xežåÎ.Ó|>) Ò5ìÎJ‘Ì‚B£ß3]Šý®Ëz†C;Ô1Y·ï2‡z¶Ó× ßê÷MÖéwuJûŸÝõmS7ËëÚ®éyŸD"Næ4xz1Ž*ãö¯²þ+Êþy}ü†üïXzgMþkݎ¶‘ÿ%ÿqòô†Ë³bÝ©Rš–ëÍ ™µºÞ Asw±U%õõÂÌZgû/÷N^ N_šIš­w\.ôÏ­ôû–z:Ÿ ¿|æQÇ겞ϺZ§ßëuÜŸÜfû¶fY=ß3úžæØF×ïØ^ߡݮivX¯ù0Üåöð˜ä’]¶2KŸ]ÏíBªOÜ8aêbùµŠÎ²<Ïa¬«w{¶fxVG³=jQ¿c±ŽßuMj2Š»FG×-­giÌ빆ß7AÀ,w;ôS7‹Ó,ýZ8{}­C™ëttdíwº^ß÷Y¯gèn×Ñ,Gë˜~×ðüŸ×gŠí˜&ó͎£»š×ïùzç8¯êC_o@ŠãêšÃÇrz¶å›u©Õwz}‹ùœvÂn¯ß×ùj¡=§géºß·ü¶Õyð2¡K'Ž/y¡ÈNÛW4 <š1u9¿åüžÑsuê‚Bá÷]Çì2_7}ðÍ.¥ 6€–at;¶Ö§S»ïXŽÓ÷Sƒv€œº€ó0 ¢¯&ÝNׅ¥å3ÝÑ(µÌ.`i1œgzŸÑ³:žíږœjömæÀ”k®aX}æûЎvü‡ Æ£Ñþðäbøµ°îÔw“ö˜Š÷û=J]£Ïú–nPÃí0˜•š (ëƒôèšêyÀ¹]Ûôzšþ@¬ç,£É¢Ø_;É£,˜Uæô|œ§wûŽcø¶eu©Ý¡nߑIûºÞåÝï dÒÃÕzŒÑŽóîv ­mhŠÿ@reñG5ˆ‚¯Æš~Ïù¢÷AÂ۞ïõtD=Õ{®ÆL×Ò©iŽæè /MÓ×\RTƒé‡õbŒ>kwF£) ãé£J£ª×vù•/î¯E=Ø*ûŸôêz~×é‚ cã*°§֍Öó:ZÏÔ™–ëûfÏê[ ˜Ý±L§ùåãˆ#?øºƒ0`g÷Àë1ÓœmÃð=Ê,0L[l§Oa߄ f×֝N_·|×7uØøa¥÷AoøòAø ¢éü­˜¡ì¿­^2¶øZ„úi®eÅ뻺«[}ŸŸåÂFÞñŸa] «Î,. 
[binary archive data for examples/reqs_v2/community-molecule-0.1.0.tar.gz omitted: not representable as text]
Y-­]ˆQà¿9ý%N&r”òÞÇ1æ‘ý2× ¢»õ0o­÷«âéåjÕç2»šMÚä c2!ç^•­aNæòó͒$ÀAYB«ls‡†¢Á¹È.k“mtqž†ËZ?; Ð%O‚l \ð±„q!3É¡Ìä#̧õ:Ï«ŸXzÅ×ÑDœ\ÊÿÂ,2YrZ—’ñˏbŒ_š}ß{ü* áÚÔËÜÆß]þWâ›ëÿŠ©éýÿ›Í?ÿ $šÿߙËØüýßÇIŸøÃ-©ÿ™ºÞœ£ÿu6óÿHöß"–Ú ~ܱT²H{𗥕 uCŸšy5 W¶Ú‘þÙ^&/.š*$ð€šü¹.ž«³íàc"åI6mhjÏù$hÞ öÆo4ÍéŽTJåAÚCÇW[ Œ·U\®žÏùSž°ÈSb_AœWA&Y)Í„  )ü\—_‚\)ŸC Oá×Pá°-J¹ëWq–SâÄc‰ÚçåZ¢Nv•*øÞ·’ΘtIï©Ë/8ñeï|ø=c…¿›*ÔæßIãìþ‡¶Åqñ*QMµ§ZŸ¢©hPv^œT3©Ç.ÔߔU~º·ÄÈ[Õ%_¥¢R<`¯ˆÅûªpÞâ1.÷ƒgˆx"Œç×¶dŒ?À‡÷Ý<]c@]5ìOs`ÑšD„_¢wê6ýºœ‡;ÿSbP ^ÅJ-Þö}í¥õ,YVñ„ߪûÈG^Š€•{`©Šj ÿz»jÎÃkºL'IIë¯(ÀƒÄÒ­œSPœµÌÏÚð…e~°[<µÆ_A¢éJoP™‰£¹ipÅÐ äÍâØ<ø <>¢ 8,^a "üÛ^NCAnµÖè4*CAõŠÑ€ œ8ekc–óò¬Æ+ÃP°¬]&õ•«/Y9Oøkð¶{ȓ2nW>H V †A®¬ˆòõ‡Cm“6i“6i“6i“6i“6i“6i“6i“6i“6i“6i“6i“6i“6i“6é¥ÿ9R‘ ansible-compat-4.1.11/examples/reqs_v2/requirements.yml000066400000000000000000000014561454751074100231570ustar00rootroot00000000000000--- # For local install run: # ansible-galaxy role install -r requirements.yml -p roles # ansible-galaxy collection install -r requirements.yml -p collections # # Current test file avoids using galaxy server on purpose, for resiliency roles: - src: git+https://github.com/geerlingguy/ansible-role-docker.git name: geerlingguy.mysql collections: - name: community-molecule-0.1.0.tar.gz # Also needed for testing purposes as this should trigger addition of --pre # argument as this is required due to # https://github.com/ansible/ansible-lint/issues/3686 # https://github.com/ansible/ansible/issues/79109 - name: https://github.com/ansible-collections/amazon.aws.git type: git version: main - name: https://github.com/ansible-collections/community.aws.git type: git version: main ansible-compat-4.1.11/mkdocs.yml000066400000000000000000000043311454751074100165100ustar00rootroot00000000000000--- site_name: Ansible Compat Library site_url: https://ansible-compat.readthedocs.io/ repo_url: https://github.com/ansible/ansible-compat edit_uri: blob/main/docs/ copyright: Copyright © 2023 Red Hat, Inc. docs_dir: docs # strict: true watch: - mkdocs.yml - src - docs theme: name: ansible features: - content.code.copy - content.action.edit - navigation.expand - navigation.sections - navigation.instant - navigation.indexes - navigation.tracking - toc.integrate extra: social: - icon: fontawesome/brands/github-alt link: https://github.com/ansible/ansible-compat nav: - examples: index.md - api: api.md plugins: - autorefs - search - material/social - material/tags - mkdocstrings: handlers: python: import: - https://docs.python.org/3/objects.inv options: # heading_level: 2 docstring_style: sphinx docstring_options: ignore_init_summary: yes show_submodules: no docstring_section_style: list members_order: alphabetical show_category_heading: no # cannot merge init into class due to parse error... 
# merge_init_into_class: yes # separate_signature: yes show_root_heading: yes show_signature_annotations: yes separate_signature: yes # show_bases: false # options: # show_root_heading: true # docstring_style: sphinx markdown_extensions: - markdown_include.include: base_path: docs - admonition - def_list - footnotes - pymdownx.highlight: anchor_linenums: true - pymdownx.inlinehilite - pymdownx.superfences - pymdownx.magiclink: repo_url_shortener: true repo_url_shorthand: true social_url_shorthand: true social_url_shortener: true user: facelessuser repo: pymdown-extensions normalize_issue_symbols: true - pymdownx.tabbed: alternate_style: true - toc: toc_depth: 2 permalink: true - pymdownx.superfences: custom_fences: - name: mermaid class: mermaid format: !!python/name:pymdownx.superfences.fence_code_format ansible-compat-4.1.11/pyproject.toml000066400000000000000000000111741454751074100174240ustar00rootroot00000000000000[build-system] requires = [ "setuptools >= 65.3.0", # required by pyproject+setuptools_scm integration and editable installs "setuptools_scm[toml] >= 7.0.5" # required for "no-local-version" scheme ] build-backend = "setuptools.build_meta" [project] # https://peps.python.org/pep-0621/#readme requires-python = ">=3.9" dynamic = ["version"] name = "ansible-compat" description = "Ansible compatibility goodies" readme = "README.md" authors = [{"name" = "Sorin Sbarnea", "email" = "ssbarnea@redhat.com"}] maintainers = [{"name" = "Sorin Sbarnea", "email" = "ssbarnea@redhat.com"}] license = {text = "MIT"} classifiers = [ "Development Status :: 5 - Production/Stable", "Environment :: Console", "Intended Audience :: Developers", "Intended Audience :: Information Technology", "Intended Audience :: System Administrators", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python", "Topic :: System :: Systems Administration", "Topic :: Software Development :: Bug Tracking", "Topic :: Software Development :: Quality Assurance", "Topic :: Software Development :: Testing", "Topic :: Utilities" ] keywords = ["ansible"] dependencies = [ # https://docs.ansible.com/ansible/latest/reference_appendices/release_and_maintenance.html "ansible-core>=2.12", "packaging", "PyYAML", "subprocess-tee>=0.4.1", "jsonschema>=4.6.0", "typing-extensions>=4.5.0;python_version<'3.10'" ] [project.optional-dependencies] docs = ["argparse-manpage", "black", "mkdocs-ansible[lock]>=0.1.2"] test = ["coverage", "pip-tools", "pytest>=7.2.0", "pytest-mock", "pytest-plus>=0.6.1"] [project.urls] homepage = "https://github.com/ansible/ansible-compat" documentation = "https://ansible-compat.readthedocs.io/" repository = "https://github.com/ansible/ansible-compat" changelog = "https://github.com/ansible/ansible-compat/releases" [tool.coverage.report] exclude_lines = ["pragma: no cover", "if TYPE_CHECKING:"] fail_under = 92 skip_covered = true show_missing = true [tool.coverage.run] source = ["src"] # Do not use branch until bug is fixes: # https://github.com/nedbat/coveragepy/issues/605 branch = false parallel = true concurrency = ["multiprocessing", "thread"] [tool.isort] profile = "black" [tool.mypy] python_version = 3.9 color_output = true error_summary = true disallow_untyped_calls = true disallow_untyped_defs = true disallow_any_generics = true # 
disallow_any_unimported = True # ; warn_redundant_casts = True # warn_return_any = True # warn_unused_configs = True exclude = "test/local-content" [[tool.mypy.overrides]] module = "ansible.*" ignore_missing_imports = true [tool.pylint.BASIC] good-names = [ "f", # filename "i", "j", "k", "ns", # namespace "ex", "Run", "_" ] [tool.pylint.IMPORTS] preferred-modules = ["unittest:pytest"] [tool.pylint."MESSAGES CONTROL"] disable = [ # On purpose disabled as we rely on black "line-too-long", # local imports do not work well with pre-commit hook "import-error", # already covered by ruff which is faster "too-many-arguments", # PLR0913 "raise-missing-from", # Temporary disable duplicate detection we remove old code from prerun "duplicate-code" ] [tool.pytest.ini_options] # ensure we treat warnings as error filterwarnings = [ "error", # py312 ansible-core # https://github.com/ansible/ansible/issues/81906 "ignore:'importlib.abc.TraversableResources' is deprecated and slated for removal in Python 3.14:DeprecationWarning" ] testpaths = ["test"] [tool.ruff] select = ["ALL"] ignore = [ # Disabled on purpose: "ANN101", # Missing type annotation for `self` in method "D203", # incompatible with D211 "D211", "D213", # incompatible with D212 "E501", # we use black "RET504", # Unnecessary variable assignment before `return` statement # Temporary disabled during adoption: "S607", # Starting a process with a partial executable path "PLR0912", # Bug https://github.com/charliermarsh/ruff/issues/4244 "PLR0913", # Bug https://github.com/charliermarsh/ruff/issues/4244 "RUF012", "PERF203" ] target-version = "py39" [tool.ruff.flake8-pytest-style] parametrize-values-type = "tuple" [tool.ruff.isort] known-first-party = ["ansible_compat"] known-third-party = ["packaging"] [tool.ruff.per-file-ignores] "test/**/*.py" = ["SLF001", "S101", "FBT001"] [tool.ruff.pydocstyle] convention = "pep257" [tool.setuptools_scm] local_scheme = "no-local-version" write_to = "src/ansible_compat/_version.py" ansible-compat-4.1.11/readthedocs.yml000066400000000000000000000003561454751074100175200ustar00rootroot00000000000000version: 2 submodules: include: all recursive: true mkdocs: fail_on_warning: true build: os: ubuntu-22.04 tools: python: "3.11" python: install: - method: pip path: . 
extra_requirements: - docs ansible-compat-4.1.11/requirements.txt000066400000000000000000000151761454751074100200020ustar00rootroot00000000000000# # This file is autogenerated by pip-compile with Python 3.10 # by the following command: # # pip-compile --extra=docs --extra=test --output-file=requirements.txt --strip-extras --unsafe-package=ansible-core --unsafe-package=resolvelib --unsafe-package=typing_extensions pyproject.toml # argparse-manpage==4.5 # via ansible-compat (pyproject.toml) attrs==23.2.0 # via # jsonschema # referencing babel==2.13.1 # via # mkdocs-ansible # mkdocs-material beautifulsoup4==4.12.2 # via # linkchecker # mkdocs-ansible # mkdocs-htmlproofer-plugin black==23.12.1 # via ansible-compat (pyproject.toml) build==1.0.3 # via pip-tools cairocffi==1.6.1 # via # cairosvg # mkdocs-ansible cairosvg==2.7.1 # via mkdocs-ansible certifi==2023.11.17 # via # mkdocs-ansible # requests cffi==1.16.0 # via # cairocffi # cryptography # mkdocs-ansible charset-normalizer==3.3.2 # via # mkdocs-ansible # requests click==8.1.7 # via # black # mkdocs # mkdocs-ansible # mkdocstrings # pip-tools colorama==0.4.6 # via # griffe # mkdocs-ansible # mkdocs-material coverage==7.4.0 # via ansible-compat (pyproject.toml) cryptography==41.0.7 # via ansible-core csscompressor==0.9.5 # via # mkdocs-ansible # mkdocs-minify-plugin cssselect2==0.7.0 # via # cairosvg # mkdocs-ansible defusedxml==0.7.1 # via # cairosvg # mkdocs-ansible dnspython==2.4.2 # via # linkchecker # mkdocs-ansible exceptiongroup==1.2.0 # via pytest ghp-import==2.1.0 # via # mkdocs # mkdocs-ansible griffe==0.38.0 # via # mkdocs-ansible # mkdocstrings-python htmlmin2==0.1.13 # via # mkdocs-ansible # mkdocs-minify-plugin idna==3.4 # via # mkdocs-ansible # requests importlib-metadata==6.8.0 # via mkdocs-ansible iniconfig==2.0.0 # via pytest jinja2==3.1.2 # via # ansible-core # mkdocs # mkdocs-ansible # mkdocs-material # mkdocstrings jsmin==3.0.1 # via # mkdocs-ansible # mkdocs-minify-plugin jsonschema==4.20.0 # via ansible-compat (pyproject.toml) jsonschema-specifications==2023.12.1 # via jsonschema linkchecker==10.3.0 # via mkdocs-ansible markdown==3.5.1 # via # markdown-include # mkdocs # mkdocs-ansible # mkdocs-autorefs # mkdocs-htmlproofer-plugin # mkdocs-material # mkdocstrings # pymdown-extensions markdown-exec==1.8.0 # via mkdocs-ansible markdown-include==0.8.1 # via mkdocs-ansible markupsafe==2.1.3 # via # jinja2 # mkdocs # mkdocs-ansible # mkdocstrings mergedeep==1.3.4 # via # mkdocs # mkdocs-ansible mkdocs==1.5.3 # via # mkdocs-ansible # mkdocs-autorefs # mkdocs-gen-files # mkdocs-htmlproofer-plugin # mkdocs-material # mkdocs-minify-plugin # mkdocs-monorepo-plugin # mkdocstrings mkdocs-ansible==0.2.1 # via # ansible-compat (pyproject.toml) # mkdocs-ansible mkdocs-autorefs==0.5.0 # via # mkdocs-ansible # mkdocstrings mkdocs-gen-files==0.5.0 # via mkdocs-ansible mkdocs-htmlproofer-plugin==1.0.0 # via mkdocs-ansible mkdocs-material==9.5.3 # via mkdocs-ansible mkdocs-material-extensions==1.3.1 # via # mkdocs-ansible # mkdocs-material mkdocs-minify-plugin==0.7.2 # via mkdocs-ansible mkdocs-monorepo-plugin==1.1.0 # via mkdocs-ansible mkdocstrings==0.24.0 # via # mkdocs-ansible # mkdocstrings-python mkdocstrings-python==1.7.5 # via mkdocs-ansible mypy-extensions==1.0.0 # via black packaging==23.2 # via # ansible-compat (pyproject.toml) # ansible-core # black # build # mkdocs # mkdocs-ansible # pytest paginate==0.5.6 # via # mkdocs-ansible # mkdocs-material pathspec==0.11.2 # via # black # mkdocs # mkdocs-ansible pillow==10.2.0 # via 
# cairosvg # mkdocs-ansible pip==23.3.2 # via pip-tools pip-tools==7.3.0 # via ansible-compat (pyproject.toml) pipdeptree==2.13.1 # via mkdocs-ansible platformdirs==4.0.0 # via # black # mkdocs # mkdocs-ansible # mkdocstrings pluggy==1.3.0 # via pytest pycparser==2.21 # via # cffi # mkdocs-ansible pygments==2.17.1 # via # mkdocs-ansible # mkdocs-material pymdown-extensions==10.7 # via # markdown-exec # mkdocs-ansible # mkdocs-material # mkdocstrings pyproject-hooks==1.0.0 # via build pytest==7.4.4 # via # ansible-compat (pyproject.toml) # pytest-mock # pytest-plus pytest-mock==3.12.0 # via ansible-compat (pyproject.toml) pytest-plus==0.6.1 # via ansible-compat (pyproject.toml) python-dateutil==2.8.2 # via # ghp-import # mkdocs-ansible python-slugify==8.0.1 # via # mkdocs-ansible # mkdocs-monorepo-plugin pyyaml==6.0.1 # via # ansible-compat (pyproject.toml) # ansible-core # mkdocs # mkdocs-ansible # pymdown-extensions # pyyaml-env-tag pyyaml-env-tag==0.1 # via # mkdocs # mkdocs-ansible referencing==0.32.1 # via # jsonschema # jsonschema-specifications regex==2023.10.3 # via # mkdocs-ansible # mkdocs-material requests==2.31.0 # via # linkchecker # mkdocs-ansible # mkdocs-htmlproofer-plugin # mkdocs-material rpds-py==0.16.2 # via # jsonschema # referencing setuptools==69.0.3 # via pip-tools six==1.16.0 # via # mkdocs-ansible # python-dateutil soupsieve==2.5 # via # beautifulsoup4 # mkdocs-ansible subprocess-tee==0.4.1 # via ansible-compat (pyproject.toml) text-unidecode==1.3 # via # mkdocs-ansible # python-slugify tinycss2==1.2.1 # via # cairosvg # cssselect2 # mkdocs-ansible tomli==2.0.1 # via # argparse-manpage # black # build # pip-tools # pyproject-hooks # pytest typing-extensions==4.8.0 # via # black # mkdocs-ansible urllib3==2.1.0 # via # mkdocs-ansible # requests watchdog==3.0.0 # via # mkdocs # mkdocs-ansible webencodings==0.5.1 # via # cssselect2 # mkdocs-ansible # tinycss2 wheel==0.42.0 # via pip-tools zipp==3.17.0 # via # importlib-metadata # mkdocs-ansible # The following packages are considered to be unsafe in a requirements file: # ansible-core # resolvelib ansible-compat-4.1.11/src/000077500000000000000000000000001454751074100152735ustar00rootroot00000000000000ansible-compat-4.1.11/src/ansible_compat/000077500000000000000000000000001454751074100202535ustar00rootroot00000000000000ansible-compat-4.1.11/src/ansible_compat/__init__.py000066400000000000000000000003661454751074100223710ustar00rootroot00000000000000"""ansible_compat package.""" from importlib.metadata import PackageNotFoundError, version try: __version__ = version("ansible-compat") except PackageNotFoundError: # pragma: no cover __version__ = "0.1.dev1" __all__ = ["__version__"] ansible-compat-4.1.11/src/ansible_compat/config.py000066400000000000000000000370641454751074100221040ustar00rootroot00000000000000"""Store configuration options as a singleton.""" from __future__ import annotations import ast import copy import os import re import subprocess from collections import UserDict from typing import Literal from packaging.version import Version from ansible_compat.constants import ANSIBLE_MIN_VERSION from ansible_compat.errors import InvalidPrerequisiteError, MissingAnsibleError from ansible_compat.ports import cache # do not use lru_cache here, as environment can change between calls def ansible_collections_path() -> str: """Return collection path variable for current version of Ansible.""" for env_var in [ "ANSIBLE_COLLECTIONS_PATH", "ANSIBLE_COLLECTIONS_PATHS", ]: if env_var in os.environ: return env_var return 
"ANSIBLE_COLLECTIONS_PATH" def parse_ansible_version(stdout: str) -> Version: """Parse output of 'ansible --version'.""" # Ansible can produce extra output before displaying version in debug mode. # ansible-core 2.11+: 'ansible [core 2.11.3]' match = re.search( r"^ansible \[(?:core|base) (?P[^\]]+)\]", stdout, re.MULTILINE, ) if match: return Version(match.group("version")) msg = f"Unable to parse ansible cli version: {stdout}\nKeep in mind that only {ANSIBLE_MIN_VERSION } or newer are supported." raise InvalidPrerequisiteError(msg) @cache def ansible_version(version: str = "") -> Version: """Return current Version object for Ansible. If version is not mentioned, it returns current version as detected. When version argument is mentioned, it return converts the version string to Version object in order to make it usable in comparisons. """ if version: return Version(version) proc = subprocess.run( ["ansible", "--version"], # noqa: S603 text=True, check=False, capture_output=True, ) if proc.returncode != 0: raise MissingAnsibleError(proc=proc) return parse_ansible_version(proc.stdout) class AnsibleConfig(UserDict[str, object]): # pylint: disable=too-many-ancestors """Interface to query Ansible configuration. This should allow user to access everything provided by `ansible-config dump` without having to parse the data himself. """ _aliases = { "COLLECTIONS_PATH": "COLLECTIONS_PATHS", # 2.9 -> 2.10 } # Expose some attributes to enable auto-complete in editors, based on # https://docs.ansible.com/ansible/latest/reference_appendices/config.html action_warnings: bool = True agnostic_become_prompt: bool = True allow_world_readable_tmpfiles: bool = False ansible_connection_path: str | None = None ansible_cow_acceptlist: list[str] ansible_cow_path: str | None = None ansible_cow_selection: str = "default" ansible_force_color: bool = False ansible_nocolor: bool = False ansible_nocows: bool = False ansible_pipelining: bool = False any_errors_fatal: bool = False become_allow_same_user: bool = False become_plugin_path: list[str] = [ "~/.ansible/plugins/become", "/usr/share/ansible/plugins/become", ] cache_plugin: str = "memory" cache_plugin_connection: str | None = None cache_plugin_prefix: str = "ansible_facts" cache_plugin_timeout: int = 86400 callable_accept_list: list[str] = [] callbacks_enabled: list[str] = [] collections_on_ansible_version_mismatch: Literal["warning", "ignore"] = "warning" collections_paths: list[str] = [ "~/.ansible/collections", "/usr/share/ansible/collections", ] collections_scan_sys_path: bool = True color_changed: str = "yellow" color_console_prompt: str = "white" color_debug: str = "dark gray" color_deprecate: str = "purple" color_diff_add: str = "green" color_diff_lines: str = "cyan" color_diff_remove: str = "red" color_error: str = "red" color_highlight: str = "white" color_ok: str = "green" color_skip: str = "cyan" color_unreachable: str = "bright red" color_verbose: str = "blue" color_warn: str = "bright purple" command_warnings: bool = False conditional_bare_vars: bool = False connection_facts_modules: dict[str, str] controller_python_warning: bool = True coverage_remote_output: str | None coverage_remote_paths: list[str] default_action_plugin_path: list[str] = [ "~/.ansible/plugins/action", "/usr/share/ansible/plugins/action", ] default_allow_unsafe_lookups: bool = False default_ask_pass: bool = False default_ask_vault_pass: bool = False default_become: bool = False default_become_ask_pass: bool = False default_become_exe: str | None = None default_become_flags: str 
default_become_method: str = "sudo" default_become_user: str = "root" default_cache_plugin_path: list[str] = [ "~/.ansible/plugins/cache", "/usr/share/ansible/plugins/cache", ] default_callback_plugin_path: list[str] = [ "~/.ansible/plugins/callback", "/usr/share/ansible/plugins/callback", ] default_cliconf_plugin_path: list[str] = [ "~/.ansible/plugins/cliconf", "/usr/share/ansible/plugins/cliconf", ] default_connection_plugin_path: list[str] = [ "~/.ansible/plugins/connection", "/usr/share/ansible/plugins/connection", ] default_debug: bool = False default_executable: str = "/bin/sh" default_fact_path: str | None = None default_filter_plugin_path: list[str] = [ "~/.ansible/plugins/filter", "/usr/share/ansible/plugins/filter", ] default_force_handlers: bool = False default_forks: int = 5 default_gathering: Literal["smart", "explicit", "implicit"] = "smart" default_gather_subset: list[str] = ["all"] default_gather_timeout: int = 10 default_handler_includes_static: bool = False default_hash_behaviour: str = "replace" default_host_list: list[str] = ["/etc/ansible/hosts"] default_httpapi_plugin_path: list[str] = [ "~/.ansible/plugins/httpapi", "/usr/share/ansible/plugins/httpapi", ] default_internal_poll_interval: float = 0.001 default_inventory_plugin_path: list[str] = [ "~/.ansible/plugins/inventory", "/usr/share/ansible/plugins/inventory", ] default_jinja2_extensions: list[str] = [] default_jinja2_native: bool = False default_keep_remote_files: bool = False default_libvirt_lxc_noseclabel: bool = False default_load_callback_plugins: bool = False default_local_tmp: str = "~/.ansible/tmp" default_log_filter: list[str] = [] default_log_path: str | None = None default_lookup_lugin_path: list[str] = [ "~/.ansible/plugins/lookup", "/usr/share/ansible/plugins/lookup", ] default_managed_str: str = "Ansible managed" default_module_args: str default_module_compression: str = "ZIP_DEFLATED" default_module_name: str = "command" default_module_path: list[str] = [ "~/.ansible/plugins/modules", "/usr/share/ansible/plugins/modules", ] default_module_utils_path: list[str] = [ "~/.ansible/plugins/module_utils", "/usr/share/ansible/plugins/module_utils", ] default_netconf_plugin_path: list[str] = [ "~/.ansible/plugins/netconf", "/usr/share/ansible/plugins/netconf", ] default_no_log: bool = False default_no_target_syslog: bool = False default_null_representation: str | None = None default_poll_interval: int = 15 default_private_key_file: str | None = None default_private_role_vars: bool = False default_remote_port: str | None = None default_remote_user: str | None = None # https://docs.ansible.com/ansible/latest/reference_appendices/config.html#collections-paths default_collections_path: list[str] = [ "~/.ansible/collections", "/usr/share/ansible/collections", ] default_roles_path: list[str] = [ "~/.ansible/roles", "/usr/share/ansible/roles", "/etc/ansible/roles", ] default_selinux_special_fs: list[str] = [ "fuse", "nfs", "vboxsf", "ramfs", "9p", "vfat", ] default_stdout_callback: str = "default" default_strategy: str = "linear" default_strategy_plugin_path: list[str] = [ "~/.ansible/plugins/strategy", "/usr/share/ansible/plugins/strategy", ] default_su: bool = False default_syslog_facility: str = "LOG_USER" default_task_includes_static: bool = False default_terminal_plugin_path: list[str] = [ "~/.ansible/plugins/terminal", "/usr/share/ansible/plugins/terminal", ] default_test_plugin_path: list[str] = [ "~/.ansible/plugins/test", "/usr/share/ansible/plugins/test", ] default_timeout: int = 10 default_transport: 
str = "smart" default_undefined_var_behavior: bool = True default_vars_plugin_path: list[str] = [ "~/.ansible/plugins/vars", "/usr/share/ansible/plugins/vars", ] default_vault_encrypt_identity: str | None = None default_vault_identity: str = "default" default_vault_identity_list: list[str] = [] default_vault_id_match: bool = False default_vault_password_file: str | None = None default_verbosity: int = 0 deprecation_warnings: bool = False devel_warning: bool = True diff_always: bool = False diff_context: int = 3 display_args_to_stdout: bool = False display_skipped_hosts: bool = True docsite_root_url: str = "https://docs.ansible.com/ansible/" doc_fragment_plugin_path: list[str] = [ "~/.ansible/plugins/doc_fragments", "/usr/share/ansible/plugins/doc_fragments", ] duplicate_yaml_dict_key: Literal["warn", "error", "ignore"] = "warn" enable_task_debugger: bool = False error_on_missing_handler: bool = True facts_modules: list[str] = ["smart"] galaxy_cache_dir: str = "~/.ansible/galaxy_cache" galaxy_display_progress: str | None = None galaxy_ignore_certs: bool = False galaxy_role_skeleton: str | None = None galaxy_role_skeleton_ignore: list[str] = ["^.git$", "^.*/.git_keep$"] galaxy_server: str = "https://galaxy.ansible.com" galaxy_server_list: str | None = None galaxy_token_path: str = "~/.ansible/galaxy_token" host_key_checking: bool = True host_pattern_mismatch: Literal["warning", "error", "ignore"] = "warning" inject_facts_as_vars: bool = True interpreter_python: str = "auto_legacy" interpreter_python_distro_map: dict[str, str] interpreter_python_fallback: list[str] invalid_task_attribute_failed: bool = True inventory_any_unparsed_is_failed: bool = False inventory_cache_enabled: bool = False inventory_cache_plugin: str | None = None inventory_cache_plugin_connection: str | None = None inventory_cache_plugin_prefix: str = "ansible_facts" inventory_cache_timeout: int = 3600 inventory_enabled: list[str] = [ "host_list", "script", "auto", "yaml", "ini", "toml", ] inventory_export: bool = False inventory_ignore_exts: str inventory_ignore_patterns: list[str] = [] inventory_unparsed_is_failed: bool = False localhost_warning: bool = True max_file_size_for_diff: int = 104448 module_ignore_exts: str netconf_ssh_config: str | None = None network_group_modules: list[str] = [ "eos", "nxos", "ios", "iosxr", "junos", "enos", "ce", "vyos", "sros", "dellos9", "dellos10", "dellos6", "asa", "aruba", "aireos", "bigip", "ironware", "onyx", "netconf", "exos", "voss", "slxos", ] old_plugin_cache_clearing: bool = False paramiko_host_key_auto_add: bool = False paramiko_look_for_keys: bool = True persistent_command_timeout: int = 30 persistent_connect_retry_timeout: int = 15 persistent_connect_timeout: int = 30 persistent_control_path_dir: str = "~/.ansible/pc" playbook_dir: str | None playbook_vars_root: Literal["top", "bottom", "all"] = "top" plugin_filters_cfg: str | None = None python_module_rlimit_nofile: int = 0 retry_files_enabled: bool = False retry_files_save_path: str | None = None run_vars_plugins: str = "demand" show_custom_stats: bool = False string_conversion_action: Literal["warn", "error", "ignore"] = "warn" string_type_filters: list[str] = [ "string", "to_json", "to_nice_json", "to_yaml", "to_nice_yaml", "ppretty", "json", ] system_warnings: bool = True tags_run: list[str] = [] tags_skip: list[str] = [] task_debugger_ignore_errors: bool = True task_timeout: int = 0 transform_invalid_group_chars: Literal[ "always", "never", "ignore", "silently", ] = "never" use_persistent_connections: bool = False 
variable_plugins_enabled: list[str] = ["host_group_vars"] variable_precedence: list[str] = [ "all_inventory", "groups_inventory", "all_plugins_inventory", "all_plugins_play", "groups_plugins_inventory", "groups_plugins_play", ] verbose_to_stderr: bool = False win_async_startup_timeout: int = 5 worker_shutdown_poll_count: int = 0 worker_shutdown_poll_delay: float = 0.1 yaml_filename_extensions: list[str] = [".yml", ".yaml", ".json"] def __init__( self, config_dump: str | None = None, data: dict[str, object] | None = None, ) -> None: """Load config dictionary.""" super().__init__() if data: self.data = copy.deepcopy(data) return if not config_dump: env = os.environ.copy() # Avoid possible ANSI garbage env["ANSIBLE_FORCE_COLOR"] = "0" config_dump = subprocess.check_output( ["ansible-config", "dump"], # noqa: S603 universal_newlines=True, env=env, ) for match in re.finditer( r"^(?P[A-Za-z0-9_]+).* = (?P.*)$", config_dump, re.MULTILINE, ): key = match.groupdict()["key"] value = match.groupdict()["value"] try: self[key] = ast.literal_eval(value) except (NameError, SyntaxError, ValueError): self[key] = value def __getattribute__(self, attr_name: str) -> object: """Allow access of config options as attributes.""" _dict = super().__dict__ # pylint: disable=no-member if attr_name in _dict: return _dict[attr_name] data = super().__getattribute__("data") if attr_name == "data": # pragma: no cover return data name = attr_name.upper() if name in data: return data[name] if name in AnsibleConfig._aliases: return data[AnsibleConfig._aliases[name]] return super().__getattribute__(attr_name) def __getitem__(self, name: str) -> object: """Allow access to config options using indexing.""" return super().__getitem__(name.upper()) def __copy__(self) -> AnsibleConfig: """Allow users to run copy on Config.""" return AnsibleConfig(data=self.data) def __deepcopy__(self, memo: object) -> AnsibleConfig: """Allow users to run deeepcopy on Config.""" return AnsibleConfig(data=self.data) __all__ = [ "ansible_collections_path", "parse_ansible_version", "ansible_version", "AnsibleConfig", ] ansible-compat-4.1.11/src/ansible_compat/constants.py000066400000000000000000000030461454751074100226440ustar00rootroot00000000000000"""Constants used by ansible_compat.""" from pathlib import Path META_MAIN = (Path("meta") / Path("main.yml"), Path("meta") / Path("main.yaml")) REQUIREMENT_LOCATIONS = [ "requirements.yml", "roles/requirements.yml", "collections/requirements.yml", # These is more of less the official way to store test requirements in collections so far, comments shows number of repos using this reported by https://sourcegraph.com/ at the time of writing "tests/requirements.yml", # 170 "tests/integration/requirements.yml", # 3 "tests/unit/requirements.yml", # 1 ] # Minimal version of Ansible we support for runtime ANSIBLE_MIN_VERSION = "2.12" # Based on https://docs.ansible.com/ansible/latest/reference_appendices/config.html ANSIBLE_DEFAULT_ROLES_PATH = ( "~/.ansible/roles:/usr/share/ansible/roles:/etc/ansible/roles" ) INVALID_CONFIG_RC = 2 ANSIBLE_MISSING_RC = 4 INVALID_PREREQUISITES_RC = 10 MSG_INVALID_FQRL = """\ Computed fully qualified role name of {0} does not follow current galaxy requirements. 
Please edit meta/main.yml and assure we can correctly determine full role name: galaxy_info: role_name: my_name # if absent directory name hosting role is used instead namespace: my_galaxy_namespace # if absent, author is used instead Namespace: https://galaxy.ansible.com/docs/contributing/namespaces.html#galaxy-namespace-limitations Role: https://galaxy.ansible.com/docs/contributing/creating_role.html#role-names As an alternative, you can add 'role-name' to either skip_list or warn_list. """ RC_ANSIBLE_OPTIONS_ERROR = 5 ansible-compat-4.1.11/src/ansible_compat/errors.py000066400000000000000000000030271454751074100221430ustar00rootroot00000000000000"""Module to deal with errors.""" from __future__ import annotations from typing import TYPE_CHECKING, Any from ansible_compat.constants import ANSIBLE_MISSING_RC, INVALID_PREREQUISITES_RC if TYPE_CHECKING: from subprocess import CompletedProcess class AnsibleCompatError(RuntimeError): """Generic error originating from ansible_compat library.""" code = 1 # generic error def __init__( self, message: str | None = None, proc: CompletedProcess[Any] | None = None, ) -> None: """Construct generic library exception.""" super().__init__(message) self.proc = proc class AnsibleCommandError(RuntimeError): """Exception running an Ansible command.""" def __init__(self, proc: CompletedProcess[Any]) -> None: """Construct an exception given a completed process.""" message = ( f"Got {proc.returncode} exit code while running: {' '.join(proc.args)}" ) super().__init__(message) self.proc = proc class MissingAnsibleError(AnsibleCompatError): """Reports a missing or broken Ansible installation.""" code = ANSIBLE_MISSING_RC def __init__( self, message: str | None = "Unable to find a working copy of ansible executable.", proc: CompletedProcess[Any] | None = None, ) -> None: """.""" super().__init__(message) self.proc = proc class InvalidPrerequisiteError(AnsibleCompatError): """Reports a missing requirement.""" code = INVALID_PREREQUISITES_RC ansible-compat-4.1.11/src/ansible_compat/loaders.py000066400000000000000000000016301454751074100222560ustar00rootroot00000000000000"""Utilities for loading various files.""" from __future__ import annotations from typing import TYPE_CHECKING, Any import yaml from ansible_compat.errors import InvalidPrerequisiteError if TYPE_CHECKING: from pathlib import Path def yaml_from_file(path: Path) -> Any: # noqa: ANN401 """Return a loaded YAML file.""" with path.open(encoding="utf-8") as content: return yaml.load(content, Loader=yaml.SafeLoader) def colpath_from_path(path: Path) -> str | None: """Return a FQCN from a path.""" galaxy_file = path / "galaxy.yml" if galaxy_file.exists(): galaxy = yaml_from_file(galaxy_file) for k in ("namespace", "name"): if k not in galaxy: msg = f"{galaxy_file} is missing the following mandatory field {k}" raise InvalidPrerequisiteError(msg) return f"{galaxy['namespace']}/{galaxy['name']}" return None ansible-compat-4.1.11/src/ansible_compat/ports.py000066400000000000000000000001601454751074100217710ustar00rootroot00000000000000"""Portability helpers.""" from functools import cache, cached_property __all__ = ["cache", "cached_property"] ansible-compat-4.1.11/src/ansible_compat/prerun.py000066400000000000000000000014471454751074100221460ustar00rootroot00000000000000"""Utilities for configuring ansible runtime environment.""" import hashlib import os from pathlib import Path def get_cache_dir(project_dir: Path) -> Path: """Compute cache directory to be used based on project path.""" # we only use the basename 
instead of the full path in order to ensure that # we would use the same key regardless the location of the user home # directory or where the project is clones (as long the project folder uses # the same name). basename = project_dir.resolve().name.encode(encoding="utf-8") # 6 chars of entropy should be enough cache_key = hashlib.sha256(basename).hexdigest()[:6] cache_dir = ( Path(os.getenv("XDG_CACHE_HOME", "~/.cache")).expanduser() / "ansible-compat" / cache_key ) return cache_dir ansible-compat-4.1.11/src/ansible_compat/py.typed000066400000000000000000000000001454751074100217400ustar00rootroot00000000000000ansible-compat-4.1.11/src/ansible_compat/runtime.py000066400000000000000000001113631454751074100223150ustar00rootroot00000000000000"""Ansible runtime environment manager.""" from __future__ import annotations import contextlib import importlib import json import logging import os import re import shutil import subprocess import sys import warnings from collections import OrderedDict from dataclasses import dataclass, field from pathlib import Path from typing import TYPE_CHECKING, Any, Callable, no_type_check import subprocess_tee from packaging.version import Version from ansible_compat.config import ( AnsibleConfig, ansible_collections_path, ansible_version, parse_ansible_version, ) from ansible_compat.constants import ( META_MAIN, MSG_INVALID_FQRL, RC_ANSIBLE_OPTIONS_ERROR, REQUIREMENT_LOCATIONS, ) from ansible_compat.errors import ( AnsibleCommandError, AnsibleCompatError, InvalidPrerequisiteError, MissingAnsibleError, ) from ansible_compat.loaders import colpath_from_path, yaml_from_file from ansible_compat.prerun import get_cache_dir if TYPE_CHECKING: # https://github.com/PyCQA/pylint/issues/3240 # pylint: disable=unsubscriptable-object CompletedProcess = subprocess.CompletedProcess[Any] else: CompletedProcess = subprocess.CompletedProcess _logger = logging.getLogger(__name__) # regex to extract the first version from a collection range specifier version_re = re.compile(":[>=<]*([^,]*)") namespace_re = re.compile("^[a-z][a-z0-9_]+$") class AnsibleWarning(Warning): """Warnings related to Ansible runtime.""" @dataclass class Collection: """Container for Ansible collection information.""" name: str version: str path: Path class CollectionVersion(Version): """Collection version.""" def __init__(self, version: str) -> None: """Initialize collection version.""" # As packaging Version class does not support wildcard, we convert it # to "0", as this being the smallest version possible. 
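# Editor's note: illustrative sketch of get_cache_dir() from prerun.py above,
# not part of the original source; the project name is hypothetical and the
# resulting path assumes XDG_CACHE_HOME is unset.
import hashlib
from pathlib import Path
_key = hashlib.sha256(b"my-project").hexdigest()[:6]
_cache = Path("~/.cache").expanduser() / "ansible-compat" / _key
# e.g. ~/.cache/ansible-compat/<6 hex chars>, stable for any clone named "my-project"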
if version == "*": version = "0" super().__init__(version) @dataclass class Plugins: # pylint: disable=too-many-instance-attributes """Dataclass to access installed Ansible plugins, uses ansible-doc to retrieve them.""" runtime: Runtime become: dict[str, str] = field(init=False) cache: dict[str, str] = field(init=False) callback: dict[str, str] = field(init=False) cliconf: dict[str, str] = field(init=False) connection: dict[str, str] = field(init=False) httpapi: dict[str, str] = field(init=False) inventory: dict[str, str] = field(init=False) lookup: dict[str, str] = field(init=False) netconf: dict[str, str] = field(init=False) shell: dict[str, str] = field(init=False) vars: dict[str, str] = field(init=False) # noqa: A003 module: dict[str, str] = field(init=False) strategy: dict[str, str] = field(init=False) test: dict[str, str] = field(init=False) filter: dict[str, str] = field(init=False) # noqa: A003 role: dict[str, str] = field(init=False) keyword: dict[str, str] = field(init=False) @no_type_check def __getattribute__(self, attr: str): # noqa: ANN204 """Get attribute.""" if attr in { "become", "cache", "callback", "cliconf", "connection", "httpapi", "inventory", "lookup", "netconf", "shell", "vars", "module", "strategy", "test", "filter", "role", "keyword", }: try: result = super().__getattribute__(attr) except AttributeError as exc: if ansible_version() < Version("2.14") and attr in {"filter", "test"}: msg = "Ansible version below 2.14 does not support retrieving filter and test plugins." raise RuntimeError(msg) from exc proc = self.runtime.run( ["ansible-doc", "--json", "-l", "-t", attr], ) data = json.loads(proc.stdout) if not isinstance(data, dict): # pragma: no cover msg = "Unexpected output from ansible-doc" raise AnsibleCompatError(msg) from exc result = data else: result = super().__getattribute__(attr) return result # pylint: disable=too-many-instance-attributes class Runtime: """Ansible Runtime manager.""" _version: Version | None = None collections: OrderedDict[str, Collection] = OrderedDict() cache_dir: Path | None = None # Used to track if we have already initialized the Ansible runtime as attempts # to do it multiple tilmes will cause runtime warnings from within ansible-core initialized: bool = False plugins: Plugins def __init__( self, project_dir: Path | None = None, *, isolated: bool = False, min_required_version: str | None = None, require_module: bool = False, max_retries: int = 0, environ: dict[str, str] | None = None, verbosity: int = 0, ) -> None: """Initialize Ansible runtime environment. :param project_dir: The directory containing the Ansible project. If not mentioned it will be guessed from the current working directory. :param isolated: Assure that installation of collections or roles does not affect Ansible installation, an unique cache directory being used instead. :param min_required_version: Minimal version of Ansible required. If not found, a :class:`RuntimeError` exception is raised. :param require_module: If set, instantiation will fail if Ansible Python module is missing or is not matching the same version as the Ansible command line. That is useful for consumers that expect to also perform Python imports from Ansible. :param max_retries: Number of times it should retry network operations. Default is 0, no retries. :param environ: Environment dictionary to use, if undefined ``os.environ`` will be copied and used. :param verbosity: Verbosity level to use. 
""" self.project_dir = project_dir or Path.cwd() self.isolated = isolated self.max_retries = max_retries self.environ = environ or os.environ.copy() self.plugins = Plugins(runtime=self) self.verbosity = verbosity self.initialize_logger(level=self.verbosity) # Reduce noise from paramiko, unless user already defined PYTHONWARNINGS # paramiko/transport.py:236: CryptographyDeprecationWarning: Blowfish has been deprecated # https://github.com/paramiko/paramiko/issues/2038 # As CryptographyDeprecationWarning is not a builtin, we cannot use # PYTHONWARNINGS to ignore it using category but we can use message. # https://stackoverflow.com/q/68251969/99834 if "PYTHONWARNINGS" not in self.environ: # pragma: no cover self.environ["PYTHONWARNINGS"] = "ignore:Blowfish has been deprecated" if isolated: self.cache_dir = get_cache_dir(self.project_dir) self.config = AnsibleConfig() # Add the sys.path to the collection paths if not isolated self._add_sys_path_to_collection_paths() if not self.version_in_range(lower=min_required_version): msg = f"Found incompatible version of ansible runtime {self.version}, instead of {min_required_version} or newer." raise RuntimeError(msg) if require_module: self._ensure_module_available() # pylint: disable=import-outside-toplevel from ansible.utils.display import Display # pylint: disable=unused-argument def warning( self: Display, # noqa: ARG001 msg: str, *, formatted: bool = False, # noqa: ARG001 ) -> None: """Override ansible.utils.display.Display.warning to avoid printing warnings.""" warnings.warn( message=msg, category=AnsibleWarning, stacklevel=2, source={"msg": msg}, ) # Monkey patch ansible warning in order to use warnings module. Display.warning = warning def initialize_logger(self, level: int = 0) -> None: """Set up the global logging level based on the verbosity number.""" verbosity_map = { -2: logging.CRITICAL, -1: logging.ERROR, 0: logging.WARNING, 1: logging.INFO, 2: logging.DEBUG, } # Unknown logging level is treated as DEBUG logging_level = verbosity_map.get(level, logging.DEBUG) _logger.setLevel(logging_level) # Use module-level _logger instance to validate it _logger.debug("Logging initialized to level %s", logging_level) def _add_sys_path_to_collection_paths(self) -> None: """Add the sys.path to the collection paths.""" if self.config.collections_scan_sys_path: for path in sys.path: if ( path not in self.config.collections_paths and (Path(path) / "ansible_collections").is_dir() ): self.config.collections_paths.append( # pylint: disable=E1101 path, ) def load_collections(self) -> None: """Load collection data.""" self.collections = OrderedDict() no_collections_msg = "None of the provided paths were usable" proc = self.run(["ansible-galaxy", "collection", "list", "--format=json"]) if proc.returncode == RC_ANSIBLE_OPTIONS_ERROR and ( no_collections_msg in proc.stdout or no_collections_msg in proc.stderr ): _logger.debug("Ansible reported no installed collections at all.") return if proc.returncode != 0: _logger.error(proc) msg = f"Unable to list collections: {proc}" raise RuntimeError(msg) data = json.loads(proc.stdout) if not isinstance(data, dict): msg = f"Unexpected collection data, {data}" raise TypeError(msg) for path in data: for collection, collection_info in data[path].items(): if not isinstance(collection, str): msg = f"Unexpected collection data, {collection}" raise TypeError(msg) if not isinstance(collection_info, dict): msg = f"Unexpected collection data, {collection_info}" raise TypeError(msg) self.collections[collection] = Collection( 
name=collection, version=collection_info["version"], path=path, ) def _ensure_module_available(self) -> None: """Assure that Ansible Python module is installed and matching CLI version.""" ansible_release_module = None with contextlib.suppress(ModuleNotFoundError, ImportError): ansible_release_module = importlib.import_module("ansible.release") if ansible_release_module is None: msg = "Unable to find Ansible python module." raise RuntimeError(msg) ansible_module_version = Version( ansible_release_module.__version__, ) if ansible_module_version != self.version: msg = f"Ansible CLI ({self.version}) and python module ({ansible_module_version}) versions do not match. This indicates a broken execution environment." raise RuntimeError(msg) # For ansible 2.15+ we need to initialize the plugin loader # https://github.com/ansible/ansible-lint/issues/2945 if not Runtime.initialized: col_path = [f"{self.cache_dir}/collections"] if self.version >= Version("2.15.0.dev0"): # pylint: disable=import-outside-toplevel,no-name-in-module from ansible.plugins.loader import init_plugin_loader init_plugin_loader(col_path) else: # noinspection PyProtectedMember from ansible.utils.collection_loader._collection_finder import ( # pylint: disable=import-outside-toplevel _AnsibleCollectionFinder, ) # noinspection PyProtectedMember # pylint: disable=protected-access col_path += self.config.collections_paths col_path += os.path.dirname( # noqa: PTH120 os.environ.get(ansible_collections_path(), "."), ).split(":") _AnsibleCollectionFinder( # noqa: SLF001 paths=col_path, )._install() # pylint: disable=protected-access Runtime.initialized = True def clean(self) -> None: """Remove content of cache_dir.""" if self.cache_dir: shutil.rmtree(self.cache_dir, ignore_errors=True) def run( # ruff: disable=PLR0913 self, args: str | list[str], *, retry: bool = False, tee: bool = False, env: dict[str, str] | None = None, cwd: Path | None = None, ) -> CompletedProcess: """Execute a command inside an Ansible environment. :param retry: Retry network operations on failures. :param tee: Also pass captured stdout/stderr to system while running. """ if tee: run_func: Callable[..., CompletedProcess] = subprocess_tee.run else: run_func = subprocess.run env = self.environ if env is None else env.copy() # Presence of ansible debug variable or config option will prevent us # from parsing its JSON output due to extra debug messages on stdout. env["ANSIBLE_DEBUG"] = "0" # https://github.com/ansible/ansible-lint/issues/3522 env["ANSIBLE_VERBOSE_TO_STDERR"] = "True" for _ in range(self.max_retries + 1 if retry else 1): result = run_func( args, universal_newlines=True, check=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env, cwd=str(cwd) if cwd else None, ) if result.returncode == 0: break _logger.debug("Environment: %s", env) if retry: _logger.warning( "Retrying execution failure %s of: %s", result.returncode, " ".join(args), ) return result @property def version(self) -> Version: """Return current Version object for Ansible. If version is not mentioned, it returns current version as detected. When version argument is mentioned, it return converts the version string to Version object in order to make it usable in comparisons. """ if self._version: return self._version proc = self.run(["ansible", "--version"]) if proc.returncode == 0: self._version = parse_ansible_version(proc.stdout) return self._version msg = "Unable to find a working copy of ansible executable." 
raise MissingAnsibleError(msg, proc=proc) def version_in_range( self, lower: str | None = None, upper: str | None = None, ) -> bool: """Check if Ansible version is inside a required range. The lower limit is inclusive and the upper one exclusive. """ if lower and self.version < Version(lower): return False if upper and self.version >= Version(upper): return False return True def install_collection( self, collection: str | Path, *, destination: Path | None = None, force: bool = False, ) -> None: """Install an Ansible collection. Can accept arguments like: 'foo.bar:>=1.2.3' 'git+https://github.com/ansible-collections/ansible.posix.git,main' """ cmd = [ "ansible-galaxy", "collection", "install", "-vvv", # this is needed to make ansible display important info in case of failures ] if force: cmd.append("--force") if isinstance(collection, Path): collection = str(collection) # As ansible-galaxy install is not able to automatically determine # if the range requires a pre-release, we need to manually add the --pre # flag when needed. matches = version_re.search(collection) if ( not is_url(collection) and matches and CollectionVersion(matches[1]).is_prerelease ): cmd.append("--pre") cpaths: list[str] = self.config.collections_paths if destination and str(destination) not in cpaths: # we cannot use '-p' because it breaks galaxy ability to ignore already installed collections, so # we hack ansible_collections_path instead and inject our own path there. # pylint: disable=no-member cpaths.insert(0, str(destination)) cmd.append(f"{collection}") _logger.info("Running from %s : %s", Path.cwd(), " ".join(cmd)) process = self.run( cmd, retry=True, env={**self.environ, ansible_collections_path(): ":".join(cpaths)}, ) if process.returncode != 0: msg = f"Command returned {process.returncode} code:\n{process.stdout}\n{process.stderr}" _logger.error(msg) raise InvalidPrerequisiteError(msg) def install_collection_from_disk( self, path: Path, destination: Path | None = None, ) -> None: """Build and install collection from a given disk path.""" self.install_collection(path, destination=destination, force=True) # pylint: disable=too-many-branches def install_requirements( # noqa: C901 self, requirement: Path, *, retry: bool = False, offline: bool = False, ) -> None: """Install dependencies from a requirements.yml. :param requirement: path to requirements.yml file :param retry: retry network operations on failures :param offline: bypass installation, may fail if requirements are not met. """ if not Path(requirement).exists(): return reqs_yaml = yaml_from_file(Path(requirement)) if not isinstance(reqs_yaml, (dict, list)): msg = f"{requirement} file is not a valid Ansible requirements file." raise InvalidPrerequisiteError(msg) if isinstance(reqs_yaml, dict): for key in reqs_yaml: if key not in ("roles", "collections"): msg = f"{requirement} file is not a valid Ansible requirements file. Only 'roles' and 'collections' keys are allowed at root level. 
Recognized valid locations are: {', '.join(REQUIREMENT_LOCATIONS)}" raise InvalidPrerequisiteError(msg) if isinstance(reqs_yaml, list) or "roles" in reqs_yaml: cmd = [ "ansible-galaxy", "role", "install", "-r", f"{requirement}", ] if self.verbosity > 0: cmd.extend(["-" + ("v" * self.verbosity)]) if self.cache_dir: cmd.extend(["--roles-path", f"{self.cache_dir}/roles"]) if offline: _logger.warning( "Skipped installing old role dependencies due to running in offline mode.", ) else: _logger.info("Running %s", " ".join(cmd)) result = self.run(cmd, retry=retry) _logger.debug(result.stdout) if result.returncode != 0: _logger.error(result.stderr) raise AnsibleCommandError(result) # Run galaxy collection install works on v2 requirements.yml if "collections" in reqs_yaml and reqs_yaml["collections"] is not None: cmd = [ "ansible-galaxy", "collection", "install", ] if self.verbosity > 0: cmd.extend(["-" + ("v" * self.verbosity)]) for collection in reqs_yaml["collections"]: if isinstance(collection, dict) and collection.get("type", "") == "git": _logger.info( "Adding '--pre' to ansible-galaxy collection install because we detected one collection being sourced from git.", ) cmd.append("--pre") break if offline: _logger.warning( "Skipped installing collection dependencies due to running in offline mode.", ) else: cmd.extend(["-r", str(requirement)]) cpaths = self.config.collections_paths if self.cache_dir: # we cannot use '-p' because it breaks galaxy ability to ignore already installed collections, so # we hack ansible_collections_path instead and inject our own path there. dest_path = f"{self.cache_dir}/collections" if dest_path not in cpaths: # pylint: disable=no-member cpaths.insert(0, dest_path) _logger.info("Running %s", " ".join(cmd)) result = self.run( cmd, retry=retry, env={**os.environ, "ANSIBLE_COLLECTIONS_PATH": ":".join(cpaths)}, ) _logger.debug(result.stdout) if result.returncode != 0: _logger.error(result.stderr) raise AnsibleCommandError(result) def prepare_environment( # noqa: C901 self, required_collections: dict[str, str] | None = None, *, retry: bool = False, install_local: bool = False, offline: bool = False, role_name_check: int = 0, ) -> None: """Make dependencies available if needed.""" destination: Path | None = None if required_collections is None: required_collections = {} # first one is standard for collection layout repos and the last two # are part of Tower specification # https://docs.ansible.com/ansible-tower/latest/html/userguide/projects.html#ansible-galaxy-support # https://docs.ansible.com/ansible-tower/latest/html/userguide/projects.html#collections-support for req_file in REQUIREMENT_LOCATIONS: self.install_requirements(Path(req_file), retry=retry, offline=offline) self._prepare_ansible_paths() if not install_local: return for gpath in search_galaxy_paths(self.project_dir): # processing all found galaxy.yml files galaxy_path = Path(gpath) if galaxy_path.exists(): data = yaml_from_file(galaxy_path) if isinstance(data, dict) and "dependencies" in data: for name, required_version in data["dependencies"].items(): _logger.info( "Provisioning collection %s:%s from galaxy.yml", name, required_version, ) self.install_collection( f"{name}{',' if is_url(name) else ':'}{required_version}", destination=destination, ) if self.cache_dir: destination = self.cache_dir / "collections" for name, min_version in required_collections.items(): self.install_collection( f"{name}:>={min_version}", destination=destination, ) if (self.project_dir / "galaxy.yml").exists(): if destination: # 
while function can return None, that would not break the logic colpath = Path( f"{destination}/ansible_collections/{colpath_from_path(self.project_dir)}", ) if colpath.is_symlink(): if os.path.realpath(colpath) == str(Path.cwd()): _logger.warning( "Found symlinked collection, skipping its installation.", ) return _logger.warning( "Collection is symlinked, but not pointing to %s directory, so we will remove it.", Path.cwd(), ) colpath.unlink() # molecule scenario within a collection self.install_collection_from_disk( galaxy_path.parent, destination=destination, ) elif ( Path().resolve().parent.name == "roles" and Path("../../galaxy.yml").exists() ): # molecule scenario located within roles//molecule inside # a collection self.install_collection_from_disk( Path("../.."), destination=destination, ) else: # no collection, try to recognize and install a standalone role self._install_galaxy_role( self.project_dir, role_name_check=role_name_check, ignore_errors=True, ) # reload collections self.load_collections() def require_collection( self, name: str, version: str | None = None, *, install: bool = True, ) -> tuple[CollectionVersion, Path]: """Check if a minimal collection version is present or exits. In the future this method may attempt to install a missing or outdated collection before failing. :param name: collection name :param version: minimal version required :param install: if True, attempt to install a missing collection :returns: tuple of (found_version, collection_path) """ try: ns, coll = name.split(".", 1) except ValueError as exc: msg = f"Invalid collection name supplied: {name}%s" raise InvalidPrerequisiteError( msg, ) from exc paths: list[str] = self.config.collections_paths if not paths or not isinstance(paths, list): msg = f"Unable to determine ansible collection paths. ({paths})" raise InvalidPrerequisiteError( msg, ) if self.cache_dir: # if we have a cache dir, we want to be use that would be preferred # destination when installing a missing collection # https://github.com/PyCQA/pylint/issues/4667 paths.insert(0, f"{self.cache_dir}/collections") # pylint: disable=E1101 for path in paths: collpath = Path(path) / "ansible_collections" / ns / coll if collpath.exists(): mpath = collpath / "MANIFEST.json" if not mpath.exists(): msg = f"Found collection at '{collpath}' but missing MANIFEST.json, cannot get info." _logger.fatal(msg) raise InvalidPrerequisiteError(msg) with mpath.open(encoding="utf-8") as f: manifest = json.loads(f.read()) found_version = CollectionVersion( manifest["collection_info"]["version"], ) if version and found_version < CollectionVersion(version): if install: self.install_collection(f"{name}:>={version}") self.require_collection(name, version, install=False) else: msg = f"Found {name} collection {found_version} but {version} or newer is required." 
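# Editor's note: illustrative sketch, not part of the original method.
# require_collection() returns the version found and its on-disk path,
# installing the collection first when `install=True`; the collection name and
# version below are hypothetical.
#     found, path = runtime.require_collection("community.general", "3.0.0")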
_logger.fatal(msg) raise InvalidPrerequisiteError(msg) return found_version, collpath.resolve() break else: if install: self.install_collection(f"{name}:>={version}" if version else name) return self.require_collection( name=name, version=version, install=False, ) msg = f"Collection '{name}' not found in '{paths}'" _logger.fatal(msg) raise InvalidPrerequisiteError(msg) def _prepare_ansible_paths(self) -> None: """Configure Ansible environment variables.""" try: library_paths: list[str] = self.config.default_module_path.copy() roles_path: list[str] = self.config.default_roles_path.copy() collections_path: list[str] = self.config.collections_paths.copy() except AttributeError as exc: msg = "Unexpected ansible configuration" raise RuntimeError(msg) from exc alterations_list: list[tuple[list[str], str, bool]] = [ (library_paths, "plugins/modules", True), (roles_path, "roles", True), ] alterations_list.extend( [ (roles_path, f"{self.cache_dir}/roles", False), (library_paths, f"{self.cache_dir}/modules", False), (collections_path, f"{self.cache_dir}/collections", False), ] if self.isolated else [], ) for path_list, path_, must_be_present in alterations_list: path = Path(path_) if not path.exists(): if must_be_present: continue path.mkdir(parents=True, exist_ok=True) if str(path) not in path_list: path_list.insert(0, str(path)) if library_paths != self.config.DEFAULT_MODULE_PATH: self._update_env("ANSIBLE_LIBRARY", library_paths) if collections_path != self.config.default_collections_path: self._update_env(ansible_collections_path(), collections_path) if roles_path != self.config.default_roles_path: self._update_env("ANSIBLE_ROLES_PATH", roles_path) def _get_roles_path(self) -> Path: """Return roles installation path. If `self.isolated` is set to `True`, `self.cache_dir` would be created, then it returns the `self.cache_dir/roles`. When `self.isolated` is not mentioned or set to `False`, it returns the first path in `default_roles_path`. """ if self.cache_dir: path = Path(f"{self.cache_dir}/roles") else: path = Path(self.config.default_roles_path[0]).expanduser() return path def _install_galaxy_role( self, project_dir: Path, role_name_check: int = 0, *, ignore_errors: bool = False, ) -> None: """Detect standalone galaxy role and installs it. :param: role_name_check: logic to used to check role name 0: exit with error if name is not compliant (default) 1: warn if name is not compliant 2: bypass any name checking :param: ignore_errors: if True, bypass installing invalid roles. Our implementation aims to match ansible-galaxy's behaviour for installing roles from a tarball or scm. For example ansible-galaxy will install a role that has both galaxy.yml and meta/main.yml present but empty. Also missing galaxy.yml is accepted but missing meta/main.yml is not. 
""" yaml = None galaxy_info = {} for meta_main in META_MAIN: meta_filename = Path(project_dir) / meta_main if meta_filename.exists(): break else: if ignore_errors: return yaml = yaml_from_file(meta_filename) if yaml and "galaxy_info" in yaml: galaxy_info = yaml["galaxy_info"] fqrn = _get_role_fqrn(galaxy_info, project_dir) if role_name_check in [0, 1]: if not re.match(r"[a-z0-9][a-z0-9_]+\.[a-z][a-z0-9_]+$", fqrn): msg = MSG_INVALID_FQRL.format(fqrn) if role_name_check == 1: _logger.warning(msg) else: _logger.error(msg) raise InvalidPrerequisiteError(msg) elif "role_name" in galaxy_info: # when 'role-name' is in skip_list, we stick to plain role names role_namespace = _get_galaxy_role_ns(galaxy_info) role_name = _get_galaxy_role_name(galaxy_info) fqrn = f"{role_namespace}{role_name}" else: fqrn = Path(project_dir).absolute().name path = self._get_roles_path() path.mkdir(parents=True, exist_ok=True) link_path = path / fqrn # despite documentation stating that is_file() reports true for symlinks, # it appears that is_dir() reports true instead, so we rely on exists(). target = Path(project_dir).absolute() if not link_path.exists() or ( link_path.is_symlink() and link_path.readlink() != target ): # must call unlink before checking exists because a broken # link reports as not existing and we want to repair it link_path.unlink(missing_ok=True) # https://github.com/python/cpython/issues/73843 link_path.symlink_to(str(target), target_is_directory=True) _logger.info( "Using %s symlink to current repository in order to enable Ansible to find the role using its expected full name.", link_path, ) def _update_env(self, varname: str, value: list[str], default: str = "") -> None: """Update colon based environment variable if needed. New values are prepended to make sure they take precedence. """ if not value: return orig_value = self.environ.get(varname, default) if orig_value: value = [*value, *orig_value.split(":")] value_str = ":".join(value) if value_str != self.environ.get(varname, ""): self.environ[varname] = value_str _logger.info("Set %s=%s", varname, value_str) def _get_role_fqrn(galaxy_infos: dict[str, Any], project_dir: Path) -> str: """Compute role fqrn.""" role_namespace = _get_galaxy_role_ns(galaxy_infos) role_name = _get_galaxy_role_name(galaxy_infos) if len(role_name) == 0: role_name = Path(project_dir).absolute().name role_name = re.sub(r"(ansible-|ansible-role-)", "", role_name).split( ".", maxsplit=2, )[-1] return f"{role_namespace}{role_name}" def _get_galaxy_role_ns(galaxy_infos: dict[str, Any]) -> str: """Compute role namespace from meta/main.yml, including trailing dot.""" role_namespace = galaxy_infos.get("namespace", "") if len(role_namespace) == 0: role_namespace = galaxy_infos.get("author", "") if not isinstance(role_namespace, str): msg = f"Role namespace must be string, not {role_namespace}" raise AnsibleCompatError(msg) # if there's a space in the name space, it's likely author name # and not the galaxy login, so act as if there was no namespace if not role_namespace or re.match(r"^\w+ \w+", role_namespace): role_namespace = "" else: role_namespace = f"{role_namespace}." 
return role_namespace def _get_galaxy_role_name(galaxy_infos: dict[str, Any]) -> str: """Compute role name from meta/main.yml.""" result = galaxy_infos.get("role_name", "") if not isinstance(result, str): return "" return result def search_galaxy_paths(search_dir: Path) -> list[str]: """Search for galaxy paths (only one level deep).""" galaxy_paths: list[str] = [] for file in [".", *os.listdir(search_dir)]: # We ignore any folders that are not valid namespaces, just like # ansible galaxy does at this moment. if file != "." and not namespace_re.match(file): continue file_path = search_dir / file / "galaxy.yml" if file_path.is_file(): galaxy_paths.append(str(file_path)) return galaxy_paths def is_url(name: str) -> bool: """Return True if a dependency name looks like an URL.""" return bool(re.match("^git[+@]", name)) ansible-compat-4.1.11/src/ansible_compat/schema.py000066400000000000000000000063011454751074100220650ustar00rootroot00000000000000"""Utils for JSON Schema validation.""" from __future__ import annotations import json from collections.abc import Mapping, Sequence from dataclasses import dataclass from typing import TYPE_CHECKING import jsonschema from jsonschema.validators import validator_for if TYPE_CHECKING: from ansible_compat.types import JSON def to_path(schema_path: Sequence[str | int]) -> str: """Flatten a path to a dot delimited string. :param schema_path: The schema path :returns: The dot delimited path """ return ".".join(str(index) for index in schema_path) def json_path(absolute_path: Sequence[str | int]) -> str: """Flatten a data path to a dot delimited string. :param absolute_path: The path :returns: The dot delimited string """ path = "$" for elem in absolute_path: if isinstance(elem, int): path += "[" + str(elem) + "]" else: path += "." + elem return path @dataclass(order=True) class JsonSchemaError: # pylint: disable=too-many-instance-attributes """Data structure to hold a json schema validation error.""" # order of attributes below is important for sorting schema_path: str data_path: str json_path: str message: str expected: bool | int | str relative_schema: str validator: str found: str def to_friendly(self) -> str: """Provide a friendly explanation of the error. :returns: The error message """ return f"In '{self.data_path}': {self.message}." def validate( schema: JSON, data: JSON, ) -> list[JsonSchemaError]: """Validate some data against a JSON schema. 
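Editor's illustrative example (not part of the original docstring); a minimal
schema/data pair that fails validation, assuming jsonschema's default draft:

    errors = validate({"type": "string"}, 42)
    errors[0].message    # "42 is not of type 'string'"
    errors[0].json_path  # "$"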
:param schema: the JSON schema to use for validation :param data: The data to validate :returns: Any errors encountered """ errors: list[JsonSchemaError] = [] if isinstance(schema, str): schema = json.loads(schema) try: if not isinstance(schema, Mapping): msg = "Invalid schema, must be a mapping" raise jsonschema.SchemaError(msg) # noqa: TRY301 validator = validator_for(schema) validator.check_schema(schema) except jsonschema.SchemaError as exc: error = JsonSchemaError( message=str(exc), data_path="schema sanity check", json_path="", schema_path="", relative_schema="", expected="", validator="", found="", ) errors.append(error) return errors for validation_error in validator(schema).iter_errors(data): if isinstance(validation_error, jsonschema.ValidationError): error = JsonSchemaError( message=validation_error.message, data_path=to_path(validation_error.absolute_path), json_path=json_path(validation_error.absolute_path), schema_path=to_path(validation_error.schema_path), relative_schema=validation_error.schema, expected=validation_error.validator_value, validator=str(validation_error.validator), found=str(validation_error.instance), ) errors.append(error) return sorted(errors) ansible-compat-4.1.11/src/ansible_compat/types.py000066400000000000000000000010501454751074100217650ustar00rootroot00000000000000"""Custom types.""" from __future__ import annotations from collections.abc import Mapping, Sequence from typing import Union try: # py39 does not have TypeAlias from typing_extensions import TypeAlias except ImportError: from typing import TypeAlias # type: ignore[no-redef,attr-defined] JSON: TypeAlias = Union[dict[str, "JSON"], list["JSON"], str, int, float, bool, None] JSON_ro: TypeAlias = Union[ Mapping[str, "JSON_ro"], Sequence["JSON_ro"], str, int, float, bool, None, ] __all__ = ["JSON", "JSON_ro"] ansible-compat-4.1.11/test/000077500000000000000000000000001454751074100154635ustar00rootroot00000000000000ansible-compat-4.1.11/test/__init__.py000066400000000000000000000000501454751074100175670ustar00rootroot00000000000000"""Tests for ansible_compat package.""" ansible-compat-4.1.11/test/assets/000077500000000000000000000000001454751074100167655ustar00rootroot00000000000000ansible-compat-4.1.11/test/assets/galaxy_paths/000077500000000000000000000000001454751074100214515ustar00rootroot00000000000000ansible-compat-4.1.11/test/assets/galaxy_paths/.bar/000077500000000000000000000000001454751074100222735ustar00rootroot00000000000000ansible-compat-4.1.11/test/assets/galaxy_paths/.bar/galaxy.yml000066400000000000000000000000001454751074100242710ustar00rootroot00000000000000ansible-compat-4.1.11/test/assets/galaxy_paths/foo/000077500000000000000000000000001454751074100222345ustar00rootroot00000000000000ansible-compat-4.1.11/test/assets/galaxy_paths/foo/galaxy.yml000066400000000000000000000000001454751074100242320ustar00rootroot00000000000000ansible-compat-4.1.11/test/assets/requirements-invalid-collection.yml000066400000000000000000000001461454751074100260110ustar00rootroot00000000000000# "ansible-galaxy collection install" is expected to fail this invalid file collections: - foo: bar ansible-compat-4.1.11/test/assets/requirements-invalid-role.yml000066400000000000000000000001421454751074100246130ustar00rootroot00000000000000# file expected to make "ansible-galaxy role install" to fail roles: - this_role_does_not_exist ansible-compat-4.1.11/test/assets/validate0_data.json000066400000000000000000000000711454751074100225200ustar00rootroot00000000000000{ "environment": { "a": false, "b": true, "c": 
"foo" } } ansible-compat-4.1.11/test/assets/validate0_expected.json000066400000000000000000000011671454751074100234170ustar00rootroot00000000000000[ { "message": "False is not of type 'string'", "data_path": "environment.a", "json_path": "$.environment.a", "schema_path": "properties.environment.additionalProperties.type", "relative_schema": { "type": "string" }, "expected": "string", "validator": "type", "found": "False" }, { "message": "True is not of type 'string'", "data_path": "environment.b", "json_path": "$.environment.b", "schema_path": "properties.environment.additionalProperties.type", "relative_schema": { "type": "string" }, "expected": "string", "validator": "type", "found": "True" } ] ansible-compat-4.1.11/test/assets/validate0_schema.json000066400000000000000000000002701454751074100230500ustar00rootroot00000000000000{ "$schema": "http://json-schema.org/draft-07/schema#", "properties": { "environment": { "type": "object", "additionalProperties": { "type": "string" } } } } ansible-compat-4.1.11/test/collections/000077500000000000000000000000001454751074100200015ustar00rootroot00000000000000ansible-compat-4.1.11/test/collections/acme.broken/000077500000000000000000000000001454751074100221655ustar00rootroot00000000000000ansible-compat-4.1.11/test/collections/acme.broken/galaxy.yml000066400000000000000000000000451454751074100241740ustar00rootroot00000000000000foo: that is not a valid collection! ansible-compat-4.1.11/test/collections/acme.goodies/000077500000000000000000000000001454751074100223365ustar00rootroot00000000000000ansible-compat-4.1.11/test/collections/acme.goodies/galaxy.yml000066400000000000000000000012511454751074100243450ustar00rootroot00000000000000name: goodies namespace: acme version: 1.0.0 readme: README.md authors: - Red Hat description: Sample collection to use with molecule dependencies: community.molecule: ">=0.1.0" # used to also test '=>' condition ansible.utils: "*" # used to also test '*' git+https://github.com/ansible-collections/community.crypto.git: main # tests ability to install from git build_ignore: - "*.egg-info" - .DS_Store - .eggs - .gitignore - .mypy_cache - .pytest_cache - .stestr - .stestr.conf - .tox - .vscode - MANIFEST.in - build - dist - doc - report.html - setup.cfg - setup.py - "tests/unit/*.*" - README.rst - tox.ini license_file: LICENSE ansible-compat-4.1.11/test/collections/acme.goodies/molecule/000077500000000000000000000000001454751074100241435ustar00rootroot00000000000000ansible-compat-4.1.11/test/collections/acme.goodies/molecule/default/000077500000000000000000000000001454751074100255675ustar00rootroot00000000000000ansible-compat-4.1.11/test/collections/acme.goodies/molecule/default/converge.yml000066400000000000000000000002361454751074100301230ustar00rootroot00000000000000--- - name: Converge hosts: localhost tasks: - name: "Include sample role from current collection" include_role: name: acme.goodies.baz ansible-compat-4.1.11/test/collections/acme.goodies/molecule/default/molecule.yml000066400000000000000000000002161454751074100301160ustar00rootroot00000000000000--- dependency: name: galaxy driver: name: delegated platforms: - name: instance provisioner: name: ansible verifier: name: ansible 
ansible-compat-4.1.11/test/collections/acme.goodies/roles/000077500000000000000000000000001454751074100234625ustar00rootroot00000000000000ansible-compat-4.1.11/test/collections/acme.goodies/roles/baz/000077500000000000000000000000001454751074100242365ustar00rootroot00000000000000ansible-compat-4.1.11/test/collections/acme.goodies/roles/baz/molecule/000077500000000000000000000000001454751074100260435ustar00rootroot00000000000000ansible-compat-4.1.11/test/collections/acme.goodies/roles/baz/molecule/deep_scenario/000077500000000000000000000000001454751074100306435ustar00rootroot00000000000000ansible-compat-4.1.11/test/collections/acme.goodies/roles/baz/molecule/deep_scenario/converge.yml000066400000000000000000000002341454751074100331750ustar00rootroot00000000000000--- - name: Converge hosts: localhost tasks: - name: "Sample testing task part of deep_scenario" include_role: name: acme.goodies.baz ansible-compat-4.1.11/test/collections/acme.goodies/roles/baz/molecule/deep_scenario/molecule.yml000066400000000000000000000002161454751074100331720ustar00rootroot00000000000000--- dependency: name: galaxy driver: name: delegated platforms: - name: instance provisioner: name: ansible verifier: name: ansible ansible-compat-4.1.11/test/collections/acme.goodies/roles/baz/tasks/000077500000000000000000000000001454751074100253635ustar00rootroot00000000000000ansible-compat-4.1.11/test/collections/acme.goodies/roles/baz/tasks/main.yml000066400000000000000000000001171454751074100270310ustar00rootroot00000000000000- name: "some task inside foo.bar collection" debug: msg: "hello world!" ansible-compat-4.1.11/test/collections/acme.goodies/tests/000077500000000000000000000000001454751074100235005ustar00rootroot00000000000000ansible-compat-4.1.11/test/collections/acme.goodies/tests/requirements.yml000066400000000000000000000000721454751074100267450ustar00rootroot00000000000000collections: - name: ansible.posix version: ">=1.0" ansible-compat-4.1.11/test/collections/acme.minimal/000077500000000000000000000000001454751074100223335ustar00rootroot00000000000000ansible-compat-4.1.11/test/collections/acme.minimal/galaxy.yml000066400000000000000000000006771454751074100243550ustar00rootroot00000000000000name: minimal namespace: acme version: 1.0.0 readme: README.md authors: - Red Hat description: Sample collection to use with molecule build_ignore: - "*.egg-info" - .DS_Store - .eggs - .gitignore - .mypy_cache - .pytest_cache - .stestr - .stestr.conf - .tox - .vscode - MANIFEST.in - build - dist - doc - report.html - setup.cfg - setup.py - "tests/unit/*.*" - README.rst - tox.ini license_file: LICENSE ansible-compat-4.1.11/test/conftest.py000066400000000000000000000071151454751074100176660ustar00rootroot00000000000000"""Pytest fixtures.""" import importlib.metadata import json import pathlib import subprocess import sys from collections.abc import Generator from pathlib import Path from typing import Callable import pytest from ansible_compat.runtime import Runtime @pytest.fixture() # pylint: disable=unused-argument def runtime(scope: str = "session") -> Generator[Runtime, None, None]: # noqa: ARG001 """Isolated runtime fixture.""" instance = Runtime(isolated=True) yield instance instance.clean() @pytest.fixture() # pylint: disable=unused-argument def runtime_tmp( tmp_path: pathlib.Path, scope: str = "session", # noqa: ARG001 ) -> Generator[Runtime, None, None]: """Isolated runtime fixture using a temp directory.""" instance = Runtime(project_dir=tmp_path, isolated=True) yield instance instance.clean() def 
query_pkg_version(pkg: str) -> str: """Get the version of a current installed package. :param pkg: Package name :return: Package version """ return importlib.metadata.version(pkg) @pytest.fixture() def pkg_version() -> Callable[[str], str]: """Get the version of a current installed package. :return: Callable function to get package version """ return query_pkg_version class VirtualEnvironment: """Virtualenv wrapper.""" def __init__(self, path: Path) -> None: """Initialize. :param path: Path to virtualenv """ self.project = path self.venv_path = self.project / "venv" self.venv_bin_path = self.venv_path / "bin" self.venv_python_path = self.venv_bin_path / "python" def create(self) -> None: """Create virtualenv.""" cmd = [str(sys.executable), "-m", "venv", str(self.venv_path)] subprocess.check_call(args=cmd) # Install this package into the virtual environment self.install(f"{__file__}/../..") def install(self, *packages: str) -> None: """Install packages in virtualenv. :param packages: Packages to install """ cmd = [str(self.venv_python_path), "-m", "pip", "install", *packages] subprocess.check_call(args=cmd) def python_script_run(self, script: str) -> subprocess.CompletedProcess[str]: """Run command in project dir using venv. :param args: Command to run """ proc = subprocess.run( args=[self.venv_python_path, "-c", script], capture_output=True, cwd=self.project, check=False, text=True, ) return proc def site_package_dirs(self) -> list[Path]: """Get site packages. :return: List of site packages dirs """ script = "import json, site; print(json.dumps(site.getsitepackages()))" proc = subprocess.run( args=[self.venv_python_path, "-c", script], capture_output=True, check=False, text=True, ) dirs = json.loads(proc.stdout) if not isinstance(dirs, list): msg = "Expected list of site packages" raise TypeError(msg) sanitized = list({Path(d).resolve() for d in dirs}) return sanitized @pytest.fixture(scope="module") def venv_module(tmp_path_factory: pytest.TempPathFactory) -> VirtualEnvironment: """Create a virtualenv in a temporary directory. 
:param tmp_path: pytest fixture for temp path :return: VirtualEnvironment instance """ test_project = tmp_path_factory.mktemp(basename="test_project-", numbered=True) _venv = VirtualEnvironment(test_project) _venv.create() return _venv ansible-compat-4.1.11/test/roles/000077500000000000000000000000001454751074100166075ustar00rootroot00000000000000ansible-compat-4.1.11/test/roles/acme.missing_deps/000077500000000000000000000000001454751074100221775ustar00rootroot00000000000000ansible-compat-4.1.11/test/roles/acme.missing_deps/meta/000077500000000000000000000000001454751074100231255ustar00rootroot00000000000000ansible-compat-4.1.11/test/roles/acme.missing_deps/meta/main.yml000066400000000000000000000002101454751074100245650ustar00rootroot00000000000000--- galaxy_info: name: missing_deps namespace: acme description: foo license: GPL min_ansible_version: "2.10" platforms: [] ansible-compat-4.1.11/test/roles/acme.missing_deps/requirements.yml000066400000000000000000000001271454751074100254450ustar00rootroot00000000000000collections: - foo.bar # collection that does not exist, so we can test offline mode ansible-compat-4.1.11/test/roles/acme.sample2/000077500000000000000000000000001454751074100210565ustar00rootroot00000000000000ansible-compat-4.1.11/test/roles/acme.sample2/meta/000077500000000000000000000000001454751074100220045ustar00rootroot00000000000000ansible-compat-4.1.11/test/roles/acme.sample2/meta/main.yml000066400000000000000000000004641454751074100234570ustar00rootroot00000000000000--- dependencies: [] galaxy_info: # role_name is missing in order to test deduction from folder name author: acme description: ACME sample role company: "ACME LTD" license: MIT min_ansible_version: "2.9" platforms: - name: Debian versions: - any galaxy_tags: - samples ansible-compat-4.1.11/test/roles/ansible-role-sample/000077500000000000000000000000001454751074100224425ustar00rootroot00000000000000ansible-compat-4.1.11/test/roles/ansible-role-sample/meta/000077500000000000000000000000001454751074100233705ustar00rootroot00000000000000ansible-compat-4.1.11/test/roles/ansible-role-sample/meta/main.yml000066400000000000000000000004031454751074100250340ustar00rootroot00000000000000--- dependencies: [] galaxy_info: role_name: sample author: acme description: ACME sample role company: "ACME LTD" license: MIT min_ansible_version: "2.9" platforms: - name: Debian versions: - any galaxy_tags: - samples ansible-compat-4.1.11/test/roles/sample3/000077500000000000000000000000001454751074100201535ustar00rootroot00000000000000ansible-compat-4.1.11/test/roles/sample3/meta/000077500000000000000000000000001454751074100211015ustar00rootroot00000000000000ansible-compat-4.1.11/test/roles/sample3/meta/main.yml000066400000000000000000000004641454751074100225540ustar00rootroot00000000000000--- dependencies: [] galaxy_info: # role_name is missing in order to test deduction from folder name author: acme description: ACME samble role company: "ACME LTD" license: MIT min_ansible_version: "2.9" platforms: - name: Debian versions: - any galaxy_tags: - samples ansible-compat-4.1.11/test/roles/sample4/000077500000000000000000000000001454751074100201545ustar00rootroot00000000000000ansible-compat-4.1.11/test/roles/sample4/meta/000077500000000000000000000000001454751074100211025ustar00rootroot00000000000000ansible-compat-4.1.11/test/roles/sample4/meta/main.yml000066400000000000000000000004641454751074100225550ustar00rootroot00000000000000--- dependencies: [] galaxy_info: # role_name is missing in order to test deduction from folder name 
author: acme description: ACME sample role company: "ACME LTD" license: MIT min_ansible_version: "2.9" platforms: - name: Debian versions: - any galaxy_tags: - samples ansible-compat-4.1.11/test/test_api.py000066400000000000000000000001461454751074100176460ustar00rootroot00000000000000"""Tests for ansible_compat package.""" def test_placeholder() -> None: """Placeholder test.""" ansible-compat-4.1.11/test/test_config.py000066400000000000000000000055211454751074100203440ustar00rootroot00000000000000"""Tests for ansible_compat.config submodule.""" import copy import subprocess import pytest from _pytest.monkeypatch import MonkeyPatch from packaging.version import Version from ansible_compat.config import AnsibleConfig, ansible_version, parse_ansible_version from ansible_compat.errors import InvalidPrerequisiteError, MissingAnsibleError def test_config() -> None: """Checks that config vars are loaded with their expected type.""" config = AnsibleConfig() assert isinstance(config.ACTION_WARNINGS, bool) assert isinstance(config.CACHE_PLUGIN_PREFIX, str) assert isinstance(config.CONNECTION_FACTS_MODULES, dict) assert config.ANSIBLE_COW_PATH is None assert isinstance(config.NETWORK_GROUP_MODULES, list) assert isinstance(config.DEFAULT_GATHER_TIMEOUT, (int, type(None))) # check lowercase and older name aliasing assert isinstance(config.collections_paths, list) assert isinstance(config.collections_path, list) assert config.collections_paths == config.collections_path # check if we can access the special data member assert config.data["ACTION_WARNINGS"] == config.ACTION_WARNINGS with pytest.raises(AttributeError): _ = config.THIS_DOES_NOT_EXIST def test_config_with_dump() -> None: """Tests that config can parse given dumps.""" config = AnsibleConfig(config_dump="ACTION_WARNINGS(default) = True") assert config.ACTION_WARNINGS is True def test_config_copy() -> None: """Checks ability to use copy/deepcopy.""" config = AnsibleConfig() new_config = copy.copy(config) assert isinstance(new_config, AnsibleConfig) assert new_config is not config # deepcopy testing new_config = copy.deepcopy(config) assert isinstance(new_config, AnsibleConfig) assert new_config is not config def test_parse_ansible_version_fail() -> None: """Checks that parse_ansible_version raises an error on invalid input.""" with pytest.raises( InvalidPrerequisiteError, match="Unable to parse ansible cli version", ): parse_ansible_version("foo") def test_ansible_version_missing(monkeypatch: MonkeyPatch) -> None: """Validate ansible_version behavior when ansible is missing.""" monkeypatch.setattr( "subprocess.run", lambda *args, **kwargs: subprocess.CompletedProcess( # noqa: ARG005 args=[], returncode=1, ), ) with pytest.raises( MissingAnsibleError, match="Unable to find a working copy of ansible executable.", ): # bypassing lru cache ansible_version.__wrapped__() def test_ansible_version() -> None: """Validate ansible_version behavior.""" assert ansible_version() >= Version("1.0") def test_ansible_version_arg() -> None: """Validate ansible_version behavior.""" assert ansible_version("2.0") >= Version("1.0") ansible-compat-4.1.11/test/test_configuration_example.py000066400000000000000000000006561454751074100234650ustar00rootroot00000000000000"""Sample usage of AnsibleConfig.""" from ansible_compat.config import AnsibleConfig def test_example_config() -> None: """Test basic functionality of AnsibleConfig.""" cfg = AnsibleConfig() assert isinstance(cfg.ACTION_WARNINGS, bool) # you can also use lowercase: assert isinstance(cfg.action_warnings, 
bool) # you can also use it as dictionary assert cfg["action_warnings"] == cfg.action_warnings ansible-compat-4.1.11/test/test_loaders.py000066400000000000000000000004211454751074100205220ustar00rootroot00000000000000"""Test for ansible_compat.loaders module.""" from pathlib import Path from ansible_compat.loaders import colpath_from_path def test_colpath_from_path() -> None: """Test colpath_from_path non existing path.""" assert colpath_from_path(Path("/foo/bar/")) is None ansible-compat-4.1.11/test/test_prerun.py000066400000000000000000000005251454751074100204110ustar00rootroot00000000000000"""Tests for ansible_compat.prerun module.""" from pathlib import Path from ansible_compat.prerun import get_cache_dir def test_get_cache_dir_relative() -> None: """Test behaviors of get_cache_dir.""" relative_path = Path() abs_path = relative_path.resolve() assert get_cache_dir(relative_path) == get_cache_dir(abs_path) ansible-compat-4.1.11/test/test_runtime.py000066400000000000000000000735601454751074100205720ustar00rootroot00000000000000"""Tests for Runtime class.""" # pylint: disable=protected-access from __future__ import annotations import logging import os import pathlib import subprocess from contextlib import contextmanager from pathlib import Path from shutil import rmtree from typing import TYPE_CHECKING, Any import pytest from packaging.version import Version from ansible_compat.config import ansible_version from ansible_compat.constants import INVALID_PREREQUISITES_RC from ansible_compat.errors import ( AnsibleCommandError, AnsibleCompatError, InvalidPrerequisiteError, ) from ansible_compat.runtime import ( CompletedProcess, Runtime, _get_galaxy_role_name, is_url, search_galaxy_paths, ) if TYPE_CHECKING: from collections.abc import Iterator from _pytest.monkeypatch import MonkeyPatch from pytest_mock import MockerFixture def test_runtime_version(runtime: Runtime) -> None: """Tests version property.""" version = runtime.version assert isinstance(version, Version) # tests that caching property value worked (coverage) assert version == runtime.version @pytest.mark.parametrize( "require_module", (True, False), ids=("module-required", "module-unrequired"), ) def test_runtime_version_outdated(require_module: bool) -> None: """Checks that instantiation raises if version is outdated.""" with pytest.raises(RuntimeError, match="Found incompatible version of ansible"): Runtime(min_required_version="9999.9.9", require_module=require_module) def test_runtime_missing_ansible_module(monkeypatch: MonkeyPatch) -> None: """Checks that we produce a RuntimeError when ansible module is missing.""" class RaiseException: """Class to raise an exception.""" def __init__( self, *args: Any, # noqa: ARG002,ANN401 **kwargs: Any, # noqa: ARG002,ANN401 ) -> None: raise ModuleNotFoundError monkeypatch.setattr("importlib.import_module", RaiseException) with pytest.raises(RuntimeError, match="Unable to find Ansible python module."): Runtime(require_module=True) def test_runtime_mismatch_ansible_module(monkeypatch: MonkeyPatch) -> None: """Test that missing module is detected.""" monkeypatch.setattr("ansible.release.__version__", "0.0.0", raising=False) with pytest.raises(RuntimeError, match="versions do not match"): Runtime(require_module=True) def test_runtime_require_module() -> None: """Check that require_module successful pass.""" Runtime(require_module=True) # Now we try to set the collection path, something to check if that is # causing an exception, as 2.15 introduced new init code. 
from ansible.utils.collection_loader import ( # pylint: disable=import-outside-toplevel AnsibleCollectionConfig, ) AnsibleCollectionConfig.playbook_paths = "." # Calling it again in order to see that it does not produce UserWarning: AnsibleCollectionFinder has already been configured # which is done by Ansible core 2.15+. We added special code inside Runtime # that should avoid initializing twice and raise that warning. Runtime(require_module=True) def test_runtime_version_fail_module(mocker: MockerFixture) -> None: """Tests for failure to detect Ansible version.""" patched = mocker.patch( "ansible_compat.runtime.parse_ansible_version", autospec=True, ) patched.side_effect = InvalidPrerequisiteError( "Unable to parse ansible cli version", ) runtime = Runtime() with pytest.raises( InvalidPrerequisiteError, match="Unable to parse ansible cli version", ): _ = runtime.version # pylint: disable=pointless-statement def test_runtime_version_fail_cli(mocker: MockerFixture) -> None: """Tests for failure to detect Ansible version.""" mocker.patch( "ansible_compat.runtime.Runtime.run", return_value=CompletedProcess( ["x"], returncode=123, stdout="oops", stderr="some error", ), autospec=True, ) runtime = Runtime() with pytest.raises( RuntimeError, match="Unable to find a working copy of ansible executable.", ): _ = runtime.version # pylint: disable=pointless-statement def test_runtime_prepare_ansible_paths_validation() -> None: """Check that we validate collection_path.""" runtime = Runtime() runtime.config.collections_paths = "invalid-value" # type: ignore[assignment] with pytest.raises(RuntimeError, match="Unexpected ansible configuration"): runtime._prepare_ansible_paths() @pytest.mark.parametrize( ("folder", "role_name", "isolated"), ( ("ansible-role-sample", "acme.sample", True), ("acme.sample2", "acme.sample2", True), ("sample3", "acme.sample3", True), ("sample4", "acme.sample4", False), ), ids=("1", "2", "3", "4"), ) def test_runtime_install_role( caplog: pytest.LogCaptureFixture, folder: str, role_name: str, isolated: bool, ) -> None: """Checks that we can install roles.""" caplog.set_level(logging.INFO) project_dir = Path(__file__).parent / "roles" / folder runtime = Runtime(isolated=isolated, project_dir=project_dir) runtime.prepare_environment(install_local=True) # check that role appears as installed now result = runtime.run(["ansible-galaxy", "list"]) assert result.returncode == 0, result assert role_name in result.stdout if isolated: assert pathlib.Path(f"{runtime.cache_dir}/roles/{role_name}").is_symlink() else: assert pathlib.Path( f"{Path(runtime.config.default_roles_path[0]).expanduser()}/{role_name}", ).is_symlink() runtime.clean() # also test that clean does not break when cache_dir is missing tmp_dir = runtime.cache_dir runtime.cache_dir = None runtime.clean() runtime.cache_dir = tmp_dir def test_prepare_environment_with_collections(tmp_path: pathlib.Path) -> None: """Check that collections are correctly installed.""" runtime = Runtime(isolated=True, project_dir=tmp_path) runtime.prepare_environment(required_collections={"community.molecule": "0.1.0"}) def test_runtime_install_requirements_missing_file() -> None: """Check that missing requirements file is ignored.""" # Do not rely on this behavior, it may be removed in the future runtime = Runtime() runtime.install_requirements(Path("/that/does/not/exist")) @pytest.mark.parametrize( ("file", "exc", "msg"), ( ( Path("/dev/null"), InvalidPrerequisiteError, "file is not a valid Ansible requirements file", ), ( Path(__file__).parent / 
"assets" / "requirements-invalid-collection.yml", AnsibleCommandError, "Got 1 exit code while running: ansible-galaxy", ), ( Path(__file__).parent / "assets" / "requirements-invalid-role.yml", AnsibleCommandError, "Got 1 exit code while running: ansible-galaxy", ), ), ids=("empty", "invalid-collection", "invalid-role"), ) def test_runtime_install_requirements_invalid_file( file: Path, exc: type[Any], msg: str, ) -> None: """Check that invalid requirements file is raising.""" runtime = Runtime() with pytest.raises( exc, match=msg, ): runtime.install_requirements(file) @contextmanager def cwd(path: Path) -> Iterator[None]: """Context manager for temporary changing current working directory.""" old_pwd = Path.cwd() os.chdir(path) try: yield finally: os.chdir(old_pwd) def test_prerun_reqs_v1(caplog: pytest.LogCaptureFixture) -> None: """Checks that the linter can auto-install requirements v1 when found.""" runtime = Runtime(verbosity=1) path = Path(__file__).parent.parent / "examples" / "reqs_v1" with cwd(path): runtime.prepare_environment() assert any( msg.startswith("Running ansible-galaxy role install") for msg in caplog.messages ) assert all( "Running ansible-galaxy collection install" not in msg for msg in caplog.messages ) def test_prerun_reqs_v2(caplog: pytest.LogCaptureFixture) -> None: """Checks that the linter can auto-install requirements v2 when found.""" runtime = Runtime(verbosity=1) path = (Path(__file__).parent.parent / "examples" / "reqs_v2").resolve() with cwd(path): runtime.prepare_environment() assert any( msg.startswith("Running ansible-galaxy role install") for msg in caplog.messages ) assert any( msg.startswith("Running ansible-galaxy collection install") for msg in caplog.messages ) def test_prerun_reqs_broken(runtime: Runtime) -> None: """Checks that the we report invalid requirements.yml file.""" path = (Path(__file__).parent.parent / "examples" / "reqs_broken").resolve() with cwd(path), pytest.raises(InvalidPrerequisiteError): runtime.prepare_environment() def test__update_env_no_old_value_no_default_no_value(monkeypatch: MonkeyPatch) -> None: """Make sure empty value does not touch environment.""" monkeypatch.delenv("DUMMY_VAR", raising=False) runtime = Runtime() runtime._update_env("DUMMY_VAR", []) assert "DUMMY_VAR" not in runtime.environ def test__update_env_no_old_value_no_value(monkeypatch: MonkeyPatch) -> None: """Make sure empty value does not touch environment.""" monkeypatch.delenv("DUMMY_VAR", raising=False) runtime = Runtime() runtime._update_env("DUMMY_VAR", [], "a:b") assert "DUMMY_VAR" not in runtime.environ def test__update_env_no_default_no_value(monkeypatch: MonkeyPatch) -> None: """Make sure empty value does not touch environment.""" monkeypatch.setenv("DUMMY_VAR", "a:b") runtime = Runtime() runtime._update_env("DUMMY_VAR", []) assert runtime.environ["DUMMY_VAR"] == "a:b" @pytest.mark.parametrize( ("value", "result"), ( (["a"], "a"), (["a", "b"], "a:b"), (["a", "b", "c"], "a:b:c"), ), ) def test__update_env_no_old_value_no_default( monkeypatch: MonkeyPatch, value: list[str], result: str, ) -> None: """Values are concatenated using : as the separator.""" monkeypatch.delenv("DUMMY_VAR", raising=False) runtime = Runtime() runtime._update_env("DUMMY_VAR", value) assert runtime.environ["DUMMY_VAR"] == result @pytest.mark.parametrize( ("default", "value", "result"), ( ("a:b", ["c"], "c:a:b"), ("a:b", ["c:d"], "c:d:a:b"), ), ) def test__update_env_no_old_value( monkeypatch: MonkeyPatch, default: str, value: list[str], result: str, ) -> None: """Values are 
appended to default value.""" monkeypatch.delenv("DUMMY_VAR", raising=False) runtime = Runtime() runtime._update_env("DUMMY_VAR", value, default) assert runtime.environ["DUMMY_VAR"] == result @pytest.mark.parametrize( ("old_value", "value", "result"), ( ("a:b", ["c"], "c:a:b"), ("a:b", ["c:d"], "c:d:a:b"), ), ) def test__update_env_no_default( monkeypatch: MonkeyPatch, old_value: str, value: list[str], result: str, ) -> None: """Values are appended to preexisting value.""" monkeypatch.setenv("DUMMY_VAR", old_value) runtime = Runtime() runtime._update_env("DUMMY_VAR", value) assert runtime.environ["DUMMY_VAR"] == result @pytest.mark.parametrize( ("old_value", "default", "value", "result"), ( ("", "", ["e"], "e"), ("a", "", ["e"], "e:a"), ("", "c", ["e"], "e"), ("a", "c", ["e:f"], "e:f:a"), ), ) def test__update_env( monkeypatch: MonkeyPatch, old_value: str, default: str, # pylint: disable=unused-argument # noqa: ARG001 value: list[str], result: str, ) -> None: """Defaults are ignored when preexisting value is present.""" monkeypatch.setenv("DUMMY_VAR", old_value) runtime = Runtime() runtime._update_env("DUMMY_VAR", value) assert runtime.environ["DUMMY_VAR"] == result def test_require_collection_wrong_version(runtime: Runtime) -> None: """Tests behaviour of require_collection.""" subprocess.check_output( [ # noqa: S603 "ansible-galaxy", "collection", "install", "examples/reqs_v2/community-molecule-0.1.0.tar.gz", "-p", "~/.ansible/collections", ], ) with pytest.raises(InvalidPrerequisiteError) as pytest_wrapped_e: runtime.require_collection("community.molecule", "9999.9.9") assert pytest_wrapped_e.type == InvalidPrerequisiteError assert pytest_wrapped_e.value.code == INVALID_PREREQUISITES_RC def test_require_collection_invalid_name(runtime: Runtime) -> None: """Check that require_collection raise with invalid collection name.""" with pytest.raises( InvalidPrerequisiteError, match="Invalid collection name supplied:", ): runtime.require_collection("that-is-invalid") def test_require_collection_invalid_collections_path(runtime: Runtime) -> None: """Check that require_collection raise with invalid collections path.""" runtime.config.collections_paths = "/that/is/invalid" # type: ignore[assignment] with pytest.raises( InvalidPrerequisiteError, match="Unable to determine ansible collection paths", ): runtime.require_collection("community.molecule") def test_require_collection_preexisting_broken(tmp_path: pathlib.Path) -> None: """Check that require_collection raise with broken pre-existing collection.""" runtime = Runtime(isolated=True, project_dir=tmp_path) dest_path: str = runtime.config.collections_paths[0] dest = pathlib.Path(dest_path) / "ansible_collections" / "foo" / "bar" dest.mkdir(parents=True, exist_ok=True) with pytest.raises(InvalidPrerequisiteError, match="missing MANIFEST.json"): runtime.require_collection("foo.bar") def test_require_collection(runtime_tmp: Runtime) -> None: """Check that require collection successful install case.""" runtime_tmp.require_collection("community.molecule", "0.1.0") @pytest.mark.parametrize( ("name", "version", "install"), ( ("fake_namespace.fake_name", None, True), ("fake_namespace.fake_name", "9999.9.9", True), ("fake_namespace.fake_name", None, False), ), ids=("a", "b", "c"), ) def test_require_collection_missing( name: str, version: str, install: bool, runtime: Runtime, ) -> None: """Tests behaviour of require_collection, missing case.""" with pytest.raises(AnsibleCompatError) as pytest_wrapped_e: runtime.require_collection(name=name, 
version=version, install=install) assert pytest_wrapped_e.type == InvalidPrerequisiteError assert pytest_wrapped_e.value.code == INVALID_PREREQUISITES_RC def test_install_collection(runtime: Runtime) -> None: """Check that valid collection installs do not fail.""" runtime.install_collection("examples/reqs_v2/community-molecule-0.1.0.tar.gz") def test_install_collection_git(runtime: Runtime) -> None: """Check that valid collection installs do not fail.""" runtime.install_collection( "git+https://github.com/ansible-collections/ansible.posix,main", ) def test_install_collection_dest(runtime: Runtime, tmp_path: pathlib.Path) -> None: """Check that valid collection to custom destination passes.""" # Since Ansible 2.15.3 there is no guarantee that this will install the collection at requested path # as it might decide to not install anything if requirement is already present at another location. runtime.install_collection( "examples/reqs_v2/community-molecule-0.1.0.tar.gz", destination=tmp_path, ) runtime.load_collections() for collection in runtime.collections: if collection == "community.molecule": return msg = "Failed to find collection as installed." raise AssertionError(msg) def test_install_collection_fail(runtime: Runtime) -> None: """Check that invalid collection install fails.""" with pytest.raises(AnsibleCompatError) as pytest_wrapped_e: runtime.install_collection("community.molecule:>=9999.0") assert pytest_wrapped_e.type == InvalidPrerequisiteError assert pytest_wrapped_e.value.code == INVALID_PREREQUISITES_RC def test_install_galaxy_role(runtime_tmp: Runtime) -> None: """Check install role with empty galaxy file.""" pathlib.Path(f"{runtime_tmp.project_dir}/galaxy.yml").touch() pathlib.Path(f"{runtime_tmp.project_dir}/meta").mkdir() pathlib.Path(f"{runtime_tmp.project_dir}/meta/main.yml").touch() # this should only raise a warning runtime_tmp._install_galaxy_role(runtime_tmp.project_dir, role_name_check=1) # this should test the bypass role name check path runtime_tmp._install_galaxy_role(runtime_tmp.project_dir, role_name_check=2) # this should raise an error with pytest.raises( InvalidPrerequisiteError, match="does not follow current galaxy requirements", ): runtime_tmp._install_galaxy_role(runtime_tmp.project_dir, role_name_check=0) def test_install_galaxy_role_unlink( caplog: pytest.LogCaptureFixture, ) -> None: """Test ability to unlink incorrect symlinked roles.""" runtime_tmp = Runtime(verbosity=1) runtime_tmp.prepare_environment() pathlib.Path(f"{runtime_tmp.cache_dir}/roles").mkdir(parents=True, exist_ok=True) pathlib.Path(f"{runtime_tmp.cache_dir}/roles/acme.get_rich").symlink_to("/dev/null") pathlib.Path(f"{runtime_tmp.project_dir}/meta").mkdir() pathlib.Path(f"{runtime_tmp.project_dir}/meta/main.yml").write_text( """galaxy_info: role_name: get_rich namespace: acme """, encoding="utf-8", ) runtime_tmp._install_galaxy_role(runtime_tmp.project_dir) assert "symlink to current repository" in caplog.text def test_install_galaxy_role_bad_namespace(runtime_tmp: Runtime) -> None: """Check install role with bad namespace in galaxy info.""" pathlib.Path(f"{runtime_tmp.project_dir}/meta").mkdir() pathlib.Path(f"{runtime_tmp.project_dir}/meta/main.yml").write_text( """galaxy_info: role_name: foo author: bar namespace: ["xxx"] """, ) # this should raise an error regardless the role_name_check value with pytest.raises(AnsibleCompatError, match="Role namespace must be string, not"): runtime_tmp._install_galaxy_role(runtime_tmp.project_dir, role_name_check=1) @pytest.mark.parametrize( 
"galaxy_info", ( """galaxy_info: role_name: foo-bar namespace: acme """, """galaxy_info: role_name: foo-bar """, ), ids=("bad-name", "bad-name-without-namespace"), ) def test_install_galaxy_role_name_role_name_check_equals_to_1( runtime_tmp: Runtime, galaxy_info: str, caplog: pytest.LogCaptureFixture, ) -> None: """Check install role with bad role name in galaxy info.""" caplog.set_level(logging.WARN) pathlib.Path(f"{runtime_tmp.project_dir}/meta").mkdir() pathlib.Path(f"{runtime_tmp.project_dir}/meta/main.yml").write_text( galaxy_info, encoding="utf-8", ) runtime_tmp._install_galaxy_role(runtime_tmp.project_dir, role_name_check=1) assert "Computed fully qualified role name of " in caplog.text def test_install_galaxy_role_no_checks(runtime_tmp: Runtime) -> None: """Check install role with bad namespace in galaxy info.""" runtime_tmp.prepare_environment() pathlib.Path(f"{runtime_tmp.project_dir}/meta").mkdir() pathlib.Path(f"{runtime_tmp.project_dir}/meta/main.yml").write_text( """galaxy_info: role_name: foo author: bar namespace: acme """, ) runtime_tmp._install_galaxy_role(runtime_tmp.project_dir, role_name_check=2) result = runtime_tmp.run(["ansible-galaxy", "list"]) assert "- acme.foo," in result.stdout assert result.returncode == 0, result def test_upgrade_collection(runtime_tmp: Runtime) -> None: """Check that collection upgrade is possible.""" # ensure that we inject our tmp folders in ansible paths runtime_tmp.prepare_environment() # we install specific oudated version of a collection runtime_tmp.install_collection("examples/reqs_v2/community-molecule-0.1.0.tar.gz") with pytest.raises( InvalidPrerequisiteError, match="Found community.molecule collection 0.1.0 but 9.9.9 or newer is required.", ): # we check that when install=False, we raise error runtime_tmp.require_collection("community.molecule", "9.9.9", install=False) # this should not fail, as we have this version runtime_tmp.require_collection("community.molecule", "0.1.0") def test_require_collection_no_cache_dir() -> None: """Check require_collection without a cache directory.""" runtime = Runtime() assert not runtime.cache_dir runtime.require_collection("community.molecule", "0.1.0", install=True) def test_runtime_env_ansible_library(monkeypatch: MonkeyPatch) -> None: """Verify that custom path specified using ANSIBLE_LIBRARY is not lost.""" path_name = "foo" monkeypatch.setenv("ANSIBLE_LIBRARY", path_name) path_name = os.path.realpath(path_name) runtime = Runtime() runtime.prepare_environment() assert path_name in runtime.config.default_module_path @pytest.mark.parametrize( ("lower", "upper", "expected"), ( ("1.0", "9999.0", True), (None, "9999.0", True), ("1.0", None, True), ("9999.0", None, False), (None, "1.0", False), ), ids=("1", "2", "3", "4", "5"), ) def test_runtime_version_in_range( lower: str | None, upper: str | None, expected: bool, ) -> None: """Validate functioning of version_in_range.""" runtime = Runtime() assert runtime.version_in_range(lower=lower, upper=upper) is expected @pytest.mark.parametrize( ("path", "scenario", "expected_collections"), ( pytest.param( "test/collections/acme.goodies", "default", [ "ansible.posix", # from tests/requirements.yml "ansible.utils", # from galaxy.yml "community.molecule", # from galaxy.yml "community.crypto", # from galaxy.yml as a git dependency ], id="normal", ), pytest.param( "test/collections/acme.goodies/roles/baz", "deep_scenario", ["community.molecule"], id="deep", ), ), ) def test_install_collection_from_disk( path: str, scenario: str, expected_collections: 
list[str], ) -> None: """Tests ability to install a local collection.""" # ensure we do not have acme.goodies installed in user directory as it may # produce false positives rmtree( pathlib.Path( "~/.ansible/collections/ansible_collections/acme/goodies", ).expanduser(), ignore_errors=True, ) with cwd(Path(path)): runtime = Runtime(isolated=True) # this should call install_collection_from_disk(".") runtime.prepare_environment(install_local=True) # that molecule converge playbook can be used without molecule and # should validate that the installed collection is available. result = runtime.run(["ansible-playbook", f"molecule/{scenario}/converge.yml"]) assert result.returncode == 0, result.stdout runtime.load_collections() for collection_name in expected_collections: assert ( collection_name in runtime.collections ), f"{collection_name} not found in {runtime.collections.keys()}" runtime.clean() def test_install_collection_from_disk_fail() -> None: """Tests that we fail to install a broken collection.""" with cwd(Path("test/collections/acme.broken")): runtime = Runtime(isolated=True) with pytest.raises(RuntimeError) as exc_info: runtime.prepare_environment(install_local=True) # based on version of Ansible used, we might get a different error, # but both errors should be considered acceptable assert exc_info.type in ( RuntimeError, AnsibleCompatError, AnsibleCommandError, InvalidPrerequisiteError, ) assert exc_info.match( "(is missing the following mandatory|Got 1 exit code while running: ansible-galaxy collection build)", ) def test_prepare_environment_offline_role() -> None: """Ensure that we can make use of offline roles.""" with cwd(Path("test/roles/acme.missing_deps")): runtime = Runtime(isolated=True) runtime.prepare_environment(install_local=True, offline=True) def test_runtime_run(runtime: Runtime) -> None: """Check if tee and non tee mode return same kind of results.""" result1 = runtime.run(["seq", "10"]) result2 = runtime.run(["seq", "10"], tee=True) assert result1.returncode == result2.returncode assert result1.stderr == result2.stderr assert result1.stdout == result2.stdout def test_runtime_exec_cwd(runtime: Runtime) -> None: """Check if passing cwd works as expected.""" path = Path("/") result1 = runtime.run(["pwd"], cwd=path) result2 = runtime.run(["pwd"]) assert result1.stdout.rstrip() == str(path) assert result1.stdout != result2.stdout def test_runtime_exec_env(runtime: Runtime) -> None: """Check if passing env works.""" result = runtime.run(["printenv", "FOO"]) assert not result.stdout result = runtime.run(["printenv", "FOO"], env={"FOO": "bar"}) assert result.stdout.rstrip() == "bar" runtime.environ["FOO"] = "bar" result = runtime.run(["printenv", "FOO"]) assert result.stdout.rstrip() == "bar" def test_runtime_plugins(runtime: Runtime) -> None: """Tests ability to access detected plugins.""" assert len(runtime.plugins.cliconf) == 0 # ansible.netcommon.restconf might be in httpapi assert isinstance(runtime.plugins.httpapi, dict) # "ansible.netcommon.default" might be in runtime.plugins.netconf assert isinstance(runtime.plugins.netconf, dict) assert isinstance(runtime.plugins.role, dict) assert "become" in runtime.plugins.keyword if ansible_version() < Version("2.14.0"): assert "sudo" in runtime.plugins.become assert "memory" in runtime.plugins.cache assert "default" in runtime.plugins.callback assert "local" in runtime.plugins.connection assert "ini" in runtime.plugins.inventory assert "env" in runtime.plugins.lookup assert "sh" in runtime.plugins.shell assert 
"host_group_vars" in runtime.plugins.vars assert "file" in runtime.plugins.module assert "free" in runtime.plugins.strategy # ansible-doc below 2.14 does not support listing 'test' and 'filter' types: with pytest.raises(RuntimeError): assert "is_abs" in runtime.plugins.test with pytest.raises(RuntimeError): assert "bool" in runtime.plugins.filter else: assert "ansible.builtin.sudo" in runtime.plugins.become assert "ansible.builtin.memory" in runtime.plugins.cache assert "ansible.builtin.default" in runtime.plugins.callback assert "ansible.builtin.local" in runtime.plugins.connection assert "ansible.builtin.ini" in runtime.plugins.inventory assert "ansible.builtin.env" in runtime.plugins.lookup assert "ansible.builtin.sh" in runtime.plugins.shell assert "ansible.builtin.host_group_vars" in runtime.plugins.vars assert "ansible.builtin.file" in runtime.plugins.module assert "ansible.builtin.free" in runtime.plugins.strategy assert "ansible.builtin.is_abs" in runtime.plugins.test assert "ansible.builtin.bool" in runtime.plugins.filter @pytest.mark.parametrize( ("path", "result"), ( pytest.param( "test/assets/galaxy_paths", ["test/assets/galaxy_paths/foo/galaxy.yml"], id="1", ), pytest.param( "test/collections", [], # should find nothing because these folders are not valid namespaces id="2", ), pytest.param( "test/assets/galaxy_paths/foo", ["test/assets/galaxy_paths/foo/galaxy.yml"], id="3", ), ), ) def test_galaxy_path(path: str, result: list[str]) -> None: """Check behavior of galaxy path search.""" assert search_galaxy_paths(Path(path)) == result @pytest.mark.parametrize( ("name", "result"), ( pytest.param( "foo", False, id="0", ), pytest.param( "git+git", True, id="1", ), pytest.param( "git@acme.com", True, id="2", ), ), ) def test_is_url(name: str, result: bool) -> None: """Checks functionality of is_url.""" assert is_url(name) == result def test_prepare_environment_repair_broken_symlink( caplog: pytest.LogCaptureFixture, ) -> None: """Ensure we can deal with broken symlinks in collections.""" caplog.set_level(logging.INFO) project_dir = Path(__file__).parent / "collections" / "acme.minimal" runtime = Runtime(isolated=True, project_dir=project_dir) assert runtime.cache_dir acme = runtime.cache_dir / "collections" / "ansible_collections" / "acme" acme.mkdir(parents=True, exist_ok=True) goodies = acme / "minimal" rmtree(goodies, ignore_errors=True) goodies.unlink(missing_ok=True) goodies.symlink_to("/invalid/destination") runtime.prepare_environment(install_local=True) assert any( msg.startswith("Collection is symlinked, but not pointing to") for msg in caplog.messages ) def test_get_galaxy_role_name_invalid() -> None: """Verifies that function returns empty string on invalid input.""" galaxy_infos = { "role_name": False, # <-- invalid data, should be string } assert _get_galaxy_role_name(galaxy_infos) == "" ansible-compat-4.1.11/test/test_runtime_example.py000066400000000000000000000016461454751074100223010ustar00rootroot00000000000000"""Sample use of Runtime class.""" from ansible_compat.runtime import Runtime def test_runtime_example() -> None: """Test basic functionality of Runtime class.""" # instantiate the runtime using isolated mode, so installing new # roles/collections do not pollute the default setup. 
runtime = Runtime(isolated=True, max_retries=3) # Print Ansible core version _ = runtime.version # 2.9.10 (Version object) # Get configuration info from runtime _ = runtime.config.collections_path # Detect if current project is a collection and install its requirements runtime.prepare_environment(install_local=True) # will retry 3 times if needed # Install a new collection (will retry 3 times if needed) runtime.install_collection("examples/reqs_v2/community-molecule-0.1.0.tar.gz") # Execute a command result = runtime.run(["ansible-doc", "--list"]) assert result.returncode == 0 ansible-compat-4.1.11/test/test_runtime_scan_path.py000066400000000000000000000064211454751074100226020ustar00rootroot00000000000000"""Test the scan path functionality of the runtime.""" import json import textwrap from dataclasses import dataclass, fields from pathlib import Path import pytest from _pytest.monkeypatch import MonkeyPatch from ansible_compat.runtime import Runtime from .conftest import VirtualEnvironment V2_COLLECTION_TARBALL = Path("examples/reqs_v2/community-molecule-0.1.0.tar.gz") V2_COLLECTION_NAMESPACE = "community" V2_COLLECTION_NAME = "molecule" V2_COLLECTION_VERSION = "0.1.0" V2_COLLECTION_FULL_NAME = f"{V2_COLLECTION_NAMESPACE}.{V2_COLLECTION_NAME}" @dataclass class ScanSysPath: """Parameters for scan tests.""" scan: bool raises_not_found: bool def __str__(self) -> str: """Return a string representation of the object.""" parts = [ f"{field.name}{str(getattr(self, field.name))[0]}" for field in fields(self) ] return "-".join(parts) @pytest.mark.parametrize( ("param"), ( ScanSysPath(scan=False, raises_not_found=True), ScanSysPath(scan=True, raises_not_found=False), ), ids=str, ) def test_scan_sys_path( venv_module: VirtualEnvironment, monkeypatch: MonkeyPatch, runtime_tmp: Runtime, tmp_path: Path, param: ScanSysPath, ) -> None: """Confirm sys path is scanned for collections. 
:param venv_module: Fixture for a virtual environment :param monkeypatch: Fixture for monkeypatching :param runtime_tmp: Fixture for a Runtime object :param tmp_dir: Fixture for a temporary directory :param param: The parameters for the test """ first_site_package_dir = venv_module.site_package_dirs()[0] installed_to = ( first_site_package_dir / "ansible_collections" / V2_COLLECTION_NAMESPACE / V2_COLLECTION_NAME ) if not installed_to.exists(): # Install the collection into the venv site packages directory, force # as of yet this test is not isolated from the rest of the system runtime_tmp.install_collection( collection=V2_COLLECTION_TARBALL, destination=first_site_package_dir, force=True, ) # Confirm the collection is installed assert installed_to.exists() # Set the sys scan path environment variable monkeypatch.setenv("ANSIBLE_COLLECTIONS_SCAN_SYS_PATH", str(param.scan)) # Set the ansible collections paths to avoid bleed from other tests monkeypatch.setenv("ANSIBLE_COLLECTIONS_PATH", str(tmp_path)) script = textwrap.dedent( f""" import json; from ansible_compat.runtime import Runtime; r = Runtime(); fv, cp = r.require_collection(name="{V2_COLLECTION_FULL_NAME}", version="{V2_COLLECTION_VERSION}", install=False); print(json.dumps({{"found_version": str(fv), "collection_path": str(cp)}})); """, ) proc = venv_module.python_script_run(script) if param.raises_not_found: assert proc.returncode != 0, (proc.stdout, proc.stderr) assert "InvalidPrerequisiteError" in proc.stderr assert "'community.molecule' not found" in proc.stderr else: assert proc.returncode == 0, (proc.stdout, proc.stderr) result = json.loads(proc.stdout) assert result["found_version"] == V2_COLLECTION_VERSION assert result["collection_path"] == str(installed_to) ansible-compat-4.1.11/test/test_schema.py000066400000000000000000000045111454751074100203350ustar00rootroot00000000000000"""Tests for schema utilities.""" from __future__ import annotations import json from pathlib import Path from typing import TYPE_CHECKING, Any import pytest from ansible_compat.schema import JsonSchemaError, json_path, validate if TYPE_CHECKING: from ansible_compat.types import JSON expected_results = [ JsonSchemaError( message="False is not of type 'string'", data_path="environment.a", json_path="$.environment.a", schema_path="properties.environment.additionalProperties.type", relative_schema='{"type": "string"}', expected="string", validator="type", found="False", ), JsonSchemaError( message="True is not of type 'string'", data_path="environment.b", json_path="$.environment.b", schema_path="properties.environment.additionalProperties.type", relative_schema='{"type": "string"}', expected="string", validator="type", found="True", ), ] def json_from_asset(file_name: str) -> JSON: """Load a json file from disk.""" file = Path(__file__).parent / file_name with file.open(encoding="utf-8") as f: return json.load(f) # type: ignore[no-any-return] def jsonify(data: Any) -> JSON: # noqa: ANN401 """Convert object in JSON data structure.""" return json.loads(json.dumps(data, default=vars, sort_keys=True)) # type: ignore[no-any-return] @pytest.mark.parametrize("index", range(1)) def test_schema(index: int) -> None: """Test the schema validator.""" schema = json_from_asset(f"assets/validate{index}_schema.json") data = json_from_asset(f"assets/validate{index}_data.json") expected = json_from_asset(f"assets/validate{index}_expected.json") # ensure we produce consistent results between runs for _ in range(1, 100): found_errors = validate(schema=schema, data=data) # 
ensure returned results are already sorted, as we assume our class # knows how to sort itself assert sorted(found_errors) == found_errors, "multiple errors not sorted" found_errors_json = jsonify(found_errors) assert ( found_errors_json == expected ), f"inconsistent returns: {found_errors_json}" def test_json_path() -> None: """Test json_path function.""" assert json_path(["a", 1, "b"]) == "$.a[1].b" ansible-compat-4.1.11/tools/000077500000000000000000000000001454751074100156445ustar00rootroot00000000000000ansible-compat-4.1.11/tools/get-version.sh000077500000000000000000000003701454751074100204450ustar00rootroot00000000000000#!/bin/bash set -e { python3 -c "import setuptools_scm" || python3 -m pip install --user setuptools-scm } 1>&2 # redirect stdout to stderr to avoid polluting the output python3 -m setuptools_scm | \ sed 's/Guessed Version\([^+]\+\).*/\1/' ansible-compat-4.1.11/tools/update-version.sh000077500000000000000000000003601454751074100211470ustar00rootroot00000000000000#!/bin/bash DIR=$(dirname "$0") VERSION=$(./tools/get-version.sh) mkdir -p "${DIR}/../dist" sed -e "s/VERSION_PLACEHOLDER/${VERSION}/" \ "${DIR}/../dist/python-ansible-compat.spec.in" \ > "${DIR}/../dist/python-ansible-compat.spec" ansible-compat-4.1.11/tox.ini000066400000000000000000000122141454751074100160170ustar00rootroot00000000000000[tox] minversion = 4.0.0 envlist = lint pkg docs py py-devel py39-ansible212 py39-ansible213 py39-ansible214 py39-ansible215 py310-ansible212 py310-ansible213 py310-ansible214 py310-ansible215 py311-ansible212 py311-ansible213 py311-ansible214 py311-ansible215 py312-ansible216 isolated_build = true skip_missing_interpreters = True requires = tox >= 4.6.3 setuptools >= 65.3.0 # editable installs [testenv] description = Run the tests devel: ansible devel branch ansible212: ansible-core 2.12 ansible213: ansible-core 2.13 ansible214: ansible-core 2.14 ansible215: ansible-core 2.15 ansible216: ansible-core 2.16 deps = ansible212: ansible-core>=2.12,<2.13 ansible213: ansible-core>=2.13,<2.14 ansible214: ansible-core>=2.14,<2.15 ansible215: ansible-core>=2.15,<2.16 ansible216: ansible-core>=2.16,<2.17 devel: ansible-core @ git+https://github.com/ansible/ansible.git@c5d18c39d81e2b3b10856b2fb76747230e4fac4a # GPLv3+ # avoid installing ansible-core on -devel envs: !devel: ansible-core extras = test commands = sh -c "ansible --version | head -n 1" # We add coverage options but not making them mandatory as we do not want to force # pytest users to run coverage when they just want to run a single test with `pytest -k test` coverage run -m pytest {posargs:} sh -c "coverage combine -a -q --data-file=.coverage {toxworkdir}/.coverage.*" # needed for upload to codecov.io -sh -c "COVERAGE_FILE= coverage xml --ignore-errors -q --fail-under=0" # needed for vscode integration due to https://github.com/ryanluker/vscode-coverage-gutters/issues/403 -sh -c "COVERAGE_FILE= coverage lcov --ignore-errors -q --fail-under=0" sh -c "COVERAGE_FILE= coverage report" # We fail if files are modified at the end git diff --exit-code commands_pre = # safety measure to assure we do not accidentally run tests with broken dependencies {envpython} -m pip check # cleaning needed to prevent errors between runs sh -c "rm -f .coverage {toxworkdir}/.coverage.* 2>/dev/null || true" passenv = CURL_CA_BUNDLE # https proxies, https://github.com/tox-dev/tox/issues/1437 FORCE_COLOR HOME NO_COLOR PYTEST_* # allows developer to define their own preferences PY_COLORS REQUESTS_CA_BUNDLE # https proxies SSL_CERT_FILE # https proxies 
LANG LC_ALL LC_CTYPE setenv = ANSIBLE_DEVEL_WARNING='false' COVERAGE_FILE = {env:COVERAGE_FILE:{toxworkdir}/.coverage.{envname}} COVERAGE_PROCESS_START={toxinidir}/pyproject.toml PIP_DISABLE_PIP_VERSION_CHECK = 1 PIP_CONSTRAINT = {toxinidir}/requirements.txt PRE_COMMIT_COLOR = always PYTEST_REQPASS = 93 FORCE_COLOR = 1 allowlist_externals = ansible git sh # https://tox.wiki/en/latest/upgrading.html#editable-mode package = editable [testenv:lint] description = Run all linters # locked basepython is needed because to keep constrains.txt predictable basepython = python3.10 deps = pre-commit>=2.6.0 skip_install = true usedevelop = false commands = pre-commit run -a --show-diff-on-failure {posargs:} pre-commit run -a pip-compile passenv = {[testenv]passenv} PRE_COMMIT_HOME setenv = {[testenv]setenv} PIP_CONSTRAINT = /dev/null [testenv:deps] description = Bump all test dependencies basepython = {[testenv:lint]basepython} envdir = {toxworkdir}/lint deps = {[testenv:lint]deps} skip_install = true commands = pre-commit run -a --hook-stage manual pip-compile-upgrade {[testenv:lint]commands} setenv = {[testenv]setenv} PIP_CONSTRAINT = /dev/null [testenv:pkg] description = Build package, verify metadata, install package and assert behavior when ansible is missing. deps = build >= 0.9.0 twine >= 4.0.1 skip_install = true # Ref: https://twitter.com/di_codes/status/1044358639081975813 commands = # build wheel and sdist using PEP-517 {envpython} -c 'import os.path, shutil, sys; \ dist_dir = os.path.join("{toxinidir}", "dist"); \ os.path.isdir(dist_dir) or sys.exit(0); \ print("Removing \{!s\} contents...".format(dist_dir), file=sys.stderr); \ shutil.rmtree(dist_dir)' {envpython} -m build \ --outdir {toxinidir}/dist/ \ {toxinidir} # Validate metadata using twine twine check --strict {toxinidir}/dist/* # Install the wheel sh -c "python3 -m pip install {toxinidir}/dist/*.whl" pip uninstall -y ansible-compat [testenv:py] description = Run the tests with {basepython} ansible-core 2.12+ deps = {[testenv]deps} ansible-core>=2.12 [testenv:rpm] description = Use packit to build RPM (requires RPM based Linux distro) deps = packitos commands = packit build in-mock [testenv:docs] description = Build docs commands = mkdocs {posargs:build} --strict extras = docs passenv = * [testenv:smoke] description = Run ansible-lint own testing with current code from compat library commands_pre = ansible localhost -m ansible.builtin.git -a 'repo=https://github.com/ansible/ansible-lint dest={envdir}/tmp/ansible-lint' pip install -e "{envdir}/tmp/ansible-lint[test]" commands = bash -c "pip freeze|grep ansible" pytest -k role deps = ansible-core setenv = {[testenv]setenv} PIP_CONSTRAINT = /dev/null PYTEST_REQPASS = 0 changedir = {envdir}/tmp/ansible-lint allowlist_externals = pwd bash