ansible-compat-25.1.4/.config/constraints.txt

# This file was autogenerated by uv via the following command:
#    tox run deps
argparse-manpage==4.6  # via ansible-compat (pyproject.toml)
attrs==25.1.0  # via jsonschema, referencing
babel==2.17.0  # via mkdocs-material
beautifulsoup4==4.13.3  # via linkchecker, mkdocs-htmlproofer-plugin
black==25.1.0  # via ansible-compat (pyproject.toml)
cairocffi==1.7.1  # via cairosvg
cairosvg==2.7.1  # via mkdocs-ansible
certifi==2025.1.31  # via requests
cffi==1.17.1  # via cairocffi, cryptography
charset-normalizer==3.4.1  # via requests
click==8.1.8  # via black, mkdocs
colorama==0.4.6  # via griffe, mkdocs-material
coverage==7.6.12  # via ansible-compat (pyproject.toml)
cryptography==44.0.1  # via ansible-core
csscompressor==0.9.5  # via mkdocs-minify-plugin
cssselect2==0.7.0  # via cairosvg
defusedxml==0.7.1  # via cairosvg
dnspython==2.7.0  # via linkchecker
exceptiongroup==1.2.2  # via pytest
ghp-import==2.1.0  # via mkdocs
griffe==1.5.7  # via mkdocstrings-python
hjson==3.1.0  # via mkdocs-macros-plugin, super-collections
htmlmin2==0.1.13  # via mkdocs-minify-plugin
idna==3.10  # via requests
iniconfig==2.0.0  # via pytest
jinja2==3.1.5  # via ansible-core, mkdocs, mkdocs-macros-plugin, mkdocs-material, mkdocstrings
jsmin==3.0.1  # via mkdocs-minify-plugin
jsonschema==4.23.0  # via ansible-compat (pyproject.toml)
jsonschema-specifications==2024.10.1  # via jsonschema
linkchecker==10.5.0  # via mkdocs-ansible
markdown==3.7  # via markdown-include, mkdocs, mkdocs-autorefs, mkdocs-htmlproofer-plugin, mkdocs-material, mkdocstrings, pymdown-extensions
markdown-exec==1.10.0  # via mkdocs-ansible
markdown-include==0.8.1  # via mkdocs-ansible
markupsafe==3.0.2  # via jinja2, mkdocs, mkdocs-autorefs, mkdocstrings
mergedeep==1.3.4  # via mkdocs, mkdocs-get-deps
mkdocs==1.6.1  # via mkdocs-ansible, mkdocs-autorefs, mkdocs-gen-files, mkdocs-htmlproofer-plugin, mkdocs-macros-plugin, mkdocs-material, mkdocs-minify-plugin, mkdocs-monorepo-plugin, mkdocstrings
mkdocs-ansible==24.12.0  # via ansible-compat (pyproject.toml)
mkdocs-autorefs==1.3.1  # via mkdocstrings, mkdocstrings-python
mkdocs-gen-files==0.5.0  # via mkdocs-ansible
mkdocs-get-deps==0.2.0  # via mkdocs, mkdocstrings
mkdocs-htmlproofer-plugin==1.3.0  # via mkdocs-ansible
mkdocs-macros-plugin==1.3.7  # via mkdocs-ansible
mkdocs-material==9.6.4  # via mkdocs-ansible
mkdocs-material-extensions==1.3.1  # via mkdocs-ansible, mkdocs-material
mkdocs-minify-plugin==0.8.0  # via mkdocs-ansible
mkdocs-monorepo-plugin==1.1.0  # via mkdocs-ansible
mkdocstrings==0.28.1  # via mkdocs-ansible, mkdocstrings-python
mkdocstrings-python==1.15.0  # via mkdocs-ansible
mypy-extensions==1.0.0  # via black
packaging==24.2  # via ansible-core, black, mkdocs, mkdocs-macros-plugin, pytest, ansible-compat (pyproject.toml)
paginate==0.5.7  # via mkdocs-material
pathspec==0.12.1  # via black, mkdocs, mkdocs-macros-plugin
pillow==11.1.0  # via cairosvg, mkdocs-ansible
platformdirs==4.3.6  # via black, mkdocs-get-deps
pluggy==1.5.0  # via pytest
pycparser==2.22  # via cffi
pygments==2.19.1  # via mkdocs-material
pymdown-extensions==10.14.3  # via markdown-exec, mkdocs-ansible, mkdocs-material, mkdocstrings
pytest==8.3.4  # via pytest-instafail, pytest-mock, pytest-plus, ansible-compat (pyproject.toml)
pytest-instafail==0.5.0  # via ansible-compat (pyproject.toml)
pytest-mock==3.14.0  # via ansible-compat (pyproject.toml)
pytest-plus==0.8.1  # via ansible-compat (pyproject.toml)
python-dateutil==2.9.0.post0  # via ghp-import, mkdocs-macros-plugin
python-slugify==8.0.4  # via mkdocs-monorepo-plugin
pyyaml==6.0.2  # via ansible-core, mkdocs, mkdocs-get-deps, mkdocs-macros-plugin, pymdown-extensions, pyyaml-env-tag, ansible-compat (pyproject.toml)
pyyaml-env-tag==0.1  # via mkdocs
referencing==0.36.2  # via jsonschema, jsonschema-specifications
regex==2024.11.6  # via mkdocs-material
requests==2.32.3  # via linkchecker, mkdocs-htmlproofer-plugin, mkdocs-material
rpds-py==0.22.3  # via jsonschema, referencing
six==1.17.0  # via python-dateutil
soupsieve==2.6  # via beautifulsoup4
subprocess-tee==0.4.2  # via ansible-compat (pyproject.toml)
super-collections==0.5.3  # via mkdocs-macros-plugin
termcolor==2.5.0  # via mkdocs-macros-plugin
text-unidecode==1.3  # via python-slugify
tinycss2==1.4.0  # via cairosvg, cssselect2
tomli==2.2.1  # via argparse-manpage, black, pytest
urllib3==2.3.0  # via requests
watchdog==6.0.0  # via mkdocs
webencodings==0.5.1  # via cssselect2, tinycss2
# The following packages were excluded from the output:
# ansible-core
# pip
# resolvelib
# typing-extensions
# uv

ansible-compat-25.1.4/.config/pydoclint-baseline.txt

src/ansible_compat/config.py
DOC101: Function `parse_ansible_version`: Docstring contains fewer arguments than in function signature.
DOC103: Function `parse_ansible_version`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [stdout: str].
DOC201: Function `parse_ansible_version` does not have a return section in docstring
DOC501: Function `parse_ansible_version` has "raise" statements, but the docstring does not have a "Raises" section
DOC503: Function `parse_ansible_version` exceptions in the "Raises" section in the docstring do not match those in the function body. Raised exceptions in the docstring: []. Raised exceptions in the body: ['InvalidPrerequisiteError'].
DOC101: Function `ansible_version`: Docstring contains fewer arguments than in function signature.
DOC103: Function `ansible_version`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [version: str].
DOC201: Function `ansible_version` does not have a return section in docstring
DOC501: Function `ansible_version` has "raise" statements, but the docstring does not have a "Raises" section
DOC503: Function `ansible_version` exceptions in the "Raises" section in the docstring do not match those in the function body. Raised exceptions in the docstring: []. Raised exceptions in the body: ['MissingAnsibleError'].
DOC604: Class `AnsibleConfig`: Attributes are the same in docstring and class def, but are in a different order. (Please read https://jsh9.github.io/pydoclint/checking_class_attributes.html on how to correctly document class attributes.)
DOC605: Class `AnsibleConfig`: Attribute names match, but type hints in these attributes do not match: action_warnings, agnostic_become_prompt, allow_world_readable_tmpfiles, ansible_connection_path, ansible_cow_acceptlist, ansible_cow_path, ansible_cow_selection, ansible_force_color, ansible_nocolor, ansible_nocows, ansible_pipelining, any_errors_fatal, become_allow_same_user, become_plugin_path, cache_plugin, cache_plugin_connection, cache_plugin_prefix, cache_plugin_timeout, callable_accept_list, callbacks_enabled, collections_on_ansible_version_mismatch, collections_paths, collections_scan_sys_path, color_changed, color_console_prompt, color_debug, color_deprecate, color_diff_add, color_diff_lines, color_diff_remove, color_error, color_highlight, color_ok, color_skip, color_unreachable, color_verbose, color_warn, command_warnings, conditional_bare_vars, connection_facts_modules, controller_python_warning, coverage_remote_output, coverage_remote_paths, default_action_plugin_path, default_allow_unsafe_lookups, default_ask_pass, default_ask_vault_pass, default_become, default_become_ask_pass, default_become_exe, default_become_flags, default_become_method, default_become_user, default_cache_plugin_path, default_callback_plugin_path, default_cliconf_plugin_path, default_connection_plugin_path, default_debug, default_executable, default_fact_path, default_filter_plugin_path, default_force_handlers, default_forks, default_gather_subset, default_gather_timeout, default_gathering, default_handler_includes_static, default_hash_behaviour, default_host_list, default_httpapi_plugin_path, default_internal_poll_interval, default_inventory_plugin_path, default_jinja2_extensions, default_jinja2_native, default_keep_remote_files, default_libvirt_lxc_noseclabel, default_load_callback_plugins, default_local_tmp, default_log_filter, default_log_path, default_lookup_plugin_path, default_managed_str, default_module_args, default_module_compression, default_module_name, default_module_path, default_module_utils_path, default_netconf_plugin_path, default_no_log, default_no_target_syslog, default_null_representation, default_poll_interval, default_private_key_file, default_private_role_vars, default_remote_port, default_remote_user, default_collections_path, default_roles_path, default_selinux_special_fs, default_stdout_callback, default_strategy, default_strategy_plugin_path, default_su, default_syslog_facility, default_task_includes_static, default_terminal_plugin_path, default_test_plugin_path, default_timeout, default_transport, default_undefined_var_behavior, default_vars_plugin_path, default_vault_encrypt_identity, default_vault_id_match, default_vault_identity, default_vault_identity_list, default_vault_password_file, default_verbosity, deprecation_warnings, devel_warning, diff_always, diff_context, display_args_to_stdout, display_skipped_hosts, docsite_root_url, doc_fragment_plugin_path, duplicate_yaml_dict_key, enable_task_debugger, error_on_missing_handler, facts_modules, galaxy_cache_dir, galaxy_display_progress, galaxy_ignore_certs, galaxy_role_skeleton, galaxy_role_skeleton_ignore, galaxy_server, galaxy_server_list, galaxy_token_path, host_key_checking, host_pattern_mismatch, inject_facts_as_vars, interpreter_python, interpreter_python_distro_map, interpreter_python_fallback, invalid_task_attribute_failed, inventory_any_unparsed_is_failed, inventory_cache_enabled, inventory_cache_plugin, inventory_cache_plugin_connection, inventory_cache_plugin_prefix, inventory_cache_timeout, inventory_enabled, 
inventory_export, inventory_ignore_exts, inventory_ignore_patterns, inventory_unparsed_is_failed, localhost_warning, max_file_size_for_diff, module_ignore_exts, netconf_ssh_config, network_group_modules, old_plugin_cache_clearing, paramiko_host_key_auto_add, paramiko_look_for_keys, persistent_command_timeout, persistent_connect_retry_timeout, persistent_connect_timeout, persistent_control_path_dir, playbook_dir, playbook_vars_root, plugin_filters_cfg, python_module_rlimit_nofile, retry_files_enabled, retry_files_save_path, run_vars_plugins, show_custom_stats, string_conversion_action, string_type_filters, system_warnings, tags_run, tags_skip, task_debugger_ignore_errors, task_timeout, transform_invalid_group_chars, use_persistent_connections, variable_plugins_enabled, variable_precedence, verbose_to_stderr, win_async_startup_timeout, worker_shutdown_poll_count, worker_shutdown_poll_delay, yaml_filename_extensions (Please read https://jsh9.github.io/pydoclint/checking_class_attributes.html on how to correctly document class attributes.) DOC101: Method `AnsibleConfig.__init__`: Docstring contains fewer arguments than in function signature. DOC103: Method `AnsibleConfig.__init__`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [cache_dir: Path | None, config_dump: str | None, data: dict[str, object] | None]. DOC101: Method `AnsibleConfig.__getattribute__`: Docstring contains fewer arguments than in function signature. DOC103: Method `AnsibleConfig.__getattribute__`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [attr_name: str]. DOC201: Method `AnsibleConfig.__getattribute__` does not have a return section in docstring DOC101: Method `AnsibleConfig.__getitem__`: Docstring contains fewer arguments than in function signature. DOC103: Method `AnsibleConfig.__getitem__`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [name: str]. DOC201: Method `AnsibleConfig.__getitem__` does not have a return section in docstring DOC201: Method `AnsibleConfig.__copy__` does not have a return section in docstring DOC101: Method `AnsibleConfig.__deepcopy__`: Docstring contains fewer arguments than in function signature. DOC103: Method `AnsibleConfig.__deepcopy__`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [memo: object]. DOC201: Method `AnsibleConfig.__deepcopy__` does not have a return section in docstring -------------------- src/ansible_compat/errors.py DOC601: Class `AnsibleCompatError`: Class docstring contains fewer class attributes than actual class attributes. (Please read https://jsh9.github.io/pydoclint/checking_class_attributes.html on how to correctly document class attributes.) DOC603: Class `AnsibleCompatError`: Class docstring attributes are different from actual class attributes. 
(Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Attributes in the class definition but not in the docstring: [code: ]. (Please read https://jsh9.github.io/pydoclint/checking_class_attributes.html on how to correctly document class attributes.) DOC101: Method `AnsibleCompatError.__init__`: Docstring contains fewer arguments than in function signature. DOC103: Method `AnsibleCompatError.__init__`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [message: str | None, proc: CompletedProcess[Any] | None]. DOC101: Method `AnsibleCommandError.__init__`: Docstring contains fewer arguments than in function signature. DOC103: Method `AnsibleCommandError.__init__`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [proc: CompletedProcess[Any]]. DOC601: Class `MissingAnsibleError`: Class docstring contains fewer class attributes than actual class attributes. (Please read https://jsh9.github.io/pydoclint/checking_class_attributes.html on how to correctly document class attributes.) DOC603: Class `MissingAnsibleError`: Class docstring attributes are different from actual class attributes. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Attributes in the class definition but not in the docstring: [code: ]. (Please read https://jsh9.github.io/pydoclint/checking_class_attributes.html on how to correctly document class attributes.) DOC101: Method `MissingAnsibleError.__init__`: Docstring contains fewer arguments than in function signature. DOC103: Method `MissingAnsibleError.__init__`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [message: str | None, proc: CompletedProcess[Any] | None]. DOC601: Class `InvalidPrerequisiteError`: Class docstring contains fewer class attributes than actual class attributes. (Please read https://jsh9.github.io/pydoclint/checking_class_attributes.html on how to correctly document class attributes.) DOC603: Class `InvalidPrerequisiteError`: Class docstring attributes are different from actual class attributes. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Attributes in the class definition but not in the docstring: [code: ]. (Please read https://jsh9.github.io/pydoclint/checking_class_attributes.html on how to correctly document class attributes.) -------------------- src/ansible_compat/loaders.py DOC101: Function `yaml_from_file`: Docstring contains fewer arguments than in function signature. DOC103: Function `yaml_from_file`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [path: Path]. DOC201: Function `yaml_from_file` does not have a return section in docstring DOC101: Function `colpath_from_path`: Docstring contains fewer arguments than in function signature. 
DOC103: Function `colpath_from_path`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [path: Path]. DOC201: Function `colpath_from_path` does not have a return section in docstring DOC501: Function `colpath_from_path` has "raise" statements, but the docstring does not have a "Raises" section DOC503: Function `colpath_from_path` exceptions in the "Raises" section in the docstring do not match those in the function body. Raised exceptions in the docstring: []. Raised exceptions in the body: ['InvalidPrerequisiteError']. -------------------- src/ansible_compat/runtime.py DOC601: Class `Collection`: Class docstring contains fewer class attributes than actual class attributes. (Please read https://jsh9.github.io/pydoclint/checking_class_attributes.html on how to correctly document class attributes.) DOC603: Class `Collection`: Class docstring attributes are different from actual class attributes. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Attributes in the class definition but not in the docstring: [name: str, path: Path, version: str]. (Please read https://jsh9.github.io/pydoclint/checking_class_attributes.html on how to correctly document class attributes.) DOC101: Method `CollectionVersion.__init__`: Docstring contains fewer arguments than in function signature. DOC103: Method `CollectionVersion.__init__`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [version: str]. DOC601: Class `Plugins`: Class docstring contains fewer class attributes than actual class attributes. (Please read https://jsh9.github.io/pydoclint/checking_class_attributes.html on how to correctly document class attributes.) DOC603: Class `Plugins`: Class docstring attributes are different from actual class attributes. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Attributes in the class definition but not in the docstring: [become: dict[str, str], cache: dict[str, str], callback: dict[str, str], cliconf: dict[str, str], connection: dict[str, str], filter: dict[str, str], httpapi: dict[str, str], inventory: dict[str, str], keyword: dict[str, str], lookup: dict[str, str], module: dict[str, str], netconf: dict[str, str], role: dict[str, str], runtime: Runtime, shell: dict[str, str], strategy: dict[str, str], test: dict[str, str], vars: dict[str, str]]. (Please read https://jsh9.github.io/pydoclint/checking_class_attributes.html on how to correctly document class attributes.) DOC101: Method `Plugins.__getattribute__`: Docstring contains fewer arguments than in function signature. DOC103: Method `Plugins.__getattribute__`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [attr: str]. 
DOC201: Method `Plugins.__getattribute__` does not have a return section in docstring DOC501: Method `Plugins.__getattribute__` has "raise" statements, but the docstring does not have a "Raises" section DOC503: Method `Plugins.__getattribute__` exceptions in the "Raises" section in the docstring do not match those in the function body. Raised exceptions in the docstring: []. Raised exceptions in the body: ['AnsibleCompatError']. DOC601: Class `Runtime`: Class docstring contains fewer class attributes than actual class attributes. (Please read https://jsh9.github.io/pydoclint/checking_class_attributes.html on how to correctly document class attributes.) DOC603: Class `Runtime`: Class docstring attributes are different from actual class attributes. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Attributes in the class definition but not in the docstring: [_has_playbook_cache: dict[tuple[str, Path | None], bool], _version: Version | None, cache_dir: Path, collections: OrderedDict[str, Collection], initialized: bool, plugins: Plugins, require_module: bool]. (Please read https://jsh9.github.io/pydoclint/checking_class_attributes.html on how to correctly document class attributes.) DOC101: Method `Runtime.__init__`: Docstring contains fewer arguments than in function signature. DOC103: Method `Runtime.__init__`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [environ: dict[str, str] | None, isolated: bool, max_retries: int, min_required_version: str | None, project_dir: Path | None, require_module: bool, verbosity: int]. DOC501: Method `Runtime.__init__` has "raise" statements, but the docstring does not have a "Raises" section DOC503: Method `Runtime.__init__` exceptions in the "Raises" section in the docstring do not match those in the function body. Raised exceptions in the docstring: []. Raised exceptions in the body: ['RuntimeError']. DOC101: Function `warning`: Docstring contains fewer arguments than in function signature. DOC103: Function `warning`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [formatted: bool, msg: str, self: Display]. DOC101: Method `Runtime.initialize_logger`: Docstring contains fewer arguments than in function signature. DOC103: Method `Runtime.initialize_logger`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [level: int]. DOC501: Method `Runtime.load_collections` has "raise" statements, but the docstring does not have a "Raises" section DOC503: Method `Runtime.load_collections` exceptions in the "Raises" section in the docstring do not match those in the function body. Raised exceptions in the docstring: []. Raised exceptions in the body: ['RuntimeError', 'TypeError']. DOC501: Method `Runtime._ensure_module_available` has "raise" statements, but the docstring does not have a "Raises" section DOC503: Method `Runtime._ensure_module_available` exceptions in the "Raises" section in the docstring do not match those in the function body. 
Raised exceptions in the docstring: []. Raised exceptions in the body: ['RuntimeError']. DOC101: Method `Runtime.run`: Docstring contains fewer arguments than in function signature. DOC103: Method `Runtime.run`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [args: str | list[str], cwd: Path | None, env: dict[str, str] | None, retry: bool, set_acp: bool, tee: bool]. DOC201: Method `Runtime.run` does not have a return section in docstring DOC501: Method `Runtime.version` has "raise" statements, but the docstring does not have a "Raises" section DOC503: Method `Runtime.version` exceptions in the "Raises" section in the docstring do not match those in the function body. Raised exceptions in the docstring: []. Raised exceptions in the body: ['MissingAnsibleError']. DOC101: Method `Runtime.version_in_range`: Docstring contains fewer arguments than in function signature. DOC103: Method `Runtime.version_in_range`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [lower: str | None, upper: str | None]. DOC201: Method `Runtime.version_in_range` does not have a return section in docstring DOC101: Method `Runtime.has_playbook`: Docstring contains fewer arguments than in function signature. DOC103: Method `Runtime.has_playbook`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [basedir: Path | None, playbook: str]. DOC201: Method `Runtime.has_playbook` does not have a return section in docstring DOC101: Method `Runtime.install_collection`: Docstring contains fewer arguments than in function signature. DOC103: Method `Runtime.install_collection`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [collection: str | Path, destination: Path | None, force: bool]. DOC501: Method `Runtime.install_collection` has "raise" statements, but the docstring does not have a "Raises" section DOC503: Method `Runtime.install_collection` exceptions in the "Raises" section in the docstring do not match those in the function body. Raised exceptions in the docstring: []. Raised exceptions in the body: ['InvalidPrerequisiteError']. DOC101: Method `Runtime.install_collection_from_disk`: Docstring contains fewer arguments than in function signature. DOC103: Method `Runtime.install_collection_from_disk`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [destination: Path | None, path: Path]. DOC101: Method `Runtime.install_requirements`: Docstring contains fewer arguments than in function signature. DOC103: Method `Runtime.install_requirements`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). 
Arguments in the function signature but not in the docstring: [offline: bool, requirement: Path, retry: bool]. DOC501: Method `Runtime.install_requirements` has "raise" statements, but the docstring does not have a "Raises" section DOC503: Method `Runtime.install_requirements` exceptions in the "Raises" section in the docstring do not match those in the function body. Raised exceptions in the docstring: []. Raised exceptions in the body: ['AnsibleCommandError', 'InvalidPrerequisiteError']. DOC101: Method `Runtime.prepare_environment`: Docstring contains fewer arguments than in function signature. DOC103: Method `Runtime.prepare_environment`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [install_local: bool, offline: bool, required_collections: dict[str, str] | None, retry: bool, role_name_check: int]. DOC501: Method `Runtime.require_collection` has "raise" statements, but the docstring does not have a "Raises" section DOC503: Method `Runtime.require_collection` exceptions in the "Raises" section in the docstring do not match those in the function body. Raised exceptions in the docstring: []. Raised exceptions in the body: ['InvalidPrerequisiteError']. DOC501: Method `Runtime._prepare_ansible_paths` has "raise" statements, but the docstring does not have a "Raises" section DOC503: Method `Runtime._prepare_ansible_paths` exceptions in the "Raises" section in the docstring do not match those in the function body. Raised exceptions in the docstring: []. Raised exceptions in the body: ['RuntimeError']. DOC201: Method `Runtime._get_roles_path` does not have a return section in docstring DOC501: Method `Runtime._install_galaxy_role` has "raise" statements, but the docstring does not have a "Raises" section DOC503: Method `Runtime._install_galaxy_role` exceptions in the "Raises" section in the docstring do not match those in the function body. Raised exceptions in the docstring: []. Raised exceptions in the body: ['InvalidPrerequisiteError']. DOC101: Method `Runtime._update_env`: Docstring contains fewer arguments than in function signature. DOC103: Method `Runtime._update_env`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [default: str, value: list[str], varname: str]. DOC101: Function `_get_role_fqrn`: Docstring contains fewer arguments than in function signature. DOC103: Function `_get_role_fqrn`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [galaxy_infos: dict[str, Any], project_dir: Path]. DOC201: Function `_get_role_fqrn` does not have a return section in docstring DOC101: Function `_get_galaxy_role_ns`: Docstring contains fewer arguments than in function signature. DOC103: Function `_get_galaxy_role_ns`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [galaxy_infos: dict[str, Any]]. 
DOC201: Function `_get_galaxy_role_ns` does not have a return section in docstring DOC501: Function `_get_galaxy_role_ns` has "raise" statements, but the docstring does not have a "Raises" section DOC503: Function `_get_galaxy_role_ns` exceptions in the "Raises" section in the docstring do not match those in the function body. Raised exceptions in the docstring: []. Raised exceptions in the body: ['AnsibleCompatError']. DOC101: Function `_get_galaxy_role_name`: Docstring contains fewer arguments than in function signature. DOC103: Function `_get_galaxy_role_name`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [galaxy_infos: dict[str, Any]]. DOC201: Function `_get_galaxy_role_name` does not have a return section in docstring DOC101: Function `search_galaxy_paths`: Docstring contains fewer arguments than in function signature. DOC103: Function `search_galaxy_paths`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [search_dir: Path]. DOC101: Function `is_url`: Docstring contains fewer arguments than in function signature. DOC103: Function `is_url`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [name: str]. DOC201: Function `is_url` does not have a return section in docstring -------------------- src/ansible_compat/schema.py DOC601: Class `JsonSchemaError`: Class docstring contains fewer class attributes than actual class attributes. (Please read https://jsh9.github.io/pydoclint/checking_class_attributes.html on how to correctly document class attributes.) DOC603: Class `JsonSchemaError`: Class docstring attributes are different from actual class attributes. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Attributes in the class definition but not in the docstring: [data_path: str, expected: bool | int | str, found: str, json_path: str, message: str, relative_schema: str, schema_path: str, validator: str]. (Please read https://jsh9.github.io/pydoclint/checking_class_attributes.html on how to correctly document class attributes.) DOC201: Method `JsonSchemaError.to_friendly` does not have a return section in docstring -------------------- test/conftest.py DOC101: Function `runtime`: Docstring contains fewer arguments than in function signature. DOC103: Function `runtime`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [scope: str]. DOC402: Function `runtime` has "yield" statements, but the docstring does not have a "Yields" section DOC101: Function `runtime_tmp`: Docstring contains fewer arguments than in function signature. DOC103: Function `runtime_tmp`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). 
Arguments in the function signature but not in the docstring: [scope: str, tmp_path: pathlib.Path]. DOC402: Function `runtime_tmp` has "yield" statements, but the docstring does not have a "Yields" section DOC101: Function `query_pkg_version`: Docstring contains fewer arguments than in function signature. DOC103: Function `query_pkg_version`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [pkg: str]. DOC201: Function `query_pkg_version` does not have a return section in docstring DOC201: Function `pkg_version` does not have a return section in docstring DOC101: Method `VirtualEnvironment.__init__`: Docstring contains fewer arguments than in function signature. DOC103: Method `VirtualEnvironment.__init__`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [path: Path]. DOC101: Method `VirtualEnvironment.install`: Docstring contains fewer arguments than in function signature. DOC103: Method `VirtualEnvironment.install`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [*packages: str]. DOC101: Method `VirtualEnvironment.python_script_run`: Docstring contains fewer arguments than in function signature. DOC103: Method `VirtualEnvironment.python_script_run`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [script: str]. DOC201: Method `VirtualEnvironment.python_script_run` does not have a return section in docstring DOC201: Method `VirtualEnvironment.site_package_dirs` does not have a return section in docstring DOC501: Method `VirtualEnvironment.site_package_dirs` has "raise" statements, but the docstring does not have a "Raises" section DOC503: Method `VirtualEnvironment.site_package_dirs` exceptions in the "Raises" section in the docstring do not match those in the function body. Raised exceptions in the docstring: []. Raised exceptions in the body: ['TypeError']. DOC101: Function `venv_module`: Docstring contains fewer arguments than in function signature. DOC103: Function `venv_module`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [tmp_path_factory: pytest.TempPathFactory]. DOC201: Function `venv_module` does not have a return section in docstring -------------------- test/test_config.py DOC101: Function `test_ansible_version_missing`: Docstring contains fewer arguments than in function signature. DOC103: Function `test_ansible_version_missing`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [monkeypatch: MonkeyPatch]. 
-------------------- test/test_runtime.py DOC101: Function `test_runtime_version`: Docstring contains fewer arguments than in function signature. DOC103: Function `test_runtime_version`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [runtime: Runtime]. DOC101: Function `test_runtime_version_outdated`: Docstring contains fewer arguments than in function signature. DOC103: Function `test_runtime_version_outdated`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [require_module: bool]. DOC101: Function `test_runtime_missing_ansible_module`: Docstring contains fewer arguments than in function signature. DOC103: Function `test_runtime_missing_ansible_module`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [monkeypatch: MonkeyPatch]. DOC101: Method `RaiseException.__init__`: Docstring contains fewer arguments than in function signature. DOC103: Method `RaiseException.__init__`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [**kwargs: Any, *args: Any]. DOC501: Method `RaiseException.__init__` has "raise" statements, but the docstring does not have a "Raises" section DOC503: Method `RaiseException.__init__` exceptions in the "Raises" section in the docstring do not match those in the function body. Raised exceptions in the docstring: []. Raised exceptions in the body: ['ModuleNotFoundError']. DOC101: Function `test_runtime_mismatch_ansible_module`: Docstring contains fewer arguments than in function signature. DOC103: Function `test_runtime_mismatch_ansible_module`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [monkeypatch: MonkeyPatch]. DOC101: Function `test_runtime_version_fail_module`: Docstring contains fewer arguments than in function signature. DOC103: Function `test_runtime_version_fail_module`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [mocker: MockerFixture]. DOC101: Function `test_runtime_version_fail_cli`: Docstring contains fewer arguments than in function signature. DOC103: Function `test_runtime_version_fail_cli`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [mocker: MockerFixture]. DOC101: Function `test_runtime_install_role`: Docstring contains fewer arguments than in function signature. DOC103: Function `test_runtime_install_role`: Docstring arguments are different from function arguments. 
(Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [caplog: pytest.LogCaptureFixture, folder: str, isolated: bool, role_name: str]. DOC101: Function `test_prepare_environment_with_collections`: Docstring contains fewer arguments than in function signature. DOC103: Function `test_prepare_environment_with_collections`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [runtime_tmp: Runtime]. DOC101: Function `test_runtime_install_requirements_invalid_file`: Docstring contains fewer arguments than in function signature. DOC103: Function `test_runtime_install_requirements_invalid_file`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [exc: type[Any], file: Path, msg: str]. DOC101: Function `cwd`: Docstring contains fewer arguments than in function signature. DOC103: Function `cwd`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [path: Path]. DOC101: Function `test_prerun_reqs_v1`: Docstring contains fewer arguments than in function signature. DOC103: Function `test_prerun_reqs_v1`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [caplog: pytest.LogCaptureFixture]. DOC101: Function `test_prerun_reqs_v2`: Docstring contains fewer arguments than in function signature. DOC103: Function `test_prerun_reqs_v2`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [caplog: pytest.LogCaptureFixture]. DOC101: Function `test__update_env_no_old_value_no_default_no_value`: Docstring contains fewer arguments than in function signature. DOC103: Function `test__update_env_no_old_value_no_default_no_value`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [monkeypatch: MonkeyPatch]. DOC101: Function `test__update_env_no_old_value_no_value`: Docstring contains fewer arguments than in function signature. DOC103: Function `test__update_env_no_old_value_no_value`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [monkeypatch: MonkeyPatch]. DOC101: Function `test__update_env_no_default_no_value`: Docstring contains fewer arguments than in function signature. DOC103: Function `test__update_env_no_default_no_value`: Docstring arguments are different from function arguments. 
(Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [monkeypatch: MonkeyPatch]. DOC101: Function `test__update_env_no_old_value_no_default`: Docstring contains fewer arguments than in function signature. DOC103: Function `test__update_env_no_old_value_no_default`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [monkeypatch: MonkeyPatch, result: str, value: list[str]]. DOC101: Function `test__update_env_no_old_value`: Docstring contains fewer arguments than in function signature. DOC103: Function `test__update_env_no_old_value`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [default: str, monkeypatch: MonkeyPatch, result: str, value: list[str]]. DOC101: Function `test__update_env_no_default`: Docstring contains fewer arguments than in function signature. DOC103: Function `test__update_env_no_default`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [monkeypatch: MonkeyPatch, old_value: str, result: str, value: list[str]]. DOC101: Function `test__update_env`: Docstring contains fewer arguments than in function signature. DOC103: Function `test__update_env`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [default: str, monkeypatch: MonkeyPatch, old_value: str, result: str, value: list[str]]. DOC101: Function `test_require_collection_wrong_version`: Docstring contains fewer arguments than in function signature. DOC103: Function `test_require_collection_wrong_version`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [runtime: Runtime]. DOC101: Function `test_require_collection_invalid_name`: Docstring contains fewer arguments than in function signature. DOC103: Function `test_require_collection_invalid_name`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [runtime: Runtime]. DOC101: Function `test_require_collection_invalid_collections_path`: Docstring contains fewer arguments than in function signature. DOC103: Function `test_require_collection_invalid_collections_path`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [runtime: Runtime]. DOC101: Function `test_require_collection_preexisting_broken`: Docstring contains fewer arguments than in function signature. 
DOC103: Function `test_require_collection_preexisting_broken`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [runtime_tmp: Runtime]. DOC101: Function `test_require_collection_install`: Docstring contains fewer arguments than in function signature. DOC103: Function `test_require_collection_install`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [runtime_tmp: Runtime]. DOC101: Function `test_require_collection_missing`: Docstring contains fewer arguments than in function signature. DOC103: Function `test_require_collection_missing`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [install: bool, name: str, runtime: Runtime, version: str]. DOC101: Function `test_install_collection`: Docstring contains fewer arguments than in function signature. DOC103: Function `test_install_collection`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [runtime: Runtime]. DOC101: Function `test_install_collection_git`: Docstring contains fewer arguments than in function signature. DOC103: Function `test_install_collection_git`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [runtime: Runtime]. DOC101: Function `test_install_collection_dest`: Docstring contains fewer arguments than in function signature. DOC103: Function `test_install_collection_dest`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [runtime: Runtime, tmp_path: pathlib.Path]. DOC501: Function `test_install_collection_dest` has "raise" statements, but the docstring does not have a "Raises" section DOC503: Function `test_install_collection_dest` exceptions in the "Raises" section in the docstring do not match those in the function body. Raised exceptions in the docstring: []. Raised exceptions in the body: ['AssertionError']. DOC101: Function `test_install_collection_fail`: Docstring contains fewer arguments than in function signature. DOC103: Function `test_install_collection_fail`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [runtime: Runtime]. DOC101: Function `test_install_galaxy_role`: Docstring contains fewer arguments than in function signature. DOC103: Function `test_install_galaxy_role`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). 
Arguments in the function signature but not in the docstring: [runtime_tmp: Runtime]. DOC101: Function `test_install_galaxy_role_unlink`: Docstring contains fewer arguments than in function signature. DOC103: Function `test_install_galaxy_role_unlink`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [caplog: pytest.LogCaptureFixture]. DOC101: Function `test_install_galaxy_role_bad_namespace`: Docstring contains fewer arguments than in function signature. DOC103: Function `test_install_galaxy_role_bad_namespace`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [runtime_tmp: Runtime]. DOC101: Function `test_install_galaxy_role_no_meta`: Docstring contains fewer arguments than in function signature. DOC103: Function `test_install_galaxy_role_no_meta`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [runtime_tmp: Runtime]. DOC101: Function `test_install_galaxy_role_name_role_name_check_equals_to_1`: Docstring contains fewer arguments than in function signature. DOC103: Function `test_install_galaxy_role_name_role_name_check_equals_to_1`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [caplog: pytest.LogCaptureFixture, galaxy_info: str, runtime_tmp: Runtime]. DOC101: Function `test_install_galaxy_role_no_checks`: Docstring contains fewer arguments than in function signature. DOC103: Function `test_install_galaxy_role_no_checks`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [runtime_tmp: Runtime]. DOC101: Function `test_upgrade_collection`: Docstring contains fewer arguments than in function signature. DOC103: Function `test_upgrade_collection`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [runtime_tmp: Runtime]. DOC101: Function `test_runtime_env_ansible_library`: Docstring contains fewer arguments than in function signature. DOC103: Function `test_runtime_env_ansible_library`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [monkeypatch: MonkeyPatch]. DOC101: Function `test_runtime_version_in_range`: Docstring contains fewer arguments than in function signature. DOC103: Function `test_runtime_version_in_range`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). 
Arguments in the function signature but not in the docstring: [expected: bool, lower: str | None, upper: str | None]. DOC101: Function `test_install_collection_from_disk`: Docstring contains fewer arguments than in function signature. DOC103: Function `test_install_collection_from_disk`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [expected_collections: list[str], path: str, scenario: str]. DOC101: Function `test_load_plugins`: Docstring contains fewer arguments than in function signature. DOC103: Function `test_load_plugins`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [expected_plugins: list[str], path: str]. DOC101: Function `test_load_collections_failure`: Docstring contains fewer arguments than in function signature. DOC103: Function `test_load_collections_failure`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [mocker: MockerFixture]. DOC101: Function `test_load_collections_garbage`: Docstring contains fewer arguments than in function signature. DOC103: Function `test_load_collections_garbage`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [mocker: MockerFixture, value: str]. DOC101: Function `test_load_collections_invalid_json`: Docstring contains fewer arguments than in function signature. DOC103: Function `test_load_collections_invalid_json`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [mocker: MockerFixture, value: str]. DOC101: Function `test_prepare_environment_offline_role`: Docstring contains fewer arguments than in function signature. DOC103: Function `test_prepare_environment_offline_role`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [caplog: pytest.LogCaptureFixture]. DOC101: Function `test_runtime_run`: Docstring contains fewer arguments than in function signature. DOC103: Function `test_runtime_run`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [runtime: Runtime]. DOC101: Function `test_runtime_exec_cwd`: Docstring contains fewer arguments than in function signature. DOC103: Function `test_runtime_exec_cwd`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [runtime: Runtime]. 
DOC101: Function `test_runtime_exec_env`: Docstring contains fewer arguments than in function signature. DOC103: Function `test_runtime_exec_env`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [runtime: Runtime]. DOC101: Function `test_runtime_plugins`: Docstring contains fewer arguments than in function signature. DOC103: Function `test_runtime_plugins`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [runtime: Runtime]. DOC101: Function `test_galaxy_path`: Docstring contains fewer arguments than in function signature. DOC103: Function `test_galaxy_path`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [path: Path, result: list[Path]]. DOC101: Function `test_is_url`: Docstring contains fewer arguments than in function signature. DOC103: Function `test_is_url`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [name: str, result: bool]. DOC101: Function `test_prepare_environment_symlink`: Docstring contains fewer arguments than in function signature. DOC103: Function `test_prepare_environment_symlink`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [caplog: pytest.LogCaptureFixture, dest: str | Path, message: str]. DOC101: Function `test_runtime_exception`: Docstring contains fewer arguments than in function signature. DOC103: Function `test_runtime_exception`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [monkeypatch: pytest.MonkeyPatch]. -------------------- test/test_schema.py DOC101: Function `json_from_asset`: Docstring contains fewer arguments than in function signature. DOC103: Function `json_from_asset`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [file_name: str]. DOC201: Function `json_from_asset` does not have a return section in docstring DOC101: Function `jsonify`: Docstring contains fewer arguments than in function signature. DOC103: Function `jsonify`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [data: Any]. DOC201: Function `jsonify` does not have a return section in docstring DOC101: Function `test_schema`: Docstring contains fewer arguments than in function signature. 
DOC103: Function `test_schema`: Docstring arguments are different from function arguments. (Or could be other formatting issues: https://jsh9.github.io/pydoclint/violation_codes.html#notes-on-doc103 ). Arguments in the function signature but not in the docstring: [index: int]. -------------------- ansible-compat-25.1.4/.config/python3-ansible-compat.spec000066400000000000000000000025251475513774500232260ustar00rootroot00000000000000# spell-checker:ignore bcond pkgversion buildrequires autosetup PYTHONPATH noarch buildroot bindir sitelib numprocesses clib # All tests require Internet access # to test in mock use: --enable-network --with check # to test in a privileged environment use: # --with check --with privileged_tests %bcond_with check %bcond_with privileged_tests Name: ansible-compat Version: VERSION_PLACEHOLDER Release: 1%{?dist} Summary: Ansible-compat library License: GPL-3.0-or-later URL: https://github.com/ansible/ansible-compat Source0: %{pypi_source} BuildArch: noarch BuildRequires: python%{python3_pkgversion}-devel %if %{with check} # These are required for tests: BuildRequires: python%{python3_pkgversion}-pytest BuildRequires: python%{python3_pkgversion}-pytest-xdist BuildRequires: python%{python3_pkgversion}-libselinux BuildRequires: git-core %endif Requires: git-core %description Ansible-compat. %prep %autosetup %generate_buildrequires %pyproject_buildrequires %build %pyproject_wheel %install %pyproject_install %pyproject_save_files ansible_compat %check %pyproject_check_import %if %{with check} %pytest \ -v \ --disable-pytest-warnings \ --numprocesses=auto \ test %endif %files -f %{pyproject_files} %license LICENSE %doc docs/ README.md %changelog ansible-compat-25.1.4/.config/requirements-docs.in000066400000000000000000000000561475513774500220500ustar00rootroot00000000000000argparse-manpage black mkdocs-ansible>=24.3.1 ansible-compat-25.1.4/.config/requirements-test.in000066400000000000000000000001261475513774500220750ustar00rootroot00000000000000coverage pip pytest-instafail pytest-mock pytest-plus>=0.6.1 pytest>=7.2.0 uv>=0.4.30 ansible-compat-25.1.4/.config/requirements.in000066400000000000000000000002501475513774500211160ustar00rootroot00000000000000# https://docs.ansible.com/ansible/latest/reference_appendices/release_and_maintenance.html ansible-core>=2.16 packaging PyYAML subprocess-tee>=0.4.1 jsonschema>=4.6.0 ansible-compat-25.1.4/.git_archival.txt000066400000000000000000000002031475513774500177710ustar00rootroot00000000000000node: af45514a5aeae2acf1879ade9d7da941325e5186 node-date: 2025-02-18T12:14:13-05:00 describe-name: v25.1.4 ref-names: tag: v25.1.4 ansible-compat-25.1.4/.gitattributes000066400000000000000000000002211475513774500174110ustar00rootroot00000000000000# Force LF line endings for text files * text=auto eol=lf *.png binary # Needed for setuptools-scm-git-archive .git_archival.txt export-subst ansible-compat-25.1.4/.github/000077500000000000000000000000001475513774500160635ustar00rootroot00000000000000ansible-compat-25.1.4/.github/CODEOWNERS000066400000000000000000000000321475513774500174510ustar00rootroot00000000000000* @ansible/devtools ansible-compat-25.1.4/.github/CODE_OF_CONDUCT.md000066400000000000000000000002421475513774500206600ustar00rootroot00000000000000# Community Code of Conduct Please see the official [Ansible Community Code of Conduct](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html). 
ansible-compat-25.1.4/.github/ISSUE_TEMPLATE/000077500000000000000000000000001475513774500202465ustar00rootroot00000000000000ansible-compat-25.1.4/.github/ISSUE_TEMPLATE/bug_report.md000066400000000000000000000032371475513774500227450ustar00rootroot00000000000000--- name: Bug report about: > Create a bug report. Ensure that it does reproduce on the main branch with python >=3.10. For anything else, please use the discussion link below. labels: bug, new --- ##### Summary ##### Issue Type - Bug Report ##### OS / ENVIRONMENT ```console (paste below) python -c "import ansible_compat; print(ansible_compat.__version__)" ``` - ansible installation method: one of source, pip, OS package - ansible-compat installation method: one of source, pip, OS package ##### STEPS TO REPRODUCE ```console (paste below) ``` ##### Desired Behavior Possible security bugs should be reported via email to `security@ansible.com` ##### Actual Behavior Please give some details of what is happening. Include a [minimum complete verifiable example]. ```paste below ``` [minimum complete verifiable example]: http://stackoverflow.com/help/mcve ansible-compat-25.1.4/.github/dependabot.yml000066400000000000000000000006431475513774500207160ustar00rootroot00000000000000--- version: 2 updates: - package-ecosystem: pip directory: /.config/ schedule: day: sunday interval: weekly labels: - dependabot-deps-updates - skip-changelog groups: dependencies: patterns: - "*" - package-ecosystem: "github-actions" directory: "/" schedule: interval: daily labels: - "dependencies" - "skip-changelog" ansible-compat-25.1.4/.github/release-drafter.yml000066400000000000000000000001231475513774500216470ustar00rootroot00000000000000--- # see https://github.com/ansible/team-devtools _extends: ansible/team-devtools ansible-compat-25.1.4/.github/workflows/000077500000000000000000000000001475513774500201205ustar00rootroot00000000000000ansible-compat-25.1.4/.github/workflows/ack.yml000066400000000000000000000004171475513774500214030ustar00rootroot00000000000000# See https://github.com/ansible/team-devtools/blob/main/.github/workflows/ack.yml name: ack on: pull_request_target: types: [opened, labeled, unlabeled, synchronize] jobs: ack: uses: ansible/team-devtools/.github/workflows/ack.yml@main secrets: inherit ansible-compat-25.1.4/.github/workflows/push.yml000066400000000000000000000003761475513774500216300ustar00rootroot00000000000000# See https://github.com/ansible/team-devtools/blob/main/.github/workflows/push.yml name: push on: push: branches: - main - "releases/**" - "stable/**" jobs: ack: uses: ansible/team-devtools/.github/workflows/push.yml@main ansible-compat-25.1.4/.github/workflows/release.yml000066400000000000000000000015751475513774500222730ustar00rootroot00000000000000name: release on: release: types: [published] jobs: release: name: release ${{ github.event.ref }} # unable to use environment with uses/with, basically cannot reuse release pipelines environment: release runs-on: ubuntu-24.04 permissions: id-token: write env: FORCE_COLOR: 1 PY_COLORS: 1 TOX_PARALLEL_NO_SPINNER: 1 steps: - name: Switch to using Python 3.12 by default uses: actions/setup-python@v5 with: python-version: "3.12" - name: Install tox run: python3 -m pip install --user "tox>=4.0.0" - name: Check out src from Git uses: actions/checkout@v4 with: fetch-depth: 0 # needed by setuptools-scm - name: Build dists run: python3 -m tox -e pkg - name: Publish to pypi.org uses: pypa/gh-action-pypi-publish@unstable/v1 
ansible-compat-25.1.4/.github/workflows/tox.yml000066400000000000000000000013371475513774500214610ustar00rootroot00000000000000--- name: tox on: push: branches: - "main" - "releases/**" - "stable/**" pull_request: branches: - "main" workflow_call: concurrency: group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.sha }} cancel-in-progress: true jobs: tox: uses: ansible/team-devtools/.github/workflows/tox.yml@main with: jobs_producing_coverage: 9 other_names: | docs lint pkg py310-ansible217 py312-ansible216 py312-ansible217 py312-ansible218 py312-devel py313-ansible218 py313-devel py310-macos:tox -e py310 py313-macos:tox -e py313 smoke skip_explode: "1" ansible-compat-25.1.4/.gitignore000066400000000000000000000036751475513774500165260ustar00rootroot00000000000000# Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] *$py.class # C extensions *.so # Distribution / packaging .Python build/ develop-eggs/ dist/ downloads/ eggs/ .eggs/ lib/ lib64/ parts/ sdist/ var/ wheels/ pip-wheel-metadata/ share/python-wheels/ *.egg-info/ .installed.cfg *.egg MANIFEST # PyInstaller # Usually these files are written by a python script from a template # before PyInstaller builds the exe, so as to inject date/other infos into it. *.manifest /*.spec rpm/*.spec *.rpm # Installer logs pip-log.txt pip-delete-this-directory.txt # Unit test / coverage reports htmlcov/ .tox/ .nox/ .coverage .cache nosetests.xml coverage.xml *.cover *.py,cover .hypothesis/ .pytest_cache/ # Translations *.mo *.pot # Django stuff: *.log local_settings.py db.sqlite3 db.sqlite3-journal # Flask stuff: instance/ .webassets-cache # Scrapy stuff: .scrapy # Sphinx documentation docs/_build/ # PyBuilder target/ # Jupyter Notebook .ipynb_checkpoints # IPython profile_default/ ipython_config.py # pyenv .python-version # pipenv # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. # However, in case of collaboration, if having platform-specific dependencies or dependencies # having no cross-platform support, pipenv may install dependencies that don't work, or not # install all needed dependencies. #Pipfile.lock # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow __pypackages__/ # Celery stuff celerybeat-schedule celerybeat.pid # SageMath parsed files *.sage.py # Environments .env .venv env/ venv/ ENV/ env.bak/ venv.bak/ # Spyder project settings .spyderproject .spyproject # Rope project settings .ropeproject # mkdocs documentation /site # mypy .mypy_cache/ .dmypy.json dmypy.json # Pyre type checker .pyre/ .test-results *.lcov ansible_collections # Generated by setuptools-scm src/ansible_compat/_version.py node_modules _readthedocs test/roles/acme.missing_deps/.ansible .ansible ansible-compat-25.1.4/.packit.yaml000066400000000000000000000030261475513774500167410ustar00rootroot00000000000000--- # https://packit.dev/docs/configuration/ # Test locally running: packit build locally # spell-checker:ignore packit specfile copr epel specfile_path: dist/python3-ansible-compat.spec actions: create-archive: - sh -c "rm dist/*.tar.gz || true" - python3 -m build --sdist --outdir dist - sh -c "ls dist/ansible_compat-*.tar.gz" get-current-version: - ./tools/get-version.sh post-upstream-clone: - ./tools/update-spec.sh srpm_build_deps: - python3-build - python3-setuptools_scm - python3-pytest - python3-pytest-mock jobs: - job: copr_build trigger: pull_request branch: main require: label: present: - bug - dependencies - enhancement - major - minor absent: - skip-changelog targets: - fedora-rawhide-x86_64 - fedora-rawhide-aarch64 - fedora-latest-x86_64 - fedora-latest-aarch64 # Missing python3-build see https://bugzilla.redhat.com/show_bug.cgi?id=2129071 # - centos-stream-9-aarch64 # - centos-stream-9-x86_64 - job: tests trigger: pull_request branch: main require: label: present: - bug - dependencies - enhancement - major - minor absent: - skip-changelog targets: - fedora-latest - fedora-rawhide # - job: propose_downstream # trigger: release # metadata: # dist-git-branch: master notifications: pull_request: successful_build: false ansible-compat-25.1.4/.pre-commit-config.yaml000066400000000000000000000103471475513774500210110ustar00rootroot00000000000000--- ci: # format compatible with commitlint autoupdate_commit_msg: "chore: pre-commit autoupdate" autoupdate_schedule: monthly autofix_commit_msg: "chore: auto fixes from pre-commit.com hooks" skip: # https://github.com/pre-commit-ci/issues/issues/55 - ccv - pip-compile # No docker on pre-commit.ci - validate-config-in-container default_language_version: # Needed in order to make pip-compile output predictable. 
python: python3.10 exclude: | (?x)^( test/assets/.* )$ repos: - repo: https://github.com/astral-sh/ruff-pre-commit rev: "v0.9.4" hooks: - id: ruff args: [--fix, --exit-non-zero-on-fix] # https://github.com/pappasam/toml-sort/issues/69 # - repo: https://github.com/pappasam/toml-sort # rev: v0.23.1 # hooks: # - id: toml-sort-fix - repo: https://github.com/rbubley/mirrors-prettier # keep it before yamllint rev: "v3.4.2" hooks: - id: prettier additional_dependencies: - prettier - prettier-plugin-toml - prettier-plugin-sort-json - repo: https://github.com/pre-commit/pre-commit-hooks.git rev: v5.0.0 hooks: - id: end-of-file-fixer - id: trailing-whitespace exclude: > (?x)^( examples/playbooks/(with-skip-tag-id|unicode).yml| examples/playbooks/example.yml )$ - id: mixed-line-ending - id: fix-byte-order-marker - id: check-executables-have-shebangs - id: check-merge-conflict - id: debug-statements language_version: python3 - repo: https://github.com/codespell-project/codespell rev: v2.4.1 hooks: - id: codespell - repo: https://github.com/jsh9/pydoclint rev: 0.6.0 hooks: - id: pydoclint # This allows automatic reduction of the baseline file when needed. entry: sh -ec "pydoclint . && pydoclint --generate-baseline=1 ." pass_filenames: false - repo: https://github.com/adrienverge/yamllint.git rev: v1.35.1 hooks: - id: yamllint files: \.(yaml|yml)$ types: [file, yaml] entry: yamllint --strict - repo: https://github.com/pappasam/toml-sort rev: v0.24.2 hooks: - id: toml-sort-fix - repo: https://github.com/psf/black rev: 25.1.0 hooks: - id: black language_version: python3 - repo: https://github.com/pre-commit/mirrors-mypy rev: v1.14.1 hooks: - id: mypy # empty args needed in order to match mypy cli behavior args: ["--strict"] additional_dependencies: - ansible-core>=2.16.0 - cached_property - packaging - pytest - pytest-mock - subprocess-tee>=0.4.1 - "typing-extensions>=4.5.0;python_version<'3.10'" - types-PyYAML - types-setuptools - types-jsonschema>=4.4.9 - repo: https://github.com/pycqa/pylint rev: v3.3.4 hooks: - id: pylint additional_dependencies: - PyYAML - pytest - typing_extensions # Keep last due to being considerably slower than the others: - repo: local hooks: - id: pip-compile-upgrade # To run it execute: `pre-commit run pip-compile-upgrade --hook-stage manual` name: Upgrade constraints files and requirements files: ^(pyproject\.toml|requirements\.txt)$ language: python entry: python3 -m uv pip compile -q --all-extras --output-file=.config/constraints.txt pyproject.toml --upgrade pass_filenames: false stages: - manual additional_dependencies: - uv>=0.4.3 - id: pip-compile name: Check constraints files and requirements files: ^(pyproject\.toml|requirements\.txt)$ language: python entry: python3 -m uv pip compile -q --all-extras --output-file=.config/constraints.txt pyproject.toml pass_filenames: false additional_dependencies: - uv>=0.4.3 - repo: https://github.com/packit/pre-commit-hooks rev: v1.2.0 hooks: - id: validate-config-in-container name: packit alias: packit - repo: https://github.com/mashi/codecov-validator rev: "1.0.1" hooks: - id: ccv name: codecov ansible-compat-25.1.4/.prettierignore000066400000000000000000000001131475513774500175610ustar00rootroot00000000000000test/assets/ # Generated by setuptools-scm src/ansible_compat/_version.py ansible-compat-25.1.4/.prettierrc.yaml000066400000000000000000000005431475513774500176520ustar00rootroot00000000000000--- proseWrap: always jsonRecursiveSort: true # prettier-plugin-sort-json tabWidth: 2 useTabs: false overrides: - files: - "*.md" 
options: # compatibility with markdownlint proseWrap: always printWidth: 80 - files: - "*.yaml" - "*.yml" options: # compatibility with yamllint proseWrap: preserve ansible-compat-25.1.4/.readthedocs.yml000066400000000000000000000005641475513774500176160ustar00rootroot00000000000000--- version: 2 mkdocs: fail_on_warning: true configuration: mkdocs.yml build: os: ubuntu-24.04 tools: python: "3.11" commands: - pip install --user tox - python3 -m tox -e docs python: install: - method: pip path: tox - method: pip path: . extra_requirements: - docs submodules: include: all recursive: true ansible-compat-25.1.4/.sonarcloud.properties000066400000000000000000000001211475513774500210620ustar00rootroot00000000000000sonar.python.version=3.10, 3.11, 3.12, 3.13 sonar.sources=src/ sonar.tests=test/ ansible-compat-25.1.4/.taplo.toml000066400000000000000000000004321475513774500166140ustar00rootroot00000000000000[formatting] # compatibility between toml-sort-fix pre-commit hook and panekj.even-betterer-toml extension align_comments = false array_trailing_comma = false compact_arrays = true compact_entries = false compact_inline_tables = true inline_table_expand = false reorder_keys = true ansible-compat-25.1.4/.vscode/000077500000000000000000000000001475513774500160645ustar00rootroot00000000000000ansible-compat-25.1.4/.vscode/extensions.json000066400000000000000000000005651475513774500211640ustar00rootroot00000000000000{ "recommendations": [ "charliermarsh.ruff", "esbenp.prettier-vscode", "gruntfuggly.triggertaskonsave", "markis.code-coverage", "ms-python.black-formatter", "ms-python.debugpy", "ms-python.mypy-type-checker", "ms-python.pylint", "ms-python.python", "sonarsource.sonarlint-vscode", "streetsidesoftware.code-spell-checker" ] } ansible-compat-25.1.4/.vscode/settings.json000066400000000000000000000031441475513774500206210ustar00rootroot00000000000000{ "[json]": { "editor.defaultFormatter": "esbenp.prettier-vscode" }, "[jsonc]": { "editor.defaultFormatter": "esbenp.prettier-vscode" }, "[markdown]": { "editor.defaultFormatter": "esbenp.prettier-vscode" }, "[python]": { "editor.codeActionsOnSave": { "source.fixAll": "explicit", "source.organizeImports": "explicit" }, "editor.defaultFormatter": "ms-python.black-formatter", "editor.formatOnSave": true }, "[toml]": { "editor.defaultFormatter": "panekj.even-betterer-toml" }, "editor.formatOnSave": true, "evenBetterToml.formatter.alignComments": false, "evenBetterToml.formatter.arrayTrailingComma": true, "files.exclude": { "*.egg-info": true, ".pytest_cache": true, ".tox": true, "__pycache__": true, "build": true }, "git.ignoreLimitWarning": true, "grammarly.domain": "technical", "grammarly.files.include": ["**/*.txt", "**/*.md"], "grammarly.hideUnavailablePremiumAlerts": true, "grammarly.showExamples": true, "markiscodecoverage.searchCriteria": "coverage.lcov", "mypy-type-checker.importStrategy": "fromEnvironment", "mypy-type-checker.preferDaemon": true, "mypy-type-checker.reportingScope": "workspace", "python.analysis.exclude": ["build"], "python.terminal.activateEnvironment": true, "python.testing.pytestArgs": ["tests"], "python.testing.pytestEnabled": true, "python.testing.unittestEnabled": false, "sortLines.filterBlankLines": true, "yaml.completion": true, "yaml.customTags": ["!encrypted/pkcs1-oaep scalar", "!vault scalar"], "yaml.format.enable": false, "yaml.validate": true } ansible-compat-25.1.4/.yamllint000066400000000000000000000002571475513774500163610ustar00rootroot00000000000000rules: document-start: disable indentation: level: error 
indent-sequences: consistent ignore: | .tox # ignore added because this file includes on-purpose errors ansible-compat-25.1.4/LICENSE000066400000000000000000000021071475513774500155300ustar00rootroot00000000000000MIT License Copyright (c) 2021 Community managed Ansible repositories Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. ansible-compat-25.1.4/README.md000066400000000000000000000033251475513774500160050ustar00rootroot00000000000000# ansible-compat [![pypi](https://img.shields.io/pypi/v/ansible-compat.svg)](https://pypi.org/project/ansible-compat/) [![docs](https://readthedocs.org/projects/ansible-compat/badge/?version=latest)](https://ansible.readthedocs.io/projects/compat/) [![gh](https://github.com/ansible/ansible-compat/actions/workflows/tox.yml/badge.svg)](https://github.com/ansible/ansible-compat/actions/workflows/tox.yml) [![codecov.io](https://codecov.io/github/ansible/ansible-compat/coverage.svg?branch=main)](https://codecov.io/github/ansible/ansible-compat?branch=main) A python package with functions that facilitate working with various versions of Ansible. ## Documentation Documentation is available at [ansible.readthedocs.io/projects/compat/](https://ansible.readthedocs.io/projects/compat/). ## Communication Join the Ansible forum to ask questions, get help, and interact with the community. - [Get Help](https://forum.ansible.com/c/help/6): get help or help others. Please add appropriate tags if you start new discussions. - [Social Spaces](https://forum.ansible.com/c/chat/4): meet and interact with fellow enthusiasts. - [News & Announcements](https://forum.ansible.com/c/news/5): track project-wide announcements including social events. To get release announcements and important changes from the community, see the [Bullhorn newsletter](https://docs.ansible.com/ansible/devel/community/communication.html#the-bullhorn). For more information about getting in touch, see the [Ansible communication guide](https://docs.ansible.com/ansible/devel/community/communication.html). ## Code of Conduct Please see the official [Ansible Community Code of Conduct](https://docs.ansible.com/ansible/latest/community/code_of_conduct.html). ansible-compat-25.1.4/ansible.cfg000066400000000000000000000001321475513774500166170ustar00rootroot00000000000000[defaults] # isolate testing of ansible-compat from user local setup collections_path = .
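The repo-local `ansible.cfg` above pins `collections_path` so test runs do not pick up collections from the user's own setup. As a quick, hedged illustration (not a file from the repository), the effective value can be read back with this package's configuration helper; `AnsibleConfig` is listed in `docs/api.md`, while the attribute name used here is assumed for the example:

```python
# Sketch only: read back the collections path that Ansible actually resolves.
# Run from the repository root so the repo-local ansible.cfg is picked up.
# Assumes ansible_compat.config.AnsibleConfig; the attribute name is illustrative.
from ansible_compat.config import AnsibleConfig

config = AnsibleConfig()
print(config.collections_paths)
```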
ansible-compat-25.1.4/codecov.yml000066400000000000000000000001641475513774500166710ustar00rootroot00000000000000codecov: require_ci_to_pass: true comment: false coverage: status: patch: true # we want github annotations ansible-compat-25.1.4/docs/000077500000000000000000000000001475513774500154535ustar00rootroot00000000000000ansible-compat-25.1.4/docs/api.md000066400000000000000000000002521475513774500165450ustar00rootroot00000000000000# API ::: ansible_compat.config ::: ansible_compat.errors ::: ansible_compat.loaders ::: ansible_compat.prerun ::: ansible_compat.runtime ::: ansible_compat.schema ansible-compat-25.1.4/docs/images/000077500000000000000000000000001475513774500167205ustar00rootroot00000000000000ansible-compat-25.1.4/docs/images/favicon.ico000066400000000000000000000360561475513774500210520ustar00rootroot00000000000000(binary ICO image data omitted) ansible-compat-25.1.4/docs/images/logo.png000066400000000000000000000253611475513774500203730ustar00rootroot00000000000000(binary PNG image data omitted) ansible-compat-25.1.4/docs/images/logo.svg000066400000000000000000000020271475513774500204020ustar00rootroot00000000000000 ansible-compat-25.1.4/docs/index.md000066400000000000000000000005501475513774500171040ustar00rootroot00000000000000# Examples ## Using Ansible runtime ```python title="example.py" {!../test/test_runtime_example.py!} ``` ## Access to Ansible configuration As you may not want to parse `ansible-config dump` yourself, you can make use of a simple python class that facilitates access to it, using python data types.
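Before the bundled test example below, here is a minimal sketch of the same idea, assuming the `AnsibleConfig` class from `ansible_compat.config`; the option names are illustrative and may vary between Ansible versions:

```python
# Minimal sketch; the project's bundled example follows below.
# Assumes ansible_compat.config.AnsibleConfig; option names are illustrative.
from ansible_compat.config import AnsibleConfig

config = AnsibleConfig()
# Options are exposed as native Python types rather than raw strings,
# so there is no need to parse `ansible-config dump` output by hand.
print(type(config.action_warnings))  # expected to be bool
print(config.default_timeout)  # expected to be an int
```

Dictionary-style lookups may also be supported; the included example shows the exact access forms exercised by the test suite.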
```python {!../test/test_configuration_example.py!} ``` ansible-compat-25.1.4/examples/000077500000000000000000000000001475513774500163415ustar00rootroot00000000000000ansible-compat-25.1.4/examples/reqs_broken/000077500000000000000000000000001475513774500206535ustar00rootroot00000000000000ansible-compat-25.1.4/examples/reqs_broken/requirements.yml000066400000000000000000000001751475513774500241240ustar00rootroot00000000000000roles: [] collections: [] integration_tests_dependencies: [] # <-- invalid key unit_tests_dependencies: [] # <-- invalid key ansible-compat-25.1.4/examples/reqs_v1/000077500000000000000000000000001475513774500177215ustar00rootroot00000000000000ansible-compat-25.1.4/examples/reqs_v1/requirements.yml000066400000000000000000000002631475513774500231700ustar00rootroot00000000000000# v1 requirements test file # ansible-galaxy role install -r requirements.yml -p roles - src: git+https://github.com/geerlingguy/ansible-role-docker.git name: geerlingguy.mysql ansible-compat-25.1.4/examples/reqs_v2/000077500000000000000000000000001475513774500177225ustar00rootroot00000000000000ansible-compat-25.1.4/examples/reqs_v2/community-molecule-0.1.0.tar.gz000066400000000000000000000166771475513774500253410ustar00rootroot00000000000000(binary gzip archive data omitted) ansible-compat-25.1.4/examples/reqs_v2/requirements.yml000066400000000000000000000014561475513774500231740ustar00rootroot00000000000000--- # For local install run: # ansible-galaxy role install -r requirements.yml -p roles # ansible-galaxy collection install -r requirements.yml -p collections # # Current test file avoids using galaxy server on purpose, for resiliency roles: - src: git+https://github.com/geerlingguy/ansible-role-docker.git name: geerlingguy.mysql collections: - name: community-molecule-0.1.0.tar.gz # Also needed for testing purposes as this should trigger addition of --pre # argument as this is required due to # https://github.com/ansible/ansible-lint/issues/3686 # https://github.com/ansible/ansible/issues/79109 - name: https://github.com/ansible-collections/amazon.aws.git type: git version: main - name: https://github.com/ansible-collections/community.aws.git type: git version: main ansible-compat-25.1.4/mkdocs.yml000066400000000000000000000044751475513774500165360ustar00rootroot00000000000000--- site_name: Ansible Compat Library site_url: https://ansible.readthedocs.io/projects/compat/ repo_url: https://github.com/ansible/ansible-compat edit_uri: blob/main/docs/ copyright: Copyright © 2023 Red Hat, Inc. docs_dir: docs # strict: true watch: - mkdocs.yml - src - docs theme: name: ansible features: - content.code.copy - content.action.edit - navigation.expand - navigation.sections - navigation.instant - navigation.indexes - navigation.tracking - toc.integrate extra: social: - icon: fontawesome/brands/github-alt link: https://github.com/ansible/ansible-compat name: GitHub - icon: fontawesome/brands/discourse link: https://forum.ansible.com/c/project/7 name: Ansible forum nav: - examples: index.md - api: api.md plugins: - autorefs - search - material/social - material/tags - mkdocstrings: handlers: python: inventories: - https://docs.python.org/3/objects.inv options: # heading_level: 2 docstring_style: google docstring_options: ignore_init_summary: yes show_submodules: no docstring_section_style: list members_order: alphabetical show_category_heading: no # cannot merge init into class due to parse error...
# merge_init_into_class: yes # separate_signature: yes show_root_heading: yes show_signature_annotations: yes separate_signature: yes # show_bases: false # options: # show_root_heading: true # docstring_style: sphinx markdown_extensions: - markdown_include.include: base_path: docs - admonition - def_list - footnotes - pymdownx.highlight: anchor_linenums: true - pymdownx.inlinehilite - pymdownx.superfences - pymdownx.magiclink: repo_url_shortener: true repo_url_shorthand: true social_url_shorthand: true social_url_shortener: true user: facelessuser repo: pymdown-extensions normalize_issue_symbols: true - pymdownx.tabbed: alternate_style: true - toc: toc_depth: 2 permalink: true - pymdownx.superfences: custom_fences: - name: mermaid class: mermaid format: "" ansible-compat-25.1.4/package-lock.json000066400000000000000000000012541475513774500177410ustar00rootroot00000000000000{ "name": "ansible-compat", "lockfileVersion": 3, "requires": true, "packages": { "": { "devDependencies": { "prettier": "^3.4.2" } }, "node_modules/prettier": { "version": "3.4.2", "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.4.2.tgz", "integrity": "sha512-e9MewbtFo+Fevyuxn/4rrcDAaq0IYxPGLvObpQjiZBMAzB9IGmzlnG9RZy3FFas+eBMu2vA0CszMeduow5dIuQ==", "dev": true, "license": "MIT", "bin": { "prettier": "bin/prettier.cjs" }, "engines": { "node": ">=14" }, "funding": { "url": "https://github.com/prettier/prettier?sponsor=1" } } } } ansible-compat-25.1.4/package.json000066400000000000000000000000701475513774500170060ustar00rootroot00000000000000{ "devDependencies": { "prettier": "^3.4.2" } } ansible-compat-25.1.4/pyproject.toml000066400000000000000000000407131475513774500174440ustar00rootroot00000000000000[build-system] build-backend = "setuptools.build_meta" requires = [ "setuptools >= 65.3.0", # required by pyproject+setuptools_scm integration and editable installs "setuptools_scm[toml] >= 7.0.5" # required for "no-local-version" scheme ] [project] authors = [{"email" = "ssbarnea@redhat.com", "name" = "Sorin Sbarnea"}] classifiers = [ "Development Status :: 5 - Production/Stable", "Environment :: Console", "Intended Audience :: Developers", "Intended Audience :: Information Technology", "Intended Audience :: System Administrators", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", "Programming Language :: Python", "Topic :: System :: Systems Administration", "Topic :: Software Development :: Bug Tracking", "Topic :: Software Development :: Quality Assurance", "Topic :: Software Development :: Testing", "Topic :: Utilities" ] description = "Ansible compatibility goodies" dynamic = ["version", "dependencies", "optional-dependencies"] keywords = ["ansible"] license = {text = "MIT"} maintainers = [{"email" = "ssbarnea@redhat.com", "name" = "Sorin Sbarnea"}] name = "ansible-compat" readme = "README.md" # https://peps.python.org/pep-0621/#readme requires-python = ">=3.10" [project.urls] changelog = "https://github.com/ansible/ansible-compat/releases" documentation = "https://ansible.readthedocs.io/projects/compat/" homepage = "https://github.com/ansible/ansible-compat" repository = "https://github.com/ansible/ansible-compat" [tool.coverage.report] exclude_also = ["pragma: no cover", "if TYPE_CHECKING:"] fail_under = 100 show_missing = true skip_covered = true [tool.coverage.run] # Do 
not use branch until bug is fixes: # https://github.com/nedbat/coveragepy/issues/605 branch = false concurrency = ["multiprocessing", "thread"] parallel = true source = ["src"] [tool.isort] profile = "black" [tool.mypy] color_output = true disallow_any_generics = true disallow_untyped_calls = true disallow_untyped_defs = true error_summary = true # disallow_any_unimported = True # ; warn_redundant_casts = True # warn_return_any = True # warn_unused_configs = True exclude = "test/local-content" python_version = "3.10" [[tool.mypy.overrides]] ignore_missing_imports = true module = "ansible.*" [[tool.mypy.overrides]] ignore_errors = true # generated by setuptools-scm, can be missing during linting module = "ansible_compat._version" [tool.pydoclint] allow-init-docstring = true arg-type-hints-in-docstring = false baseline = ".config/pydoclint-baseline.txt" check-return-types = false check-yield-types = false exclude = '\.ansible|\.cache|\.git|\.tox|build|ansible_collections|out|venv' should-document-private-class-attributes = true show-filenames-in-every-violation-message = true skip-checking-short-docstrings = false style = "google" [tool.pylint.BASIC] good-names = [ "f", # filename "i", "j", "k", "ns", # namespace "ex", "Run", "_" ] [tool.pylint.IMPORTS] preferred-modules = ["unittest:pytest"] [tool.pylint.MASTER] # _version.py is generated by setuptools-scm. ignore-paths = "^src/ansible_compat/_version.py" [tool.pylint."MESSAGES CONTROL"] disable = [ "unknown-option-value", # https://gist.github.com/cidrblock/ec3412bacfeb34dbc2d334c1d53bef83 "C0103", # invalid-name / ruff N815 "C0105", # typevar-name-incorrect-variance / ruff PLC0105 "C0112", # empty-docstring / ruff D419 "C0113", # unneeded-not / ruff SIM208 "C0114", # missing-module-docstring / ruff D100 "C0115", # missing-class-docstring / ruff D101 "C0116", # missing-function-docstring / ruff D103 "C0121", # singleton-comparison / ruff PLC0121 "C0123", # unidiomatic-typecheck / ruff E721 "C0131", # typevar-double-variance / ruff PLC0131 "C0132", # typevar-name-mismatch / ruff PLC0132 "C0198", # bad-docstring-quotes / ruff Q002 "C0199", # docstring-first-line-empty / ruff D210 "C0201", # consider-iterating-dictionary / ruff SIM118 "C0202", # bad-classmethod-argument / ruff PLC0202 "C0205", # single-string-used-for-slots / ruff PLC0205 "C0208", # use-sequence-for-iteration / ruff PLC0208 "C0301", # line-too-long / ruff E501 "C0303", # trailing-whitespace / ruff W291 "C0304", # missing-final-newline / ruff W292 "C0321", # multiple-statements / ruff PLC0321 "C0410", # multiple-imports / ruff E401 "C0411", # wrong-import-order / ruff I001 "C0412", # ungrouped-imports / ruff I001 "C0413", # wrong-import-position / ruff E402 "C0414", # useless-import-alias / ruff PLC0414 "C0415", # import-outside-toplevel / ruff PLC0415 "C0501", # consider-using-any-or-all / ruff PLC0501 "C1901", # compare-to-empty-string / ruff PLC1901 "C2201", # misplaced-comparison-constant / ruff SIM300 "C2401", # non-ascii-name / ruff PLC2401 "C2403", # non-ascii-module-import / ruff PLC2403 "C2701", # import-private-name / ruff PLC2701 "C2801", # unnecessary-dunder-call / ruff PLC2801 "C3001", # unnecessary-lambda-assignment / ruff E731 "C3002", # unnecessary-direct-lambda-call / ruff PLC3002 "E0001", # syntax-error / ruff E999 "E0100", # init-is-generator / ruff PLE0100 "E0101", # return-in-init / ruff PLE0101 "E0102", # function-redefined / ruff F811 "E0103", # not-in-loop / ruff PLE0103 "E0104", # return-outside-function / ruff F706 "E0105", # yield-outside-function / 
ruff F704 "E0107", # nonexistent-operator / ruff B002 "E0112", # too-many-star-expressions / ruff F622 "E0115", # nonlocal-and-global / ruff PLE0115 "E0116", # continue-in-finally / ruff PLE0116 "E0117", # nonlocal-without-binding / ruff PLE0117 "E0118", # used-prior-global-declaration / ruff PLE0118 "E0211", # no-method-argument / ruff N805 "E0213", # no-self-argument / ruff N805 "E0237", # assigning-non-slot / ruff PLE0237 "E0241", # duplicate-bases / ruff PLE0241 "E0302", # unexpected-special-method-signature / ruff PLE0302 "E0303", # invalid-length-returned / ruff PLE0303 "E0304", # invalid-bool-returned / ruff PLE0304 "E0305", # invalid-index-returned / ruff PLE0305 "E0308", # invalid-bytes-returned / ruff PLE0308 "E0309", # invalid-hash-returned / ruff PLE0309 "E0402", # relative-beyond-top-level / ruff TID252 "E0602", # undefined-variable / ruff F821 "E0603", # undefined-all-variable / ruff F822 "E0604", # invalid-all-object / ruff PLE0604 "E0605", # invalid-all-format / ruff PLE0605 "E0643", # potential-index-error / ruff PLE0643 "E0704", # misplaced-bare-raise / ruff PLE0704 "E0711", # notimplemented-raised / ruff F901 "E1132", # repeated-keyword / ruff PLE1132 "E1142", # await-outside-async / ruff PLE1142 "E1205", # logging-too-many-args / ruff PLE1205 "E1206", # logging-too-few-args / ruff PLE1206 "E1300", # bad-format-character / ruff PLE1300 "E1301", # truncated-format-string / ruff F501 "E1302", # mixed-format-string / ruff F506 "E1303", # format-needs-mapping / ruff F502 "E1304", # missing-format-string-key / ruff F524 "E1305", # too-many-format-args / ruff F522 "E1306", # too-few-format-args / ruff F524 "E1307", # bad-string-format-type / ruff PLE1307 "E1310", # bad-str-strip-call / ruff PLE1310 "E1519", # singledispatch-method / ruff PLE1519 "E1520", # singledispatchmethod-function / ruff PLE5120 "E1700", # yield-inside-async-function / ruff PLE1700 "E2502", # bidirectional-unicode / ruff PLE2502 "E2510", # invalid-character-backspace / ruff PLE2510 "E2512", # invalid-character-sub / ruff PLE2512 "E2513", # invalid-character-esc / ruff PLE2513 "E2514", # invalid-character-nul / ruff PLE2514 "E2515", # invalid-character-zero-width-space / ruff PLE2515 "E4703", # modified-iterating-set / ruff PLE4703 "R0123", # literal-comparison / ruff F632 "R0124", # comparison-with-itself / ruff PLR0124 "R0133", # comparison-of-constants / ruff PLR0133 "R0202", # no-classmethod-decorator / ruff PLR0202 "R0203", # no-staticmethod-decorator / ruff PLR0203 "R0205", # useless-object-inheritance / ruff UP004 "R0206", # property-with-parameters / ruff PLR0206 "R0904", # too-many-public-methods / ruff PLR0904 "R0911", # too-many-return-statements / ruff PLR0911 "R0912", # too-many-branches / ruff PLR0912 "R0913", # too-many-arguments / ruff PLR0913 "R0914", # too-many-locals / ruff PLR0914 "R0915", # too-many-statements / ruff PLR0915 "R0916", # too-many-boolean-expressions / ruff PLR0916 "R0917", # too-many-positional-arguments / ruff PLR0917 "R1260", # too-complex / ruff C901 "R1701", # consider-merging-isinstance / ruff PLR1701 "R1702", # too-many-nested-blocks / ruff PLR1702 "R1703", # simplifiable-if-statement / ruff SIM108 "R1704", # redefined-argument-from-local / ruff PLR1704 "R1705", # no-else-return / ruff RET505 "R1706", # consider-using-ternary / ruff PLR1706 "R1707", # trailing-comma-tuple / ruff COM818 "R1710", # inconsistent-return-statements / ruff PLR1710 "R1711", # useless-return / ruff PLR1711 "R1714", # consider-using-in / ruff PLR1714 "R1715", # consider-using-get / ruff 
SIM401 "R1717", # consider-using-dict-comprehension / ruff C402 "R1718", # consider-using-set-comprehension / ruff C401 "R1719", # simplifiable-if-expression / ruff PLR1719 "R1720", # no-else-raise / ruff RET506 "R1721", # unnecessary-comprehension / ruff C416 "R1722", # consider-using-sys-exit / ruff PLR1722 "R1723", # no-else-break / ruff RET508 "R1724", # no-else-continue / ruff RET507 "R1725", # super-with-arguments / ruff UP008 "R1728", # consider-using-generator / ruff C417 "R1729", # use-a-generator / ruff C419 "R1730", # consider-using-min-builtin / ruff PLR1730 "R1731", # consider-using-max-builtin / ruff PLR1730 "R1732", # consider-using-with / ruff SIM115 "R1733", # unnecessary-dict-index-lookup / ruff PLR1733 "R1734", # use-list-literal / ruff C405 "R1735", # use-dict-literal / ruff C406 "R1736", # unnecessary-list-index-lookup / ruff PLR1736 "R2004", # magic-value-comparison / ruff PLR2004 "R2044", # empty-comment / ruff PLR2044 "R5501", # else-if-used / ruff PLR5501 "R6002", # consider-using-alias / ruff UP006 "R6003", # consider-alternative-union-syntax / ruff UP007 "R6104", # consider-using-augmented-assign / ruff PLR6104 "R6201", # use-set-for-membership / ruff PLR6201 "R6301", # no-self-use / ruff PLR6301 "W0102", # dangerous-default-value / ruff B006 "W0104", # pointless-statement / ruff B018 "W0106", # expression-not-assigned / ruff B018 "W0107", # unnecessary-pass / ruff PIE790 "W0108", # unnecessary-lambda / ruff PLW0108 "W0109", # duplicate-key / ruff F601 "W0120", # useless-else-on-loop / ruff PLW0120 "W0122", # exec-used / ruff S102 "W0123", # eval-used / ruff PGH001 "W0127", # self-assigning-variable / ruff PLW0127 "W0129", # assert-on-string-literal / ruff PLW0129 "W0130", # duplicate-value / ruff B033 "W0131", # named-expr-without-context / ruff PLW0131 "W0133", # pointless-exception-statement / ruff PLW0133 "W0150", # lost-exception / ruff B012 "W0160", # consider-ternary-expression / ruff SIM108 "W0177", # nan-comparison / ruff PLW0117 "W0199", # assert-on-tuple / ruff F631 "W0211", # bad-staticmethod-argument / ruff PLW0211 "W0212", # protected-access / ruff SLF001 "W0245", # super-without-brackets / ruff PLW0245 "W0301", # unnecessary-semicolon / ruff E703 "W0401", # wildcard-import / ruff F403 "W0404", # reimported / ruff F811 "W0406", # import-self / ruff PLW0406 "W0410", # misplaced-future / ruff F404 "W0511", # fixme / ruff PLW0511 "W0602", # global-variable-not-assigned / ruff PLW0602 "W0603", # global-statement / ruff PLW0603 "W0604", # global-at-module-level / ruff PLW0604 "W0611", # unused-import / ruff F401 "W0612", # unused-variable / ruff F841 "W0613", # unused-argument / ruff ARG001 "W0622", # redefined-builtin / ruff A001 "W0640", # cell-var-from-loop / ruff B023 "W0702", # bare-except / ruff E722 "W0705", # duplicate-except / ruff B014 "W0706", # try-except-raise / ruff TRY302 "W0707", # raise-missing-from / ruff TRY200 "W0711", # binary-op-exception / ruff PLW0711 "W0718", # broad-exception-caught / ruff PLW0718 "W0719", # broad-exception-raised / ruff TRY002 "W1113", # keyword-arg-before-vararg / ruff B026 "W1201", # logging-not-lazy / ruff G "W1202", # logging-format-interpolation / ruff G "W1203", # logging-fstring-interpolation / ruff G "W1300", # bad-format-string-key / ruff PLW1300 "W1301", # unused-format-string-key / ruff F504 "W1302", # bad-format-string / ruff PLW1302 "W1303", # missing-format-argument-key / ruff F524 "W1304", # unused-format-string-argument / ruff F507 "W1305", # format-combined-specification / ruff F525 "W1308", # 
duplicate-string-formatting-argument / ruff PLW1308 "W1309", # f-string-without-interpolation / ruff F541 "W1310", # format-string-without-interpolation / ruff F541 "W1401", # anomalous-backslash-in-string / ruff W605 "W1404", # implicit-str-concat / ruff ISC001 "W1405", # inconsistent-quotes / ruff Q000 "W1406", # redundant-u-string-prefix / ruff UP025 "W1501", # bad-open-mode / ruff PLW1501 "W1508", # invalid-envvar-default / ruff PLW1508 "W1509", # subprocess-popen-preexec-fn / ruff PLW1509 "W1510", # subprocess-run-check / ruff PLW1510 "W1514", # unspecified-encoding / ruff PLW1514 "W1515", # forgotten-debug-statement / ruff T100 "W1518", # method-cache-max-size-none / ruff B019 "W1641", # eq-without-hash / ruff PLW1641 "W2101", # useless-with-lock / ruff PLW2101 "W2402", # non-ascii-file-name / ruff N999 "W2901", # redefined-loop-name / ruff PLW2901 "W3201", # bad-dunder-name / ruff PLW3201 "W3301", # nested-min-max / ruff PLW3301 "duplicate-code", "fixme", "too-few-public-methods", "unsubscriptable-object", # On purpose disabled as we rely on black "line-too-long", "protected-access", # covered by ruff SLF001 # local imports do not work well with pre-commit hook "import-error", # already covered by ruff which is faster "too-many-arguments", # PLR0913 "raise-missing-from", # Temporary disable duplicate detection until we remove old code from prerun "duplicate-code" ] [tool.pytest.ini_options] addopts = "-p no:pytest_cov --durations=10 --durations-min=1.0 --failed-first --instafail" # ensure we treat warnings as error filterwarnings = [ "error", # py312 ansible-core # https://github.com/ansible/ansible/issues/81906 "ignore:'importlib.abc.TraversableResources' is deprecated and slated for removal in Python 3.14:DeprecationWarning" ] testpaths = ["test"] [tool.ruff] extend-include = ["src/ansible_compat/_version.py"] preview = true target-version = "py310" [tool.ruff.format] docstring-code-format = true [tool.ruff.lint] external = [ "DOC" # pydoclint ] ignore = [ # Disabled on purpose: "CPY001", "D203", # incompatible with D211 "D211", "D213", # incompatible with D212 "E501", # we use black "RET504", # Unnecessary variable assignment before `return` statement # Temporary disabled during adoption: "S607", # Starting a process with a partial executable path "PLR0912", # Bug https://github.com/charliermarsh/ruff/issues/4244 "PLR0913", # Bug https://github.com/charliermarsh/ruff/issues/4244 "RUF012", "PERF203", "DOC201", # preview "PLC0415", "DOC501" # preview ] select = ["ALL"] [tool.ruff.lint.flake8-pytest-style] parametrize-values-type = "tuple" [tool.ruff.lint.isort] known-first-party = ["ansible_compat"] known-third-party = ["packaging"] [tool.ruff.lint.per-file-ignores] "test/**/*.py" = [ "DOC402", "DOC501", "FBT001", "PLC2701", "PLR0917", "S101", "S404", "S603", # subprocess "S607", # subprocess "SLF001" ] "tools/*.py" = ["S603"] [tool.ruff.lint.pydocstyle] convention = "google" [tool.setuptools.dynamic] dependencies = {file = [".config/requirements.in"]} optional-dependencies.docs = {file = [".config/requirements-docs.in"]} optional-dependencies.test = {file = [".config/requirements-test.in"]} [tool.setuptools_scm] # To prevent accidental pick of mobile version tags such as 'v6' git_describe_command = [ "git", "describe", "--dirty", "--tags", "--long", "--match", "v*.*" ] local_scheme = "no-local-version" tag_regex = "^(?P<prefix>v)?(?P<version>\\d+[^\\+]*)(?P<suffix>.*)?$" write_to = "src/ansible_compat/_version.py" [tool.tomlsort] in_place = true sort_inline_tables = true sort_table_keys = true [tool.uv.pip]
annotation-style = "line" custom-compile-command = "tox run deps" no-emit-package = ["ansible-core", "pip", "resolvelib", "typing_extensions", "uv"] ansible-compat-25.1.4/src/000077500000000000000000000000001475513774500153125ustar00rootroot00000000000000ansible-compat-25.1.4/src/ansible_compat/000077500000000000000000000000001475513774500202725ustar00rootroot00000000000000ansible-compat-25.1.4/src/ansible_compat/__init__.py000066400000000000000000000003671475513774500224110ustar00rootroot00000000000000"""ansible_compat package.""" from importlib.metadata import PackageNotFoundError, version try: __version__ = version("ansible-compat") except PackageNotFoundError: # pragma: no cover __version__ = "0.1.dev1" __all__ = ["__version__"] ansible-compat-25.1.4/src/ansible_compat/config.py000066400000000000000000000524321475513774500221170ustar00rootroot00000000000000"""Store configuration options as a singleton.""" from __future__ import annotations import ast import copy import os import re import subprocess # noqa: S404 from collections import UserDict from typing import TYPE_CHECKING, Literal from packaging.version import Version from ansible_compat.constants import ANSIBLE_MIN_VERSION from ansible_compat.errors import InvalidPrerequisiteError, MissingAnsibleError from ansible_compat.ports import cache if TYPE_CHECKING: # pragma: no cover from pathlib import Path def parse_ansible_version(stdout: str) -> Version: """Parse output of 'ansible --version'.""" # Ansible can produce extra output before displaying version in debug mode. # ansible-core 2.11+: 'ansible [core 2.11.3]' match = re.search( r"^ansible \[(?:core|base) (?P<version>[^\]]+)\]", stdout, re.MULTILINE, ) if match: return Version(match.group("version")) msg = f"Unable to parse ansible cli version: {stdout}\nKeep in mind that only {ANSIBLE_MIN_VERSION } or newer are supported." raise InvalidPrerequisiteError(msg) @cache def ansible_version(version: str = "") -> Version: """Return current Version object for Ansible. If version is not mentioned, it returns current version as detected. When the version argument is mentioned, it converts the version string to a Version object in order to make it usable in comparisons. """ if version: return Version(version) proc = subprocess.run( # noqa: S603 ["ansible", "--version"], text=True, check=False, capture_output=True, ) if proc.returncode != 0: raise MissingAnsibleError(proc=proc) return parse_ansible_version(proc.stdout) class AnsibleConfig( UserDict[str, object], ): # pylint: disable=too-many-ancestors # noqa: DOC605 """Interface to query Ansible configuration. This should allow the user to access everything provided by `ansible-config dump` without having to parse the data themselves.
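A minimal usage sketch (illustrative only, assuming a working `ansible-config` executable on PATH): `cfg = AnsibleConfig()` runs `ansible-config dump` once and stores the parsed result, after which options are reachable both as attributes and as case-insensitive keys, e.g. `cfg.default_forks` and `cfg["default_forks"]` resolve to the same `DEFAULT_FORKS` entry.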
Attributes: _aliases: action_warnings: agnostic_become_prompt: allow_world_readable_tmpfiles: ansible_connection_path: ansible_cow_acceptlist: ansible_cow_path: ansible_cow_selection: ansible_force_color: ansible_nocolor: ansible_nocows: ansible_pipelining: any_errors_fatal: become_allow_same_user: become_plugin_path: cache_plugin: cache_plugin_connection: cache_plugin_prefix: cache_plugin_timeout: callable_accept_list: callbacks_enabled: collections_on_ansible_version_mismatch: collections_paths: collections_scan_sys_path: color_changed: color_console_prompt: color_debug: color_deprecate: color_diff_add: color_diff_lines: color_diff_remove: color_error: color_highlight: color_ok: color_skip: color_unreachable: color_verbose: color_warn: command_warnings: conditional_bare_vars: connection_facts_modules: controller_python_warning: coverage_remote_output: coverage_remote_paths: default_action_plugin_path: default_allow_unsafe_lookups: default_ask_pass: default_ask_vault_pass: default_become: default_become_ask_pass: default_become_exe: default_become_flags: default_become_method: default_become_user: default_cache_plugin_path: default_callback_plugin_path: default_cliconf_plugin_path: default_collections_path: default_connection_plugin_path: default_debug: default_executable: default_fact_path: default_filter_plugin_path: default_force_handlers: default_forks: default_gather_subset: default_gather_timeout: default_gathering: default_handler_includes_static: default_hash_behaviour: default_host_list: default_httpapi_plugin_path: default_internal_poll_interval: default_inventory_plugin_path: default_jinja2_extensions: default_jinja2_native: default_keep_remote_files: default_libvirt_lxc_noseclabel: default_load_callback_plugins: default_local_tmp: default_log_filter: default_log_path: default_lookup_plugin_path: default_managed_str: default_module_args: default_module_compression: default_module_name: default_module_path: default_module_utils_path: default_netconf_plugin_path: default_no_log: default_no_target_syslog: default_null_representation: default_poll_interval: default_private_key_file: default_private_role_vars: default_remote_port: default_remote_user: default_roles_path: default_selinux_special_fs: default_stdout_callback: default_strategy: default_strategy_plugin_path: default_su: default_syslog_facility: default_task_includes_static: default_terminal_plugin_path: default_test_plugin_path: default_timeout: default_transport: default_undefined_var_behavior: default_vars_plugin_path: default_vault_encrypt_identity: default_vault_id_match: default_vault_identity: default_vault_identity_list: default_vault_password_file: default_verbosity: deprecation_warnings: devel_warning: diff_always: diff_context: display_args_to_stdout: display_skipped_hosts: doc_fragment_plugin_path: docsite_root_url: duplicate_yaml_dict_key: enable_task_debugger: error_on_missing_handler: facts_modules: galaxy_cache_dir: galaxy_display_progress: galaxy_ignore_certs: galaxy_role_skeleton: galaxy_role_skeleton_ignore: galaxy_server: galaxy_server_list: galaxy_token_path: host_key_checking: host_pattern_mismatch: inject_facts_as_vars: interpreter_python: interpreter_python_distro_map: interpreter_python_fallback: invalid_task_attribute_failed: inventory_any_unparsed_is_failed: inventory_cache_enabled: inventory_cache_plugin: inventory_cache_plugin_connection: inventory_cache_plugin_prefix: inventory_cache_timeout: inventory_enabled: inventory_export: inventory_ignore_exts: inventory_ignore_patterns: 
inventory_unparsed_is_failed: localhost_warning: max_file_size_for_diff: module_ignore_exts: netconf_ssh_config: network_group_modules: old_plugin_cache_clearing: paramiko_host_key_auto_add: paramiko_look_for_keys: persistent_command_timeout: persistent_connect_retry_timeout: persistent_connect_timeout: persistent_control_path_dir: playbook_dir: playbook_vars_root: plugin_filters_cfg: python_module_rlimit_nofile: retry_files_enabled: retry_files_save_path: run_vars_plugins: show_custom_stats: string_conversion_action: string_type_filters: system_warnings: tags_run: tags_skip: task_debugger_ignore_errors: task_timeout: transform_invalid_group_chars: use_persistent_connections: variable_plugins_enabled: variable_precedence: verbose_to_stderr: win_async_startup_timeout: worker_shutdown_poll_count: worker_shutdown_poll_delay: yaml_filename_extensions: """ _aliases = { "COLLECTIONS_PATH": "COLLECTIONS_PATHS", # 2.9 -> 2.10 } # Expose some attributes to enable auto-complete in editors, based on # https://docs.ansible.com/ansible/latest/reference_appendices/config.html action_warnings: bool = True agnostic_become_prompt: bool = True allow_world_readable_tmpfiles: bool = False ansible_connection_path: str | None = None ansible_cow_acceptlist: list[str] ansible_cow_path: str | None = None ansible_cow_selection: str = "default" ansible_force_color: bool = False ansible_nocolor: bool = False ansible_nocows: bool = False ansible_pipelining: bool = False any_errors_fatal: bool = False become_allow_same_user: bool = False become_plugin_path: list[str] = [ "~/.ansible/plugins/become", "/usr/share/ansible/plugins/become", ] cache_plugin: str = "memory" cache_plugin_connection: str | None = None cache_plugin_prefix: str = "ansible_facts" cache_plugin_timeout: int = 86400 callable_accept_list: list[str] = [] callbacks_enabled: list[str] = [] collections_on_ansible_version_mismatch: Literal["warning", "ignore"] = "warning" collections_paths: list[str] = [ "~/.ansible/collections", "/usr/share/ansible/collections", ] collections_scan_sys_path: bool = True color_changed: str = "yellow" color_console_prompt: str = "white" color_debug: str = "dark gray" color_deprecate: str = "purple" color_diff_add: str = "green" color_diff_lines: str = "cyan" color_diff_remove: str = "red" color_error: str = "red" color_highlight: str = "white" color_ok: str = "green" color_skip: str = "cyan" color_unreachable: str = "bright red" color_verbose: str = "blue" color_warn: str = "bright purple" command_warnings: bool = False conditional_bare_vars: bool = False connection_facts_modules: dict[str, str] controller_python_warning: bool = True coverage_remote_output: str | None coverage_remote_paths: list[str] default_action_plugin_path: list[str] = [ "~/.ansible/plugins/action", "/usr/share/ansible/plugins/action", ] default_allow_unsafe_lookups: bool = False default_ask_pass: bool = False default_ask_vault_pass: bool = False default_become: bool = False default_become_ask_pass: bool = False default_become_exe: str | None = None default_become_flags: str default_become_method: str = "sudo" default_become_user: str = "root" default_cache_plugin_path: list[str] = [ "~/.ansible/plugins/cache", "/usr/share/ansible/plugins/cache", ] default_callback_plugin_path: list[str] = [ "~/.ansible/plugins/callback", "/usr/share/ansible/plugins/callback", ] default_cliconf_plugin_path: list[str] = [ "~/.ansible/plugins/cliconf", "/usr/share/ansible/plugins/cliconf", ] default_connection_plugin_path: list[str] = [ "~/.ansible/plugins/connection", 
"/usr/share/ansible/plugins/connection", ] default_debug: bool = False default_executable: str = "/bin/sh" default_fact_path: str | None = None default_filter_plugin_path: list[str] = [ "~/.ansible/plugins/filter", "/usr/share/ansible/plugins/filter", ] default_force_handlers: bool = False default_forks: int = 5 default_gather_subset: list[str] = ["all"] default_gather_timeout: int = 10 default_gathering: Literal["smart", "explicit", "implicit"] = "smart" default_handler_includes_static: bool = False default_hash_behaviour: str = "replace" default_host_list: list[str] = ["/etc/ansible/hosts"] default_httpapi_plugin_path: list[str] = [ "~/.ansible/plugins/httpapi", "/usr/share/ansible/plugins/httpapi", ] default_internal_poll_interval: float = 0.001 default_inventory_plugin_path: list[str] = [ "~/.ansible/plugins/inventory", "/usr/share/ansible/plugins/inventory", ] default_jinja2_extensions: list[str] = [] default_jinja2_native: bool = False default_keep_remote_files: bool = False default_libvirt_lxc_noseclabel: bool = False default_load_callback_plugins: bool = False default_local_tmp: str = "~/.ansible/tmp" default_log_filter: list[str] = [] default_log_path: str | None = None default_lookup_plugin_path: list[str] = [ "~/.ansible/plugins/lookup", "/usr/share/ansible/plugins/lookup", ] default_managed_str: str = "Ansible managed" default_module_args: str default_module_compression: str = "ZIP_DEFLATED" default_module_name: str = "command" default_module_path: list[str] = [ "~/.ansible/plugins/modules", "/usr/share/ansible/plugins/modules", ] default_module_utils_path: list[str] = [ "~/.ansible/plugins/module_utils", "/usr/share/ansible/plugins/module_utils", ] default_netconf_plugin_path: list[str] = [ "~/.ansible/plugins/netconf", "/usr/share/ansible/plugins/netconf", ] default_no_log: bool = False default_no_target_syslog: bool = False default_null_representation: str | None = None default_poll_interval: int = 15 default_private_key_file: str | None = None default_private_role_vars: bool = False default_remote_port: str | None = None default_remote_user: str | None = None # https://docs.ansible.com/ansible/latest/reference_appendices/config.html#collections-paths default_collections_path: list[str] = [ "~/.ansible/collections", "/usr/share/ansible/collections", ] default_roles_path: list[str] = [ "~/.ansible/roles", "/usr/share/ansible/roles", "/etc/ansible/roles", ] default_selinux_special_fs: list[str] = [ "fuse", "nfs", "vboxsf", "ramfs", "9p", "vfat", ] default_stdout_callback: str = "default" default_strategy: str = "linear" default_strategy_plugin_path: list[str] = [ "~/.ansible/plugins/strategy", "/usr/share/ansible/plugins/strategy", ] default_su: bool = False default_syslog_facility: str = "LOG_USER" default_task_includes_static: bool = False default_terminal_plugin_path: list[str] = [ "~/.ansible/plugins/terminal", "/usr/share/ansible/plugins/terminal", ] default_test_plugin_path: list[str] = [ "~/.ansible/plugins/test", "/usr/share/ansible/plugins/test", ] default_timeout: int = 10 default_transport: str = "smart" default_undefined_var_behavior: bool = True default_vars_plugin_path: list[str] = [ "~/.ansible/plugins/vars", "/usr/share/ansible/plugins/vars", ] default_vault_encrypt_identity: str | None = None default_vault_id_match: bool = False default_vault_identity: str = "default" default_vault_identity_list: list[str] = [] default_vault_password_file: str | None = None default_verbosity: int = 0 deprecation_warnings: bool = False devel_warning: bool = True diff_always: 
bool = False diff_context: int = 3 display_args_to_stdout: bool = False display_skipped_hosts: bool = True docsite_root_url: str = "https://docs.ansible.com/ansible/" doc_fragment_plugin_path: list[str] = [ "~/.ansible/plugins/doc_fragments", "/usr/share/ansible/plugins/doc_fragments", ] duplicate_yaml_dict_key: Literal["warn", "error", "ignore"] = "warn" enable_task_debugger: bool = False error_on_missing_handler: bool = True facts_modules: list[str] = ["smart"] galaxy_cache_dir: str = "~/.ansible/galaxy_cache" galaxy_display_progress: str | None = None galaxy_ignore_certs: bool = False galaxy_role_skeleton: str | None = None galaxy_role_skeleton_ignore: list[str] = ["^.git$", "^.*/.git_keep$"] galaxy_server: str = "https://galaxy.ansible.com" galaxy_server_list: str | None = None galaxy_token_path: str = "~/.ansible/galaxy_token" # noqa: S105 host_key_checking: bool = True host_pattern_mismatch: Literal["warning", "error", "ignore"] = "warning" inject_facts_as_vars: bool = True interpreter_python: str = "auto_legacy" interpreter_python_distro_map: dict[str, str] interpreter_python_fallback: list[str] invalid_task_attribute_failed: bool = True inventory_any_unparsed_is_failed: bool = False inventory_cache_enabled: bool = False inventory_cache_plugin: str | None = None inventory_cache_plugin_connection: str | None = None inventory_cache_plugin_prefix: str = "ansible_facts" inventory_cache_timeout: int = 3600 inventory_enabled: list[str] = [ "host_list", "script", "auto", "yaml", "ini", "toml", ] inventory_export: bool = False inventory_ignore_exts: str inventory_ignore_patterns: list[str] = [] inventory_unparsed_is_failed: bool = False localhost_warning: bool = True max_file_size_for_diff: int = 104448 module_ignore_exts: str netconf_ssh_config: str | None = None network_group_modules: list[str] = [ "eos", "nxos", "ios", "iosxr", "junos", "enos", "ce", "vyos", "sros", "dellos9", "dellos10", "dellos6", "asa", "aruba", "aireos", "bigip", "ironware", "onyx", "netconf", "exos", "voss", "slxos", ] old_plugin_cache_clearing: bool = False paramiko_host_key_auto_add: bool = False paramiko_look_for_keys: bool = True persistent_command_timeout: int = 30 persistent_connect_retry_timeout: int = 15 persistent_connect_timeout: int = 30 persistent_control_path_dir: str = "~/.ansible/pc" playbook_dir: str | None playbook_vars_root: Literal["top", "bottom", "all"] = "top" plugin_filters_cfg: str | None = None python_module_rlimit_nofile: int = 0 retry_files_enabled: bool = False retry_files_save_path: str | None = None run_vars_plugins: str = "demand" show_custom_stats: bool = False string_conversion_action: Literal["warn", "error", "ignore"] = "warn" string_type_filters: list[str] = [ "string", "to_json", "to_nice_json", "to_yaml", "to_nice_yaml", "ppretty", "json", ] system_warnings: bool = True tags_run: list[str] = [] tags_skip: list[str] = [] task_debugger_ignore_errors: bool = True task_timeout: int = 0 transform_invalid_group_chars: Literal[ "always", "never", "ignore", "silently", ] = "never" use_persistent_connections: bool = False variable_plugins_enabled: list[str] = ["host_group_vars"] variable_precedence: list[str] = [ "all_inventory", "groups_inventory", "all_plugins_inventory", "all_plugins_play", "groups_plugins_inventory", "groups_plugins_play", ] verbose_to_stderr: bool = False win_async_startup_timeout: int = 5 worker_shutdown_poll_count: int = 0 worker_shutdown_poll_delay: float = 0.1 yaml_filename_extensions: list[str] = [".yml", ".yaml", ".json"] def __init__( self, config_dump: str | 
None = None, data: dict[str, object] | None = None, cache_dir: Path | None = None, ) -> None: """Load config dictionary.""" super().__init__() self.cache_dir = cache_dir if data: self.data = copy.deepcopy(data) else: if not config_dump: env = os.environ.copy() # Avoid possible ANSI garbage env["ANSIBLE_FORCE_COLOR"] = "0" config_dump = subprocess.check_output( # noqa: S603 ["ansible-config", "dump"], universal_newlines=True, env=env, ) for match in re.finditer( r"^(?P<key>[A-Za-z0-9_]+).* = (?P<value>.*)$", config_dump, re.MULTILINE, ): key = match.groupdict()["key"] value = match.groupdict()["value"] try: self[key] = ast.literal_eval(value) except (NameError, SyntaxError, ValueError): self[key] = value if data: return def __getattribute__(self, attr_name: str) -> object: """Allow access of config options as attributes.""" parent_dict = super().__dict__ # pylint: disable=no-member if attr_name in parent_dict: return parent_dict[attr_name] data = super().__getattribute__("data") if attr_name == "data": # pragma: no cover return data name = attr_name.upper() if name in data: return data[name] if name in AnsibleConfig._aliases: return data[AnsibleConfig._aliases[name]] return super().__getattribute__(attr_name) def __getitem__(self, name: str) -> object: """Allow access to config options using indexing.""" return super().__getitem__(name.upper()) def __copy__(self) -> AnsibleConfig: """Allow users to run copy on Config.""" return AnsibleConfig(data=self.data) def __deepcopy__(self, memo: object) -> AnsibleConfig: """Allow users to run deepcopy on Config.""" return AnsibleConfig(data=self.data) __all__ = [ "AnsibleConfig", "ansible_version", "parse_ansible_version", ] ansible-compat-25.1.4/src/ansible_compat/constants.py000066400000000000000000000030561475513774500226640ustar00rootroot00000000000000"""Constants used by ansible_compat.""" from pathlib import Path META_MAIN = (Path("meta") / Path("main.yml"), Path("meta") / Path("main.yaml")) REQUIREMENT_LOCATIONS = [ "requirements.yml", "roles/requirements.yml", "collections/requirements.yml", # This is more or less the official way to store test requirements in collections so far, comments show the number of repos using this as reported by https://sourcegraph.com/ at the time of writing "tests/requirements.yml", # 170 "tests/integration/requirements.yml", # 3 "tests/unit/requirements.yml", # 1 ] # Minimal version of Ansible we support for runtime ANSIBLE_MIN_VERSION = "2.16" # Based on https://docs.ansible.com/ansible/latest/reference_appendices/config.html ANSIBLE_DEFAULT_ROLES_PATH = ( "~/.ansible/roles:/usr/share/ansible/roles:/etc/ansible/roles" ) INVALID_CONFIG_RC = 2 ANSIBLE_MISSING_RC = 4 INVALID_PREREQUISITES_RC = 10 MSG_INVALID_FQRL = """\ Computed fully qualified role name of {0} does not follow current galaxy requirements. Please edit meta/main.yml and assure we can correctly determine full role name: galaxy_info: role_name: my_name # if absent directory name hosting role is used instead namespace: my_galaxy_namespace # if absent, author is used instead Namespace: https://old-galaxy.ansible.com/docs/contributing/namespaces.html#galaxy-namespace-limitations Role: https://old-galaxy.ansible.com/docs/contributing/creating_role.html#role-names As an alternative, you can add 'role-name' to either skip_list or warn_list.
""" RC_ANSIBLE_OPTIONS_ERROR = 5 ansible-compat-25.1.4/src/ansible_compat/errors.py000066400000000000000000000030721475513774500221620ustar00rootroot00000000000000"""Module to deal with errors.""" from __future__ import annotations from typing import TYPE_CHECKING, Any from ansible_compat.constants import ANSIBLE_MISSING_RC, INVALID_PREREQUISITES_RC if TYPE_CHECKING: # pragma: no cover from subprocess import CompletedProcess # noqa: S404 class AnsibleCompatError(RuntimeError): """Generic error originating from ansible_compat library.""" code = 1 # generic error def __init__( self, message: str | None = None, proc: CompletedProcess[Any] | None = None, ) -> None: """Construct generic library exception.""" super().__init__(message) self.proc = proc class AnsibleCommandError(RuntimeError): """Exception running an Ansible command.""" def __init__(self, proc: CompletedProcess[Any]) -> None: """Construct an exception given a completed process.""" message = ( f"Got {proc.returncode} exit code while running: {' '.join(proc.args)}" ) super().__init__(message) self.proc = proc class MissingAnsibleError(AnsibleCompatError): """Reports a missing or broken Ansible installation.""" code = ANSIBLE_MISSING_RC def __init__( self, message: str | None = "Unable to find a working copy of ansible executable.", proc: CompletedProcess[Any] | None = None, ) -> None: """.""" super().__init__(message) self.proc = proc class InvalidPrerequisiteError(AnsibleCompatError): """Reports a missing requirement.""" code = INVALID_PREREQUISITES_RC ansible-compat-25.1.4/src/ansible_compat/loaders.py000066400000000000000000000016551475513774500223040ustar00rootroot00000000000000"""Utilities for loading various files.""" from __future__ import annotations from typing import TYPE_CHECKING, Any import yaml from ansible_compat.errors import InvalidPrerequisiteError if TYPE_CHECKING: # pragma: no cover from pathlib import Path def yaml_from_file(path: Path) -> Any: # noqa: ANN401 """Return a loaded YAML file.""" with path.open(encoding="utf-8") as content: return yaml.load(content, Loader=yaml.SafeLoader) def colpath_from_path(path: Path) -> str | None: """Return a FQCN from a path.""" galaxy_file = path / "galaxy.yml" if galaxy_file.exists(): galaxy = yaml_from_file(galaxy_file) for k in ("namespace", "name"): if k not in galaxy: msg = f"{galaxy_file} is missing the following mandatory field {k}" raise InvalidPrerequisiteError(msg) return f"{galaxy['namespace']}/{galaxy['name']}" return None ansible-compat-25.1.4/src/ansible_compat/ports.py000066400000000000000000000001611475513774500220110ustar00rootroot00000000000000"""Portability helpers.""" from functools import cache, cached_property __all__ = ["cache", "cached_property"] ansible-compat-25.1.4/src/ansible_compat/prerun.py000066400000000000000000000061151475513774500221620ustar00rootroot00000000000000"""Utilities for configuring ansible runtime environment.""" import hashlib import os import tempfile import warnings from pathlib import Path def is_writable(path: Path) -> bool: """Check if path is writable, creating if necessary. Args: path: Path to check. Returns: True if path is writable, False otherwise. """ try: path.mkdir(parents=True, exist_ok=True) except OSError: return False return path.exists() and os.access(path, os.W_OK) def get_cache_dir(project_dir: Path, *, isolated: bool = True) -> Path: """Compute cache directory to be used based on project path. Args: project_dir: Path to the project directory. isolated: Whether to use isolated cache directory. 
Returns: A writable cache directory. Raises: OSError: if cache directory is not writable. RuntimeError: if cache directory cannot be created. """ cache_dir: Path | None = None if "VIRTUAL_ENV" in os.environ: path = Path(os.environ["VIRTUAL_ENV"]).resolve() / ".ansible" if is_writable(path): cache_dir = path else: msg = f"Found VIRTUAL_ENV={os.environ['VIRTUAL_ENV']} but we cannot use it for caching as it is not writable." warnings.warn( message=msg, stacklevel=2, source={"msg": msg}, ) if isolated: project_dir = project_dir.resolve() / ".ansible" if is_writable(project_dir): cache_dir = project_dir else: msg = f"Project directory {project_dir} cannot be used for caching as it is not writable." warnings.warn(msg, stacklevel=2) else: cache_dir = Path(os.environ.get("ANSIBLE_HOME", "~/.ansible")).expanduser() # This code should never be reached because import from ansible-core # would trigger a fatal error if this location is not writable. if not is_writable(cache_dir): # pragma: no cover msg = f"Cache directory {cache_dir} is not writable." raise OSError(msg) if not cache_dir: # As "project_dir" can also be "/" and user might not be able # to write to it, we use a temporary directory as fallback. checksum = hashlib.sha256( project_dir.as_posix().encode("utf-8"), ).hexdigest()[:4] cache_dir = Path(tempfile.gettempdir()) / f".ansible-{checksum}" cache_dir.mkdir(parents=True, exist_ok=True) msg = f"Using unique temporary directory {cache_dir} for caching." warnings.warn(msg, stacklevel=2) # Ensure basic folder structure exists so `ansible-galaxy list` does not # fail with: None of the provided paths were usable. Please specify a valid path with try: for name in ("roles", "collections"): (cache_dir / name).mkdir(parents=True, exist_ok=True) except OSError as exc: # pragma: no cover msg = "Failed to create cache directory." raise RuntimeError(msg) from exc # We succeed only if the path is writable.
return cache_dir ansible-compat-25.1.4/src/ansible_compat/py.typed000066400000000000000000000000001475513774500217570ustar00rootroot00000000000000ansible-compat-25.1.4/src/ansible_compat/runtime.py000066400000000000000000001161261475513774500223360ustar00rootroot00000000000000"""Ansible runtime environment manager.""" # pylint: disable=too-many-lines from __future__ import annotations import contextlib import importlib import json import logging import os import re import shutil import site import subprocess # noqa: S404 import sys import warnings from collections import OrderedDict from dataclasses import dataclass, field from pathlib import Path from typing import TYPE_CHECKING, Any, no_type_check import subprocess_tee from packaging.version import Version from ansible_compat.config import ( AnsibleConfig, parse_ansible_version, ) from ansible_compat.constants import ( META_MAIN, MSG_INVALID_FQRL, RC_ANSIBLE_OPTIONS_ERROR, REQUIREMENT_LOCATIONS, ) from ansible_compat.errors import ( AnsibleCommandError, AnsibleCompatError, InvalidPrerequisiteError, MissingAnsibleError, ) from ansible_compat.loaders import colpath_from_path, yaml_from_file from ansible_compat.prerun import get_cache_dir if TYPE_CHECKING: # pragma: no cover # https://github.com/PyCQA/pylint/issues/3240 # pylint: disable=unsubscriptable-object CompletedProcess = subprocess.CompletedProcess[Any] from collections.abc import Callable else: CompletedProcess = subprocess.CompletedProcess _logger = logging.getLogger(__name__) # regex to extract the first version from a collection range specifier version_re = re.compile(r":[>=<]*([^,]*)") namespace_re = re.compile(r"^[a-z][a-z0-9_]+$") class AnsibleWarning(Warning): """Warnings related to Ansible runtime.""" @dataclass class Collection: """Container for Ansible collection information.""" name: str version: str path: Path class CollectionVersion(Version): """Collection version.""" def __init__(self, version: str) -> None: """Initialize collection version.""" # As packaging Version class does not support wildcard, we convert it # to "0", as this being the smallest version possible. 
if version == "*": version = "0" super().__init__(version) @dataclass class Plugins: # pylint: disable=too-many-instance-attributes """Dataclass to access installed Ansible plugins, uses ansible-doc to retrieve them.""" runtime: Runtime become: dict[str, str] = field(init=False) cache: dict[str, str] = field(init=False) callback: dict[str, str] = field(init=False) cliconf: dict[str, str] = field(init=False) connection: dict[str, str] = field(init=False) httpapi: dict[str, str] = field(init=False) inventory: dict[str, str] = field(init=False) lookup: dict[str, str] = field(init=False) netconf: dict[str, str] = field(init=False) shell: dict[str, str] = field(init=False) vars: dict[str, str] = field(init=False) module: dict[str, str] = field(init=False) strategy: dict[str, str] = field(init=False) test: dict[str, str] = field(init=False) filter: dict[str, str] = field(init=False) role: dict[str, str] = field(init=False) keyword: dict[str, str] = field(init=False) @no_type_check def __getattribute__(self, attr: str): # noqa: ANN204 """Get attribute.""" if attr in { "become", "cache", "callback", "cliconf", "connection", "httpapi", "inventory", "lookup", "netconf", "shell", "vars", "module", "strategy", "test", "filter", "role", "keyword", }: try: result = super().__getattribute__(attr) except AttributeError as exc: proc = self.runtime.run( ["ansible-doc", "--json", "-l", "-t", attr], ) data = json.loads(proc.stdout) if not isinstance(data, dict): # pragma: no cover msg = "Unexpected output from ansible-doc" raise AnsibleCompatError(msg) from exc result = data else: result = super().__getattribute__(attr) return result # pylint: disable=too-many-instance-attributes class Runtime: """Ansible Runtime manager.""" _version: Version | None = None collections: OrderedDict[str, Collection] = OrderedDict() cache_dir: Path # Used to track if we have already initialized the Ansible runtime as attempts # to do it multiple times will cause runtime warnings from within ansible-core initialized: bool = False plugins: Plugins _has_playbook_cache: dict[tuple[str, Path | None], bool] = {} require_module: bool = False def __init__( self, project_dir: Path | None = None, *, isolated: bool = False, min_required_version: str | None = None, require_module: bool = False, max_retries: int = 0, environ: dict[str, str] | None = None, verbosity: int = 0, ) -> None: """Initialize Ansible runtime environment. :param project_dir: The directory containing the Ansible project. If not mentioned, it will be guessed from the current working directory. :param isolated: Assure that installation of collections or roles does not affect Ansible installation, a unique cache directory being used instead. :param min_required_version: Minimal version of Ansible required. If not found, a :class:`RuntimeError` exception is raised. :param require_module: If set, instantiation will fail if Ansible Python module is missing or is not matching the same version as the Ansible command line. That is useful for consumers that expect to also perform Python imports from Ansible. :param max_retries: Number of times it should retry network operations. Default is 0, no retries. :param environ: Environment dictionary to use, if undefined ``os.environ`` will be copied and used. :param verbosity: Verbosity level to use.
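Example (an illustrative sketch, assuming ansible-core is installed in the active environment): ``Runtime(require_module=True, min_required_version="2.16")`` raises ``RuntimeError`` early when the detected Ansible is older than 2.16, or when the ``ansible`` Python module is missing or does not match the CLI version.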
""" self.project_dir = project_dir or Path.cwd() self.isolated = isolated self.max_retries = max_retries self.environ = environ or os.environ.copy() if "ANSIBLE_COLLECTIONS_PATHS" in self.environ: msg = "ANSIBLE_COLLECTIONS_PATHS was detected, replace it with ANSIBLE_COLLECTIONS_PATH to continue." raise RuntimeError(msg) self.plugins = Plugins(runtime=self) self.verbosity = verbosity self.initialize_logger(level=self.verbosity) # Reduce noise from paramiko, unless user already defined PYTHONWARNINGS # paramiko/transport.py:236: CryptographyDeprecationWarning: Blowfish has been deprecated # https://github.com/paramiko/paramiko/issues/2038 # As CryptographyDeprecationWarning is not a builtin, we cannot use # PYTHONWARNINGS to ignore it using category but we can use message. # https://stackoverflow.com/q/68251969/99834 if "PYTHONWARNINGS" not in self.environ: # pragma: no cover self.environ["PYTHONWARNINGS"] = "ignore:Blowfish has been deprecated" self.cache_dir = get_cache_dir(self.project_dir, isolated=self.isolated) self.config = AnsibleConfig(cache_dir=self.cache_dir) # Add the sys.path to the collection paths if not isolated self._patch_collection_paths() if not self.version_in_range(lower=min_required_version): msg = f"Found incompatible version of ansible runtime {self.version}, instead of {min_required_version} or newer." raise RuntimeError(msg) if require_module: self.require_module = True self._ensure_module_available() # pylint: disable=import-outside-toplevel from ansible.utils.display import Display # pylint: disable=unused-argument def warning( self: Display, # noqa: ARG001 msg: str, *, formatted: bool = False, # noqa: ARG001 ) -> None: # pragma: no cover """Override ansible.utils.display.Display.warning to avoid printing warnings.""" warnings.warn( message=msg, category=AnsibleWarning, stacklevel=2, source={"msg": msg}, ) # Monkey patch ansible warning in order to use warnings module. Display.warning = warning def initialize_logger(self, level: int = 0) -> None: # noqa: PLR6301 """Set up the global logging level based on the verbosity number.""" verbosity_map = { -2: logging.CRITICAL, -1: logging.ERROR, 0: logging.WARNING, 1: logging.INFO, 2: logging.DEBUG, } # Unknown logging level is treated as DEBUG logging_level = verbosity_map.get(level, logging.DEBUG) _logger.setLevel(logging_level) # Use module-level _logger instance to validate it _logger.debug("Logging initialized to level %s", logging_level) def _patch_collection_paths(self) -> None: """Modify Ansible collection path for testing purposes. - Add the sys.path to the end of collection paths. - Add the site-packages to the beginning of collection paths to match ansible-core and ade behavior and trick ansible-galaxy to install default to the venv site-packages location (isolation). """ # ansible-core normal precedence is: adjacent, local paths, configured paths, site paths collections_paths: list[str] = self.config.collections_paths.copy() if self.config.collections_scan_sys_path: for path in sys.path: if ( path not in collections_paths and (Path(path) / "ansible_collections").is_dir() ): collections_paths.append( # pylint: disable=E1101 path, ) # When inside a venv, we also add the site-packages to the end of the # collections path because this is the last place where ansible-core # will look for them. This also ensures that when calling ansible-galaxy # to install content. 
for path in reversed(site.getsitepackages()): if path not in collections_paths: collections_paths.append(path) if collections_paths != self.config.collections_paths: _logger.info( "Collection paths was patched to include extra directories %s", ",".join(collections_paths), ) else: msg = "ANSIBLE_COLLECTIONS_SCAN_SYS_PATH is disabled, not patching collection paths. This may lead to unexpected behavior when using dev tools and prevent full isolation from user environment." _logger.warning(msg) self.config.collections_paths = collections_paths def load_collections(self) -> None: """Load collection data.""" self.collections = OrderedDict() no_collections_msg = "None of the provided paths were usable" # do not use --path because it does not allow multiple values proc = self.run( [ "ansible-galaxy", "collection", "list", "--format=json", ], ) if proc.returncode == RC_ANSIBLE_OPTIONS_ERROR and ( no_collections_msg in proc.stdout or no_collections_msg in proc.stderr ): # pragma: no cover _logger.debug("Ansible reported no installed collections at all.") return if proc.returncode != 0: _logger.error(proc) msg = f"Unable to list collections: {proc}" raise RuntimeError(msg) try: data = json.loads(proc.stdout) except json.decoder.JSONDecodeError as exc: msg = f"Unable to parse galaxy output as JSON: {proc.stdout}" raise RuntimeError(msg) from exc if not isinstance(data, dict): msg = f"Unexpected collection data, {data}" raise TypeError(msg) for path in data: if not isinstance(data[path], dict): msg = f"Unexpected collection data, {data[path]}" raise TypeError(msg) for collection, collection_info in data[path].items(): if not isinstance(collection_info, dict): msg = f"Unexpected collection data, {collection_info}" raise TypeError(msg) if collection in self.collections: msg = f"Another version of '{collection}' {collection_info['version']} was found installed in {path}, only the first one will be used, {self.collections[collection].version} ({self.collections[collection].path})." _logger.warning(msg) else: self.collections[collection] = Collection( name=collection, version=collection_info["version"], path=path, ) def _ensure_module_available(self) -> None: """Assure that Ansible Python module is installed and matching CLI version.""" ansible_release_module = None with contextlib.suppress(ModuleNotFoundError, ImportError): ansible_release_module = importlib.import_module("ansible.release") if ansible_release_module is None: msg = "Unable to find Ansible python module." raise RuntimeError(msg) ansible_module_version = Version( ansible_release_module.__version__, ) if ansible_module_version != self.version: msg = f"Ansible CLI ({self.version}) and python module ({ansible_module_version}) versions do not match. This indicates a broken execution environment." 
raise RuntimeError(msg) # We need to initialize the plugin loader # https://github.com/ansible/ansible-lint/issues/2945 if not Runtime.initialized: col_path = [f"{self.cache_dir}/collections"] # noinspection PyProtectedMember # pylint: disable=import-outside-toplevel,no-name-in-module from ansible.plugins.loader import init_plugin_loader from ansible.utils.collection_loader._collection_finder import ( # pylint: disable=import-outside-toplevel _AnsibleCollectionFinder, # noqa: PLC2701 ) _AnsibleCollectionFinder( # noqa: SLF001 paths=col_path, )._remove() # pylint: disable=protected-access init_plugin_loader(col_path) def clean(self) -> None: """Remove content of cache_dir.""" shutil.rmtree(self.cache_dir, ignore_errors=True) def run( # ruff: disable=PLR0913 self, args: str | list[str], *, retry: bool = False, tee: bool = False, env: dict[str, str] | None = None, cwd: Path | None = None, set_acp: bool = True, ) -> CompletedProcess: """Execute a command inside an Ansible environment. :param retry: Retry network operations on failures. :param tee: Also pass captured stdout/stderr to system while running. :param set_acp: Set the ANSIBLE_COLLECTIONS_PATH """ if tee: run_func: Callable[..., CompletedProcess] = subprocess_tee.run else: run_func = subprocess.run env = self.environ if env is None else env.copy() # Presence of ansible debug variable or config option will prevent us # from parsing its JSON output due to extra debug messages on stdout. env["ANSIBLE_DEBUG"] = "0" # https://github.com/ansible/ansible-lint/issues/3522 env["ANSIBLE_VERBOSE_TO_STDERR"] = "True" if set_acp: env["ANSIBLE_COLLECTIONS_PATH"] = ":".join( list(dict.fromkeys(self.config.collections_paths)), ) for _ in range(self.max_retries + 1 if retry else 1): result = run_func( args, universal_newlines=True, check=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env, cwd=str(cwd) if cwd else None, ) if result.returncode == 0: break _logger.debug("Environment: %s", env) if retry: _logger.warning( "Retrying execution failure %s of: %s", result.returncode, " ".join(args), ) return result @property def version(self) -> Version: """Return current Version object for Ansible. If version is not mentioned, it returns current version as detected. When the version argument is mentioned, it converts the version string to a Version object in order to make it usable in comparisons. """ if self._version: return self._version proc = self.run(["ansible", "--version"]) if proc.returncode == 0: self._version = parse_ansible_version(proc.stdout) return self._version msg = "Unable to find a working copy of ansible executable." raise MissingAnsibleError(msg, proc=proc) def version_in_range( self, lower: str | None = None, upper: str | None = None, ) -> bool: """Check if Ansible version is inside a required range. The lower limit is inclusive and the upper one exclusive. """ if lower and self.version < Version(lower): return False return not (upper and self.version >= Version(upper)) def has_playbook(self, playbook: str, *, basedir: Path | None = None) -> bool: """Return true if ansible can load a given playbook. This is also used for checking if playbooks from within collections are present and if they pass syntax check.
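Example (illustrative): ``runtime.has_playbook("site.yml")`` returns ``True`` only when ``ansible-playbook --syntax-check site.yml`` exits with code 0; results are cached per ``(playbook, basedir)`` pair, so repeated checks do not re-run the syntax check.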
""" if (playbook, basedir) in self._has_playbook_cache: return self._has_playbook_cache[playbook, basedir] proc = self.run(["ansible-playbook", "--syntax-check", playbook], cwd=basedir) result = proc.returncode == 0 if not result: if not basedir: basedir = Path() msg = f"has_playbook returned false for '{basedir / playbook}' due to syntax check returning {proc.returncode}" _logger.debug(msg) # cache the result self._has_playbook_cache[playbook, basedir] = result return result def install_collection( self, collection: str | Path, *, destination: Path | None = None, force: bool = False, ) -> None: """Install an Ansible collection. Can accept arguments like: 'foo.bar:>=1.2.3' 'git+https://github.com/ansible-collections/ansible.posix.git,main' """ cmd = [ "ansible-galaxy", "collection", "install", "-vvv", # this is needed to make ansible display important info in case of failures ] if force: cmd.append("--force") if isinstance(collection, Path): collection = str(collection) # As ansible-galaxy install is not able to automatically determine # if the range requires a pre-release, we need to manually add the --pre # flag when needed. matches = version_re.search(collection) if ( not is_url(collection) and matches and CollectionVersion(matches[1]).is_prerelease ): cmd.append("--pre") cpaths: list[str] = self.config.collections_paths if destination and str(destination) not in cpaths: # we cannot use '-p' because it breaks galaxy ability to ignore already installed collections, so # we hack ANSIBLE_COLLECTIONS_PATH instead and inject our own path there. # pylint: disable=no-member cpaths.insert(0, str(destination)) cmd.append(f"{collection}") _logger.info("Running from %s : %s", Path.cwd(), " ".join(cmd)) process = self.run( cmd, retry=True, env={**self.environ, "ANSIBLE_COLLECTIONS_PATH": ":".join(cpaths)}, ) if process.returncode != 0: msg = f"Command {' '.join(cmd)}, returned {process.returncode} code:\n{process.stdout}\n{process.stderr}" _logger.error(msg) raise InvalidPrerequisiteError(msg) def install_collection_from_disk( self, path: Path, destination: Path | None = None, ) -> None: """Build and install collection from a given disk path.""" self.install_collection(path, destination=destination, force=True) # pylint: disable=too-many-branches def install_requirements( # noqa: C901 self, requirement: Path, *, retry: bool = False, offline: bool = False, ) -> None: """Install dependencies from a requirements.yml. :param requirement: path to requirements.yml file :param retry: retry network operations on failures :param offline: bypass installation, may fail if requirements are not met. """ if not Path(requirement).exists(): return reqs_yaml = yaml_from_file(Path(requirement)) if not isinstance(reqs_yaml, dict | list): msg = f"{requirement} file is not a valid Ansible requirements file." raise InvalidPrerequisiteError(msg) if isinstance(reqs_yaml, dict): for key in reqs_yaml: if key not in {"roles", "collections"}: msg = f"{requirement} file is not a valid Ansible requirements file. Only 'roles' and 'collections' keys are allowed at root level. 
Recognized valid locations are: {', '.join(REQUIREMENT_LOCATIONS)}" raise InvalidPrerequisiteError(msg) if isinstance(reqs_yaml, list) or "roles" in reqs_yaml: cmd = [ "ansible-galaxy", "role", "install", "-r", f"{requirement}", ] if self.verbosity > 0: cmd.extend(["-" + ("v" * self.verbosity)]) cmd.extend(["--roles-path", f"{self.cache_dir}/roles"]) if offline: _logger.warning( "Skipped installing old role dependencies due to running in offline mode.", ) else: _logger.info("Running %s", " ".join(cmd)) result = self.run(cmd, retry=retry) _logger.debug(result.stdout) if result.returncode != 0: _logger.error(result.stderr) raise AnsibleCommandError(result) # Run galaxy collection install works on v2 requirements.yml if ( isinstance(reqs_yaml, dict) and "collections" in reqs_yaml and reqs_yaml["collections"] is not None ): cmd = [ "ansible-galaxy", "collection", "install", ] if self.verbosity > 0: cmd.extend(["-" + ("v" * self.verbosity)]) for collection in reqs_yaml["collections"]: if isinstance(collection, dict) and collection.get("type", "") == "git": _logger.info( "Adding '--pre' to ansible-galaxy collection install because we detected one collection being sourced from git.", ) cmd.append("--pre") break if offline: _logger.warning( "Skipped installing collection dependencies due to running in offline mode.", ) else: cmd.extend(["-r", str(requirement)]) _logger.info("Running %s", " ".join(cmd)) result = self.run( cmd, retry=retry, ) _logger.debug(result.stdout) if result.returncode != 0: _logger.error(result.stderr) raise AnsibleCommandError(result) if self.require_module: Runtime.initialized = False self._ensure_module_available() # pylint: disable=too-many-locals def prepare_environment( # noqa: C901 self, required_collections: dict[str, str] | None = None, *, retry: bool = False, install_local: bool = False, offline: bool = False, role_name_check: int = 0, ) -> None: """Make dependencies available if needed.""" destination: Path = self.cache_dir / "collections" if required_collections is None: required_collections = {} self._prepare_ansible_paths() # first one is standard for collection layout repos and the last two # are part of Tower specification # https://docs.ansible.com/ansible-tower/latest/html/userguide/projects.html#ansible-galaxy-support # https://docs.ansible.com/ansible-tower/latest/html/userguide/projects.html#collections-support for req_file in REQUIREMENT_LOCATIONS: file_path = Path(req_file) if self.project_dir: file_path = self.project_dir / req_file self.install_requirements(file_path, retry=retry, offline=offline) if not install_local: return for item in search_galaxy_paths(self.project_dir): # processing all found galaxy.yml files if item.exists(): data = yaml_from_file(item) if isinstance(data, dict) and "dependencies" in data: for name, required_version in data["dependencies"].items(): _logger.info( "Provisioning collection %s:%s from galaxy.yml", name, required_version, ) self.install_collection( f"{name}{',' if is_url(name) else ':'}{required_version}", destination=destination, ) for name, min_version in required_collections.items(): self.install_collection( f"{name}:>={min_version}", destination=destination, ) galaxy_path = self.project_dir / "galaxy.yml" if (galaxy_path).exists(): # while function can return None, that would not break the logic colpath = Path( f"{destination}/ansible_collections/{colpath_from_path(self.project_dir)}", ) if colpath.is_symlink(): if os.path.realpath(colpath) == str(Path.cwd()): _logger.warning( "Found symlinked collection, 
skipping its installation.", ) return _logger.warning( "Collection is symlinked, but not pointing to %s directory, so we will remove it.", Path.cwd(), ) colpath.unlink() # molecule scenario within a collection self.install_collection_from_disk( galaxy_path.parent, destination=destination, ) elif Path.cwd().parent.name == "roles" and Path("../../galaxy.yml").exists(): # molecule scenario located within roles//molecule inside # a collection self.install_collection_from_disk( Path("../.."), destination=destination, ) else: # no collection, try to recognize and install a standalone role self._install_galaxy_role( self.project_dir, role_name_check=role_name_check, ignore_errors=True, ) # reload collections self.load_collections() def require_collection( self, name: str, version: str | None = None, *, install: bool = True, ) -> tuple[CollectionVersion, Path]: """Check if a minimal collection version is present or exits. In the future this method may attempt to install a missing or outdated collection before failing. Args: name: collection name version: minimal version required install: if True, attempt to install a missing collection Returns: tuple of (found_version, collection_path) """ try: ns, coll = name.split(".", 1) except ValueError as exc: msg = f"Invalid collection name supplied: {name}%s" raise InvalidPrerequisiteError( msg, ) from exc paths: list[str] = self.config.collections_paths if not paths or not isinstance(paths, list): msg = f"Unable to determine ansible collection paths. ({paths})" raise InvalidPrerequisiteError( msg, ) for path in paths: collpath = Path(path) / "ansible_collections" / ns / coll if collpath.exists(): mpath = collpath / "MANIFEST.json" if not mpath.exists(): msg = f"Found collection at '{collpath}' but missing MANIFEST.json, cannot get info." _logger.fatal(msg) raise InvalidPrerequisiteError(msg) with mpath.open(encoding="utf-8") as f: manifest = json.loads(f.read()) found_version = CollectionVersion( manifest["collection_info"]["version"], ) if version and found_version < CollectionVersion(version): if install: self.install_collection(f"{name}:>={version}") self.require_collection(name, version, install=False) else: msg = f"Found {name} collection {found_version} but {version} or newer is required." 
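                        # install=False here, so there is nothing left to try; surface the
                        # version mismatch as a hard failure instead of attempting an upgrade.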
_logger.fatal(msg) raise InvalidPrerequisiteError(msg) return found_version, collpath.resolve() if install: self.install_collection(f"{name}:>={version}" if version else name) return self.require_collection( name=name, version=version, install=False, ) msg = f"Collection '{name}' not found in '{paths}'" _logger.fatal(msg) raise InvalidPrerequisiteError(msg) def _prepare_ansible_paths(self) -> None: """Configure Ansible environment variables.""" try: library_paths: list[str] = self.config.default_module_path.copy() roles_path: list[str] = self.config.default_roles_path.copy() collections_path: list[str] = self.config.collections_paths.copy() except AttributeError as exc: msg = "Unexpected ansible configuration" raise RuntimeError(msg) from exc alterations_list: list[tuple[list[str], str, bool]] = [ (library_paths, "plugins/modules", True), (roles_path, "roles", True), ] alterations_list.extend( ( [ (roles_path, f"{self.cache_dir}/roles", False), (library_paths, f"{self.cache_dir}/modules", False), (collections_path, f"{self.cache_dir}/collections", False), ] if self.isolated else [] ), ) for path_list, path_, must_be_present in alterations_list: path = Path(path_) if not path.exists(): if must_be_present: continue path.mkdir(parents=True, exist_ok=True) if str(path) not in path_list: path_list.insert(0, str(path)) if library_paths != self.config.DEFAULT_MODULE_PATH: self._update_env("ANSIBLE_LIBRARY", library_paths) if collections_path != self.config.default_collections_path: self._update_env("ANSIBLE_COLLECTIONS_PATH", collections_path) if roles_path != self.config.default_roles_path: self._update_env("ANSIBLE_ROLES_PATH", roles_path) def _get_roles_path(self) -> Path: """Return roles installation path. If `self.isolated` is set to `True`, `self.cache_dir` would be created, then it returns the `self.cache_dir/roles`. When `self.isolated` is not mentioned or set to `False`, it returns the first path in `default_roles_path`. """ path = Path(f"{self.cache_dir}/roles") return path def _install_galaxy_role( self, project_dir: Path, role_name_check: int = 0, *, ignore_errors: bool = False, ) -> None: """Detect standalone galaxy role and installs it. Args: project_dir: path to the role role_name_check: logic to used to check role name 0: exit with error if name is not compliant (default) 1: warn if name is not compliant 2: bypass any name checking ignore_errors: if True, bypass installing invalid roles. Our implementation aims to match ansible-galaxy's behavior for installing roles from a tarball or scm. For example ansible-galaxy will install a role that has both galaxy.yml and meta/main.yml present but empty. Also missing galaxy.yml is accepted but missing meta/main.yml is not. 
""" yaml = None galaxy_info = {} for meta_main in META_MAIN: meta_filename = Path(project_dir) / meta_main if meta_filename.exists(): break else: if ignore_errors: return yaml = yaml_from_file(meta_filename) if yaml and "galaxy_info" in yaml: galaxy_info = yaml["galaxy_info"] fqrn = _get_role_fqrn(galaxy_info, project_dir) if role_name_check in {0, 1}: if not re.match(r"[a-z0-9][a-z0-9_-]+\.[a-z][a-z0-9_]+$", fqrn): msg = MSG_INVALID_FQRL.format(fqrn) if role_name_check == 1: _logger.warning(msg) else: _logger.error(msg) raise InvalidPrerequisiteError(msg) elif "role_name" in galaxy_info: # when 'role-name' is in skip_list, we stick to plain role names role_namespace = _get_galaxy_role_ns(galaxy_info) role_name = _get_galaxy_role_name(galaxy_info) fqrn = f"{role_namespace}{role_name}" else: fqrn = Path(project_dir).absolute().name path = self._get_roles_path() path.mkdir(parents=True, exist_ok=True) link_path = path / fqrn # despite documentation stating that is_file() reports true for symlinks, # it appears that is_dir() reports true instead, so we rely on exists(). target = Path(project_dir).absolute() if not link_path.exists() or ( link_path.is_symlink() and link_path.readlink() != target ): # must call unlink before checking exists because a broken # link reports as not existing and we want to repair it link_path.unlink(missing_ok=True) # https://github.com/python/cpython/issues/73843 link_path.symlink_to(str(target), target_is_directory=True) _logger.info( "Using %s symlink to current repository in order to enable Ansible to find the role using its expected full name.", link_path, ) def _update_env(self, varname: str, value: list[str], default: str = "") -> None: """Update colon based environment variable if needed. New values are prepended to make sure they take precedence. """ if not value: return orig_value = self.environ.get(varname, default) if orig_value: # we just want to avoid repeating the same entries, but order is important value = list(dict.fromkeys([*value, *orig_value.split(":")])) value_str = ":".join(value) if value_str != self.environ.get(varname, ""): self.environ[varname] = value_str _logger.info("Set %s=%s", varname, value_str) def _get_role_fqrn(galaxy_infos: dict[str, Any], project_dir: Path) -> str: """Compute role fqrn.""" role_namespace = _get_galaxy_role_ns(galaxy_infos) role_name = _get_galaxy_role_name(galaxy_infos) if len(role_name) == 0: role_name = Path(project_dir).absolute().name role_name = re.sub(r"(ansible-|ansible-role-)", "", role_name).split( ".", maxsplit=2, )[-1] return f"{role_namespace}{role_name}" def _get_galaxy_role_ns(galaxy_infos: dict[str, Any]) -> str: """Compute role namespace from meta/main.yml, including trailing dot.""" role_namespace = galaxy_infos.get("namespace", "") if len(role_namespace) == 0: role_namespace = galaxy_infos.get("author", "") if not isinstance(role_namespace, str): msg = f"Role namespace must be string, not {role_namespace}" raise AnsibleCompatError(msg) # if there's a space in the name space, it's likely author name # and not the galaxy login, so act as if there was no namespace if not role_namespace or re.match(r"^\w+ \w+", role_namespace): role_namespace = "" else: role_namespace = f"{role_namespace}." 
return role_namespace def _get_galaxy_role_name(galaxy_infos: dict[str, Any]) -> str: """Compute role name from meta/main.yml.""" result = galaxy_infos.get("role_name", "") if not isinstance(result, str): return "" return result def search_galaxy_paths(search_dir: Path) -> list[Path]: """Search for galaxy paths (only one level deep). Returns: list[Path]: List of galaxy.yml found. """ galaxy_paths: list[Path] = [] for item in [Path(), *search_dir.iterdir()]: # We ignore any folders that are not valid namespaces, just like # ansible galaxy does at this moment. file_path = item.resolve() if file_path.is_file() and file_path.name == "galaxy.yml": galaxy_paths.append(file_path) continue if file_path.is_dir() and namespace_re.match(file_path.name): file_path /= "galaxy.yml" if file_path.exists(): galaxy_paths.append(file_path) return galaxy_paths def is_url(name: str) -> bool: """Return True if a dependency name looks like an URL.""" return bool(re.match(r"^git[+@]", name)) ansible-compat-25.1.4/src/ansible_compat/schema.py000066400000000000000000000065161475513774500221140ustar00rootroot00000000000000"""Utils for JSON Schema validation.""" from __future__ import annotations import json from collections.abc import Mapping, Sequence from dataclasses import dataclass from typing import TYPE_CHECKING import jsonschema from jsonschema.validators import validator_for if TYPE_CHECKING: # pragma: no cover from ansible_compat.types import JSON def to_path(schema_path: Sequence[str | int]) -> str: """Flatten a path to a dot delimited string. Args: schema_path: The schema path Returns: The dot delimited path """ return ".".join(str(index) for index in schema_path) def json_path(absolute_path: Sequence[str | int]) -> str: """Flatten a data path to a dot delimited string. Args: absolute_path: The path Returns: The dot delimited string """ path = "$" for elem in absolute_path: if isinstance(elem, int): path += "[" + str(elem) + "]" else: path += "." + elem return path @dataclass(order=True) class JsonSchemaError: # pylint: disable=too-many-instance-attributes """Data structure to hold a json schema validation error.""" # order of attributes below is important for sorting schema_path: str data_path: str json_path: str message: str expected: bool | int | str relative_schema: str validator: str found: str def to_friendly(self) -> str: """Provide a friendly explanation of the error. Return: The error message """ return f"In '{self.data_path}': {self.message}." def validate( schema: JSON, data: JSON, ) -> list[JsonSchemaError]: """Validate some data against a JSON schema. 
    Args:
        schema: the JSON schema to use for validation
        data: The data to validate

    Returns:
        Any errors encountered

    Raises:
        jsonschema.SchemaError: if the schema is invalid
    """
    errors: list[JsonSchemaError] = []
    if isinstance(schema, str):
        schema = json.loads(schema)
    try:
        if not isinstance(schema, Mapping):
            msg = "Invalid schema, must be a mapping"
            raise jsonschema.SchemaError(msg)  # noqa: TRY301
        validator = validator_for(schema)
        validator.check_schema(schema)
    except jsonschema.SchemaError as exc:
        error = JsonSchemaError(
            message=str(exc),
            data_path="schema sanity check",
            json_path="",
            schema_path="",
            relative_schema="",
            expected="",
            validator="",
            found="",
        )
        errors.append(error)
        return errors

    for validation_error in validator(schema).iter_errors(data):
        if isinstance(validation_error, jsonschema.ValidationError):
            error = JsonSchemaError(
                message=validation_error.message,
                data_path=to_path(validation_error.absolute_path),
                json_path=json_path(validation_error.absolute_path),
                schema_path=to_path(validation_error.schema_path),
                relative_schema=str(validation_error.schema),
                expected=str(validation_error.validator_value),
                validator=str(validation_error.validator),
                found=str(validation_error.instance),
            )
            errors.append(error)
    return sorted(errors)

ansible-compat-25.1.4/src/ansible_compat/types.py
"""Custom types."""  # noqa: A005

from __future__ import annotations

from collections.abc import Mapping, Sequence
from typing import TypeAlias

JSON: TypeAlias = dict[str, "JSON"] | list["JSON"] | str | int | float | bool | None
JSON_ro: TypeAlias = (
    Mapping[str, "JSON_ro"] | Sequence["JSON_ro"] | str | int | float | bool | None
)

__all__ = ["JSON", "JSON_ro"]

ansible-compat-25.1.4/test/
ansible-compat-25.1.4/test/__init__.py
"""Tests for ansible_compat package."""

ansible-compat-25.1.4/test/assets/
ansible-compat-25.1.4/test/assets/galaxy_paths/
ansible-compat-25.1.4/test/assets/galaxy_paths/.bar/
ansible-compat-25.1.4/test/assets/galaxy_paths/.bar/galaxy.yml
ansible-compat-25.1.4/test/assets/galaxy_paths/foo/
ansible-compat-25.1.4/test/assets/galaxy_paths/foo/galaxy.yml

ansible-compat-25.1.4/test/assets/requirements-invalid-collection.yml
# "ansible-galaxy collection install" is expected to fail this invalid file
collections:
  - foo: bar

ansible-compat-25.1.4/test/assets/requirements-invalid-role.yml
# file expected to make "ansible-galaxy role install" to fail
roles:
  - this_role_does_not_exist

ansible-compat-25.1.4/test/assets/validate0_data.json
{
  "environment": {
    "a": false,
    "b": true,
    "c": "foo"
  }
}
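The validate0 assets are consumed together by the schema tests: the data above sets boolean values for "a" and "b", the expected file that follows lists the resulting type errors, and the schema after it constrains every "environment" value to a string. A minimal usage sketch, assuming it is run from the repository root (it is not part of the package sources):

import json
from pathlib import Path

from ansible_compat.schema import validate

assets = Path("test/assets")
schema = json.loads((assets / "validate0_schema.json").read_text(encoding="utf-8"))
data = json.loads((assets / "validate0_data.json").read_text(encoding="utf-8"))

# validate() returns a sorted list of JsonSchemaError dataclasses.
for error in validate(schema=schema, data=data):
    print(error.to_friendly())  # e.g. "In 'environment.a': False is not of type 'string'."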
ansible-compat-25.1.4/test/assets/validate0_expected.json000066400000000000000000000011671475513774500234360ustar00rootroot00000000000000[ { "message": "False is not of type 'string'", "data_path": "environment.a", "json_path": "$.environment.a", "schema_path": "properties.environment.additionalProperties.type", "relative_schema": "{'type': 'string'}", "expected": "string", "validator": "type", "found": "False" }, { "message": "True is not of type 'string'", "data_path": "environment.b", "json_path": "$.environment.b", "schema_path": "properties.environment.additionalProperties.type", "relative_schema": "{'type': 'string'}", "expected": "string", "validator": "type", "found": "True" } ] ansible-compat-25.1.4/test/assets/validate0_schema.json000066400000000000000000000002701475513774500230670ustar00rootroot00000000000000{ "$schema": "http://json-schema.org/draft-07/schema#", "properties": { "environment": { "type": "object", "additionalProperties": { "type": "string" } } } } ansible-compat-25.1.4/test/collections/000077500000000000000000000000001475513774500200205ustar00rootroot00000000000000ansible-compat-25.1.4/test/collections/acme.broken/000077500000000000000000000000001475513774500222045ustar00rootroot00000000000000ansible-compat-25.1.4/test/collections/acme.broken/galaxy.yml000066400000000000000000000000451475513774500242130ustar00rootroot00000000000000foo: that is not a valid collection! ansible-compat-25.1.4/test/collections/acme.goodies/000077500000000000000000000000001475513774500223555ustar00rootroot00000000000000ansible-compat-25.1.4/test/collections/acme.goodies/galaxy.yml000066400000000000000000000012511475513774500243640ustar00rootroot00000000000000name: goodies namespace: acme version: 1.0.0 readme: README.md authors: - Red Hat description: Sample collection to use with molecule dependencies: community.molecule: ">=0.1.0" # used to also test '=>' condition ansible.utils: "*" # used to also test '*' git+https://github.com/ansible-collections/community.crypto.git: main # tests ability to install from git build_ignore: - "*.egg-info" - .DS_Store - .eggs - .gitignore - .mypy_cache - .pytest_cache - .stestr - .stestr.conf - .tox - .vscode - MANIFEST.in - build - dist - doc - report.html - setup.cfg - setup.py - "tests/unit/*.*" - README.rst - tox.ini license_file: LICENSE ansible-compat-25.1.4/test/collections/acme.goodies/molecule/000077500000000000000000000000001475513774500241625ustar00rootroot00000000000000ansible-compat-25.1.4/test/collections/acme.goodies/molecule/default/000077500000000000000000000000001475513774500256065ustar00rootroot00000000000000ansible-compat-25.1.4/test/collections/acme.goodies/molecule/default/converge.yml000066400000000000000000000002361475513774500301420ustar00rootroot00000000000000--- - name: Converge hosts: localhost tasks: - name: "Include sample role from current collection" include_role: name: acme.goodies.baz ansible-compat-25.1.4/test/collections/acme.goodies/molecule/default/molecule.yml000066400000000000000000000002161475513774500301350ustar00rootroot00000000000000--- dependency: name: galaxy driver: name: delegated platforms: - name: instance provisioner: name: ansible verifier: name: ansible 
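The acme.goodies fixture collection above declares its dependencies in galaxy.yml, including a git-sourced community.crypto entry. A hedged sketch of how such a checkout is provisioned through the Runtime API exercised later in test_runtime.py; the project path and the printed collection names are illustrative assumptions:

from pathlib import Path

from ansible_compat.runtime import Runtime

runtime = Runtime(project_dir=Path("test/collections/acme.goodies"), isolated=True)
# install_local=True builds and installs the collection from disk and resolves its
# galaxy.yml dependencies, which is also what pulls in the git-sourced entry.
runtime.prepare_environment(install_local=True)
print(sorted(runtime.collections))  # e.g. ansible.utils, community.crypto, community.molecule
runtime.clean()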
ansible-compat-25.1.4/test/collections/acme.goodies/roles/000077500000000000000000000000001475513774500235015ustar00rootroot00000000000000ansible-compat-25.1.4/test/collections/acme.goodies/roles/baz/000077500000000000000000000000001475513774500242555ustar00rootroot00000000000000ansible-compat-25.1.4/test/collections/acme.goodies/roles/baz/molecule/000077500000000000000000000000001475513774500260625ustar00rootroot00000000000000ansible-compat-25.1.4/test/collections/acme.goodies/roles/baz/molecule/deep_scenario/000077500000000000000000000000001475513774500306625ustar00rootroot00000000000000ansible-compat-25.1.4/test/collections/acme.goodies/roles/baz/molecule/deep_scenario/converge.yml000066400000000000000000000002341475513774500332140ustar00rootroot00000000000000--- - name: Converge hosts: localhost tasks: - name: "Sample testing task part of deep_scenario" include_role: name: acme.goodies.baz ansible-compat-25.1.4/test/collections/acme.goodies/roles/baz/molecule/deep_scenario/molecule.yml000066400000000000000000000002161475513774500332110ustar00rootroot00000000000000--- dependency: name: galaxy driver: name: delegated platforms: - name: instance provisioner: name: ansible verifier: name: ansible ansible-compat-25.1.4/test/collections/acme.goodies/roles/baz/tasks/000077500000000000000000000000001475513774500254025ustar00rootroot00000000000000ansible-compat-25.1.4/test/collections/acme.goodies/roles/baz/tasks/main.yml000066400000000000000000000001171475513774500270500ustar00rootroot00000000000000- name: "some task inside foo.bar collection" debug: msg: "hello world!" ansible-compat-25.1.4/test/collections/acme.goodies/tests/000077500000000000000000000000001475513774500235175ustar00rootroot00000000000000ansible-compat-25.1.4/test/collections/acme.goodies/tests/requirements.yml000066400000000000000000000000721475513774500267640ustar00rootroot00000000000000collections: - name: ansible.posix version: "1.5.4" ansible-compat-25.1.4/test/collections/acme.minimal/000077500000000000000000000000001475513774500223525ustar00rootroot00000000000000ansible-compat-25.1.4/test/collections/acme.minimal/galaxy.yml000066400000000000000000000006771475513774500243740ustar00rootroot00000000000000name: minimal namespace: acme version: 1.0.0 readme: README.md authors: - Red Hat description: Sample collection to use with molecule build_ignore: - "*.egg-info" - .DS_Store - .eggs - .gitignore - .mypy_cache - .pytest_cache - .stestr - .stestr.conf - .tox - .vscode - MANIFEST.in - build - dist - doc - report.html - setup.cfg - setup.py - "tests/unit/*.*" - README.rst - tox.ini license_file: LICENSE ansible-compat-25.1.4/test/conftest.py000066400000000000000000000070661475513774500177120ustar00rootroot00000000000000"""Pytest fixtures.""" import importlib.metadata import json import pathlib import subprocess import sys from collections.abc import Callable, Generator from pathlib import Path import pytest from ansible_compat.runtime import Runtime @pytest.fixture # pylint: disable=unused-argument def runtime(scope: str = "session") -> Generator[Runtime, None, None]: # noqa: ARG001 """Isolated runtime fixture.""" instance = Runtime(isolated=True) yield instance instance.clean() @pytest.fixture # pylint: disable=unused-argument def runtime_tmp( tmp_path: pathlib.Path, scope: str = "session", # noqa: ARG001 ) -> Generator[Runtime, None, None]: """Isolated runtime fixture using a temp directory.""" instance = Runtime(project_dir=tmp_path, isolated=True) yield instance instance.clean() def query_pkg_version(pkg: str) -> str: 
"""Get the version of a current installed package. :param pkg: Package name :return: Package version """ return importlib.metadata.version(pkg) @pytest.fixture def pkg_version() -> Callable[[str], str]: """Get the version of a current installed package. :return: Callable function to get package version """ return query_pkg_version class VirtualEnvironment: """Virtualenv wrapper.""" def __init__(self, path: Path) -> None: """Initialize. :param path: Path to virtualenv """ self.project = path self.venv_path = self.project / "venv" self.venv_bin_path = self.venv_path / "bin" self.venv_python_path = self.venv_bin_path / "python" def create(self) -> None: """Create virtualenv.""" cmd = [str(sys.executable), "-m", "venv", str(self.venv_path)] subprocess.check_call(args=cmd) # Install this package into the virtual environment self.install(f"{__file__}/../..") def install(self, *packages: str) -> None: """Install packages in virtualenv. :param packages: Packages to install """ cmd = [str(self.venv_python_path), "-m", "pip", "install", *packages] subprocess.check_call(args=cmd) def python_script_run(self, script: str) -> subprocess.CompletedProcess[str]: """Run command in project dir using venv. :param args: Command to run """ proc = subprocess.run( args=[self.venv_python_path, "-c", script], capture_output=True, cwd=self.project, check=False, text=True, ) return proc def site_package_dirs(self) -> list[Path]: """Get site packages. :return: List of site packages dirs """ script = "import json, site; print(json.dumps(site.getsitepackages()))" proc = subprocess.run( args=[self.venv_python_path, "-c", script], capture_output=True, check=False, text=True, ) dirs = json.loads(proc.stdout) if not isinstance(dirs, list): msg = "Expected list of site packages" raise TypeError(msg) sanitized = list({Path(d).resolve() for d in dirs}) return sanitized @pytest.fixture(scope="module") def venv_module(tmp_path_factory: pytest.TempPathFactory) -> VirtualEnvironment: """Create a virtualenv in a temporary directory. 
:param tmp_path: pytest fixture for temp path :return: VirtualEnvironment instance """ test_project = tmp_path_factory.mktemp(basename="test_project-", numbered=True) venv_ = VirtualEnvironment(test_project) venv_.create() return venv_ ansible-compat-25.1.4/test/roles/000077500000000000000000000000001475513774500166265ustar00rootroot00000000000000ansible-compat-25.1.4/test/roles/acme.missing_deps/000077500000000000000000000000001475513774500222165ustar00rootroot00000000000000ansible-compat-25.1.4/test/roles/acme.missing_deps/meta/000077500000000000000000000000001475513774500231445ustar00rootroot00000000000000ansible-compat-25.1.4/test/roles/acme.missing_deps/meta/main.yml000066400000000000000000000002101475513774500246040ustar00rootroot00000000000000--- galaxy_info: name: missing_deps namespace: acme description: foo license: GPL min_ansible_version: "2.10" platforms: [] ansible-compat-25.1.4/test/roles/acme.missing_deps/requirements.yml000066400000000000000000000002371475513774500254660ustar00rootroot00000000000000collections: - foo.bar # collection that does not exist, so we can test offline mode roles: - this_role_does_not_exist # and also role that does not exist ansible-compat-25.1.4/test/roles/acme.sample2/000077500000000000000000000000001475513774500210755ustar00rootroot00000000000000ansible-compat-25.1.4/test/roles/acme.sample2/meta/000077500000000000000000000000001475513774500220235ustar00rootroot00000000000000ansible-compat-25.1.4/test/roles/acme.sample2/meta/main.yml000066400000000000000000000004641475513774500234760ustar00rootroot00000000000000--- dependencies: [] galaxy_info: # role_name is missing in order to test deduction from folder name author: acme description: ACME sample role company: "ACME LTD" license: MIT min_ansible_version: "2.9" platforms: - name: Debian versions: - any galaxy_tags: - samples ansible-compat-25.1.4/test/roles/ansible-role-sample/000077500000000000000000000000001475513774500224615ustar00rootroot00000000000000ansible-compat-25.1.4/test/roles/ansible-role-sample/meta/000077500000000000000000000000001475513774500234075ustar00rootroot00000000000000ansible-compat-25.1.4/test/roles/ansible-role-sample/meta/main.yml000066400000000000000000000004031475513774500250530ustar00rootroot00000000000000--- dependencies: [] galaxy_info: role_name: sample author: acme description: ACME sample role company: "ACME LTD" license: MIT min_ansible_version: "2.9" platforms: - name: Debian versions: - any galaxy_tags: - samples ansible-compat-25.1.4/test/roles/sample3/000077500000000000000000000000001475513774500201725ustar00rootroot00000000000000ansible-compat-25.1.4/test/roles/sample3/meta/000077500000000000000000000000001475513774500211205ustar00rootroot00000000000000ansible-compat-25.1.4/test/roles/sample3/meta/main.yml000066400000000000000000000004641475513774500225730ustar00rootroot00000000000000--- dependencies: [] galaxy_info: # role_name is missing in order to test deduction from folder name author: acme description: ACME samble role company: "ACME LTD" license: MIT min_ansible_version: "2.9" platforms: - name: Debian versions: - any galaxy_tags: - samples ansible-compat-25.1.4/test/roles/sample4/000077500000000000000000000000001475513774500201735ustar00rootroot00000000000000ansible-compat-25.1.4/test/roles/sample4/meta/000077500000000000000000000000001475513774500211215ustar00rootroot00000000000000ansible-compat-25.1.4/test/roles/sample4/meta/main.yml000066400000000000000000000004641475513774500225740ustar00rootroot00000000000000--- dependencies: [] galaxy_info: # 
role_name is missing in order to test deduction from folder name author: acme description: ACME samble role company: "ACME LTD" license: MIT min_ansible_version: "2.9" platforms: - name: Debian versions: - any galaxy_tags: - samples ansible-compat-25.1.4/test/test_api.py000066400000000000000000000001461475513774500176650ustar00rootroot00000000000000"""Tests for ansible_compat package.""" def test_placeholder() -> None: """Placeholder test.""" ansible-compat-25.1.4/test/test_config.py000066400000000000000000000055321475513774500203650ustar00rootroot00000000000000"""Tests for ansible_compat.config submodule.""" import copy import subprocess import pytest from _pytest.monkeypatch import MonkeyPatch from packaging.version import Version from ansible_compat.config import ( AnsibleConfig, ansible_version, parse_ansible_version, ) from ansible_compat.errors import InvalidPrerequisiteError, MissingAnsibleError def test_config() -> None: """Checks that config vars are loaded with their expected type.""" config = AnsibleConfig() assert isinstance(config.ACTION_WARNINGS, bool) assert isinstance(config.CACHE_PLUGIN_PREFIX, str) assert isinstance(config.CONNECTION_FACTS_MODULES, dict) assert config.ANSIBLE_COW_PATH is None assert isinstance(config.NETWORK_GROUP_MODULES, list) assert isinstance(config.DEFAULT_FORKS, int | type(None)) # check lowercase and older name aliasing assert isinstance(config.collections_paths, list) assert isinstance(config.collections_path, list) assert config.collections_paths == config.collections_path # check if we can access the special data member assert config.data["ACTION_WARNINGS"] == config.ACTION_WARNINGS with pytest.raises(AttributeError): _ = config.THIS_DOES_NOT_EXIST def test_config_with_dump() -> None: """Tests that config can parse given dumps.""" config = AnsibleConfig(config_dump="ACTION_WARNINGS(default) = True") assert config.ACTION_WARNINGS is True def test_config_copy() -> None: """Checks ability to use copy/deepcopy.""" config = AnsibleConfig() new_config = copy.copy(config) assert isinstance(new_config, AnsibleConfig) assert new_config is not config # deepcopy testing new_config = copy.deepcopy(config) assert isinstance(new_config, AnsibleConfig) assert new_config is not config def test_parse_ansible_version_fail() -> None: """Checks that parse_ansible_version raises an error on invalid input.""" with pytest.raises( InvalidPrerequisiteError, match="Unable to parse ansible cli version", ): parse_ansible_version("foo") def test_ansible_version_missing(monkeypatch: MonkeyPatch) -> None: """Validate ansible_version behavior when ansible is missing.""" monkeypatch.setattr( "subprocess.run", lambda *args, **kwargs: subprocess.CompletedProcess( # noqa: ARG005 args=[], returncode=1, ), ) with pytest.raises( MissingAnsibleError, match=r"Unable to find a working copy of ansible executable.", ): # bypassing lru cache ansible_version.__wrapped__() def test_ansible_version() -> None: """Validate ansible_version behavior.""" assert ansible_version() >= Version("1.0") def test_ansible_version_arg() -> None: """Validate ansible_version behavior.""" assert ansible_version("2.0") >= Version("1.0") ansible-compat-25.1.4/test/test_configuration_example.py000066400000000000000000000006571475513774500235050ustar00rootroot00000000000000"""Sample usage of AnsibleConfig.""" from ansible_compat.config import AnsibleConfig def test_example_config() -> None: """Test basic functionality of AnsibleConfig.""" cfg = AnsibleConfig() assert isinstance(cfg.ACTION_WARNINGS, bool) # you can 
also use lowercase: assert isinstance(cfg.action_warnings, bool) # you can also use it as dictionary assert cfg["action_warnings"] == cfg.action_warnings ansible-compat-25.1.4/test/test_loaders.py000066400000000000000000000004221475513774500205420ustar00rootroot00000000000000"""Test for ansible_compat.loaders module.""" from pathlib import Path from ansible_compat.loaders import colpath_from_path def test_colpath_from_path() -> None: """Test colpath_from_path non existing path.""" assert colpath_from_path(Path("/foo/bar/")) is None ansible-compat-25.1.4/test/test_prerun.py000066400000000000000000000056341475513774500204360ustar00rootroot00000000000000"""Tests for ansible_compat.prerun module.""" from __future__ import annotations import tempfile from pathlib import Path from typing import TYPE_CHECKING import pytest if TYPE_CHECKING: from _pytest.monkeypatch import MonkeyPatch from ansible_compat.prerun import get_cache_dir def test_get_cache_dir_relative() -> None: """Test behaviors of get_cache_dir.""" relative_path = Path() abs_path = relative_path.resolve() assert get_cache_dir(relative_path) == get_cache_dir(abs_path) def test_get_cache_dir_no_isolation_no_venv(monkeypatch: MonkeyPatch) -> None: """Test behaviors of get_cache_dir. Args: monkeypatch: Pytest fixture for monkeypatching """ monkeypatch.delenv("VIRTUAL_ENV", raising=False) monkeypatch.delenv("ANSIBLE_HOME", raising=False) assert get_cache_dir(Path(), isolated=False) == Path("~/.ansible").expanduser() def test_get_cache_dir_isolation_no_venv(monkeypatch: MonkeyPatch) -> None: """Test behaviors of get_cache_dir. Args: monkeypatch: Pytest fixture for monkeypatching """ monkeypatch.delenv("VIRTUAL_ENV", raising=False) monkeypatch.delenv("ANSIBLE_HOME", raising=False) cache_dir = get_cache_dir(Path(), isolated=True) assert cache_dir == Path().cwd() / ".ansible" def test_get_cache_dir_isolation_no_venv_root(monkeypatch: MonkeyPatch) -> None: """Test behaviors of get_cache_dir. Args: monkeypatch: Pytest fixture for monkeypatching """ monkeypatch.delenv("VIRTUAL_ENV", raising=False) monkeypatch.delenv("ANSIBLE_HOME", raising=False) with ( pytest.warns( UserWarning, match=r"Project directory /.ansible cannot be used for caching as it is not writable.", ), pytest.warns( UserWarning, match=r"Using unique temporary directory .* for caching.", ), ): cache_dir = get_cache_dir(Path("/"), isolated=True) assert cache_dir.as_posix().startswith(tempfile.gettempdir()) def test_get_cache_dir_venv_ro_project_ro(monkeypatch: MonkeyPatch) -> None: """Test behaviors of get_cache_dir with read-only virtual environment and read only project directory. 
Args: monkeypatch: Pytest fixture for monkeypatching """ monkeypatch.setenv("VIRTUAL_ENV", "/") monkeypatch.delenv("ANSIBLE_HOME", raising=False) with ( pytest.warns( UserWarning, match=r"Using unique temporary directory .* for caching.", ), pytest.warns( UserWarning, match=r"Found VIRTUAL_ENV=/ but we cannot use it for caching as it is not writable.", ), pytest.warns( UserWarning, match=r"Project directory .* cannot be used for caching as it is not writable.", ), ): cache_dir = get_cache_dir(Path("/etc"), isolated=True) assert cache_dir.as_posix().startswith(tempfile.gettempdir()) ansible-compat-25.1.4/test/test_runtime.py000066400000000000000000001042731475513774500206050ustar00rootroot00000000000000"""Tests for Runtime class.""" # pylint: disable=protected-access,too-many-lines from __future__ import annotations import logging import os import pathlib import subprocess from contextlib import contextmanager from pathlib import Path from shutil import rmtree from typing import TYPE_CHECKING, Any import pytest from packaging.version import Version from ansible_compat.constants import INVALID_PREREQUISITES_RC from ansible_compat.errors import ( AnsibleCommandError, AnsibleCompatError, InvalidPrerequisiteError, ) from ansible_compat.runtime import ( CompletedProcess, Runtime, _get_galaxy_role_name, is_url, search_galaxy_paths, ) if TYPE_CHECKING: from collections.abc import Iterator from _pytest.monkeypatch import MonkeyPatch from pytest_mock import MockerFixture def test_runtime_version(runtime: Runtime) -> None: """Tests version property.""" version = runtime.version assert isinstance(version, Version) # tests that caching property value worked (coverage) assert version == runtime.version @pytest.mark.parametrize( "require_module", (True, False), ids=("module-required", "module-unrequired"), ) def test_runtime_version_outdated(require_module: bool) -> None: """Checks that instantiation raises if version is outdated.""" with pytest.raises(RuntimeError, match="Found incompatible version of ansible"): Runtime(min_required_version="9999.9.9", require_module=require_module) def test_runtime_missing_ansible_module(monkeypatch: MonkeyPatch) -> None: """Checks that we produce a RuntimeError when ansible module is missing.""" class RaiseException: """Class to raise an exception.""" def __init__( self, *args: Any, # noqa: ARG002,ANN401 **kwargs: Any, # noqa: ARG002,ANN401 ) -> None: raise ModuleNotFoundError monkeypatch.setattr("importlib.import_module", RaiseException) with pytest.raises(RuntimeError, match=r"Unable to find Ansible python module."): Runtime(require_module=True) def test_runtime_mismatch_ansible_module(monkeypatch: MonkeyPatch) -> None: """Test that missing module is detected.""" monkeypatch.setattr("ansible.release.__version__", "0.0.0", raising=False) with pytest.raises(RuntimeError, match="versions do not match"): Runtime(require_module=True) def test_runtime_require_module() -> None: """Check that require_module successful pass.""" Runtime(require_module=True) # Now we try to set the collection path, something to check if that is # causing an exception, as 2.15 introduced new init code. from ansible.utils.collection_loader import ( # pylint: disable=import-outside-toplevel AnsibleCollectionConfig, ) AnsibleCollectionConfig.playbook_paths = "." # Calling it again in order to see that it does not produce UserWarning: AnsibleCollectionFinder has already been configured # which is done by Ansible core 2.15+. 
We added special code inside Runtime # that should avoid initializing twice and raise that warning. Runtime(require_module=True) def test_runtime_version_fail_module(mocker: MockerFixture) -> None: """Tests for failure to detect Ansible version.""" patched = mocker.patch( "ansible_compat.runtime.parse_ansible_version", autospec=True, ) patched.side_effect = InvalidPrerequisiteError( "Unable to parse ansible cli version", ) runtime = Runtime() with pytest.raises( InvalidPrerequisiteError, match="Unable to parse ansible cli version", ): _ = runtime.version # pylint: disable=pointless-statement def test_runtime_version_fail_cli(mocker: MockerFixture) -> None: """Tests for failure to detect Ansible version.""" mocker.patch( "ansible_compat.runtime.Runtime.run", return_value=CompletedProcess( ["x"], returncode=123, stdout="oops", stderr="some error", ), autospec=True, ) runtime = Runtime() with pytest.raises( RuntimeError, match=r"Unable to find a working copy of ansible executable.", ): _ = runtime.version # pylint: disable=pointless-statement def test_runtime_prepare_ansible_paths_validation() -> None: """Check that we validate collection_path.""" runtime = Runtime() runtime.config.collections_paths = "invalid-value" # type: ignore[assignment] with pytest.raises(RuntimeError, match="Unexpected ansible configuration"): runtime._prepare_ansible_paths() @pytest.mark.parametrize( ("folder", "role_name", "isolated"), ( ("ansible-role-sample", "acme.sample", True), ("acme.sample2", "acme.sample2", True), ("sample3", "acme.sample3", True), ("sample4", "acme.sample4", False), ), ids=("1", "2", "3", "4"), ) def test_runtime_install_role( caplog: pytest.LogCaptureFixture, folder: str, role_name: str, isolated: bool, ) -> None: """Checks that we can install roles.""" caplog.set_level(logging.INFO) project_dir = Path(__file__).parent / "roles" / folder runtime = Runtime(isolated=isolated, project_dir=project_dir) runtime.prepare_environment(install_local=True) # check that role appears as installed now result = runtime.run(["ansible-galaxy", "list"]) assert result.returncode == 0, result assert role_name in result.stdout if isolated: assert pathlib.Path(f"{runtime.cache_dir}/roles/{role_name}").is_symlink() else: assert pathlib.Path( f"{Path(runtime.config.default_roles_path[0]).expanduser()}/{role_name}", ).is_symlink() runtime.clean() def test_prepare_environment_with_collections(runtime_tmp: Runtime) -> None: """Check that collections are correctly installed.""" runtime_tmp.prepare_environment( required_collections={"community.molecule": "0.1.0"}, install_local=True, ) assert "community.molecule" in runtime_tmp.collections def test_runtime_install_requirements_missing_file() -> None: """Check that missing requirements file is ignored.""" # Do not rely on this behavior, it may be removed in the future runtime = Runtime() runtime.install_requirements(Path("/that/does/not/exist")) @pytest.mark.parametrize( ("file", "exc", "msg"), ( ( Path("/dev/null"), InvalidPrerequisiteError, "file is not a valid Ansible requirements file", ), ( Path(__file__).parent / "assets" / "requirements-invalid-collection.yml", AnsibleCommandError, "Got 1 exit code while running: ansible-galaxy", ), ( Path(__file__).parent / "assets" / "requirements-invalid-role.yml", AnsibleCommandError, "Got 1 exit code while running: ansible-galaxy", ), ), ids=("empty", "invalid-collection", "invalid-role"), ) def test_runtime_install_requirements_invalid_file( file: Path, exc: type[Any], msg: str, ) -> None: """Check that invalid 
requirements file is raising.""" runtime = Runtime() with pytest.raises( exc, match=msg, ): runtime.install_requirements(file) @contextmanager def cwd(path: Path) -> Iterator[None]: """Context manager for temporary changing current working directory.""" old_pwd = Path.cwd() os.chdir(path) try: yield finally: os.chdir(old_pwd) def test_prerun_reqs_v1(caplog: pytest.LogCaptureFixture) -> None: """Checks that the linter can auto-install requirements v1 when found.""" path = Path(__file__).parent.parent / "examples" / "reqs_v1" runtime = Runtime(project_dir=path, verbosity=1) with cwd(path): runtime.prepare_environment() assert any( msg.startswith("Running ansible-galaxy role install") for msg in caplog.messages ) assert all( "Running ansible-galaxy collection install" not in msg for msg in caplog.messages ) def test_prerun_reqs_v2(caplog: pytest.LogCaptureFixture) -> None: """Checks that the linter can auto-install requirements v2 when found.""" path = (Path(__file__).parent.parent / "examples" / "reqs_v2").resolve() runtime = Runtime(project_dir=path, verbosity=1) with cwd(path): runtime.prepare_environment() assert any( msg.startswith("Running ansible-galaxy role install") for msg in caplog.messages ) assert any( msg.startswith("Running ansible-galaxy collection install") for msg in caplog.messages ) def test_prerun_reqs_broken() -> None: """Checks that the we report invalid requirements.yml file.""" path = (Path(__file__).parent.parent / "examples" / "reqs_broken").resolve() runtime = Runtime(project_dir=path, verbosity=1) with cwd(path), pytest.raises(InvalidPrerequisiteError): runtime.prepare_environment() def test__update_env_no_old_value_no_default_no_value(monkeypatch: MonkeyPatch) -> None: """Make sure empty value does not touch environment.""" monkeypatch.delenv("DUMMY_VAR", raising=False) runtime = Runtime() runtime._update_env("DUMMY_VAR", []) assert "DUMMY_VAR" not in runtime.environ def test__update_env_no_old_value_no_value(monkeypatch: MonkeyPatch) -> None: """Make sure empty value does not touch environment.""" monkeypatch.delenv("DUMMY_VAR", raising=False) runtime = Runtime() runtime._update_env("DUMMY_VAR", [], "a:b") assert "DUMMY_VAR" not in runtime.environ def test__update_env_no_default_no_value(monkeypatch: MonkeyPatch) -> None: """Make sure empty value does not touch environment.""" monkeypatch.setenv("DUMMY_VAR", "a:b") runtime = Runtime() runtime._update_env("DUMMY_VAR", []) assert runtime.environ["DUMMY_VAR"] == "a:b" @pytest.mark.parametrize( ("value", "result"), ( (["a"], "a"), (["a", "b"], "a:b"), (["a", "b", "c"], "a:b:c"), ), ) def test__update_env_no_old_value_no_default( monkeypatch: MonkeyPatch, value: list[str], result: str, ) -> None: """Values are concatenated using : as the separator.""" monkeypatch.delenv("DUMMY_VAR", raising=False) runtime = Runtime() runtime._update_env("DUMMY_VAR", value) assert runtime.environ["DUMMY_VAR"] == result @pytest.mark.parametrize( ("default", "value", "result"), ( ("a:b", ["c"], "c:a:b"), ("a:b", ["c:d"], "c:d:a:b"), ), ) def test__update_env_no_old_value( monkeypatch: MonkeyPatch, default: str, value: list[str], result: str, ) -> None: """Values are appended to default value.""" monkeypatch.delenv("DUMMY_VAR", raising=False) runtime = Runtime() runtime._update_env("DUMMY_VAR", value, default) assert runtime.environ["DUMMY_VAR"] == result @pytest.mark.parametrize( ("old_value", "value", "result"), ( ("a:b", ["c"], "c:a:b"), ("a:b", ["c:d"], "c:d:a:b"), ), ) def test__update_env_no_default( monkeypatch: MonkeyPatch, 
old_value: str, value: list[str], result: str, ) -> None: """Values are appended to preexisting value.""" monkeypatch.setenv("DUMMY_VAR", old_value) runtime = Runtime() runtime._update_env("DUMMY_VAR", value) assert runtime.environ["DUMMY_VAR"] == result @pytest.mark.parametrize( ("old_value", "default", "value", "result"), ( ("", "", ["e"], "e"), ("a", "", ["e"], "e:a"), ("", "c", ["e"], "e"), ("a", "c", ["e:f"], "e:f:a"), ), ) def test__update_env( monkeypatch: MonkeyPatch, old_value: str, default: str, # pylint: disable=unused-argument # noqa: ARG001 value: list[str], result: str, ) -> None: """Defaults are ignored when preexisting value is present.""" monkeypatch.setenv("DUMMY_VAR", old_value) runtime = Runtime() runtime._update_env("DUMMY_VAR", value) assert runtime.environ["DUMMY_VAR"] == result def test_require_collection_wrong_version(runtime: Runtime) -> None: """Tests behavior of require_collection.""" subprocess.check_output( [ "ansible-galaxy", "collection", "install", "examples/reqs_v2/community-molecule-0.1.0.tar.gz", "-p", "~/.ansible/collections", ], ) with pytest.raises(InvalidPrerequisiteError) as pytest_wrapped_e: runtime.require_collection("community.molecule", "9999.9.9") assert pytest_wrapped_e.type == InvalidPrerequisiteError assert pytest_wrapped_e.value.code == INVALID_PREREQUISITES_RC def test_require_collection_invalid_name(runtime: Runtime) -> None: """Check that require_collection raise with invalid collection name.""" with pytest.raises( InvalidPrerequisiteError, match="Invalid collection name supplied:", ): runtime.require_collection("that-is-invalid") def test_require_collection_invalid_collections_path(runtime: Runtime) -> None: """Check that require_collection raise with invalid collections path.""" runtime.config.collections_paths = "/that/is/invalid" # type: ignore[assignment] with pytest.raises( InvalidPrerequisiteError, match="Unable to determine ansible collection paths", ): runtime.require_collection("community.molecule") def test_require_collection_preexisting_broken(runtime_tmp: Runtime) -> None: """Check that require_collection raise with broken pre-existing collection.""" dest_path: str = runtime_tmp.config.collections_paths[0] dest = pathlib.Path(dest_path) / "ansible_collections" / "foo" / "bar" dest.mkdir(parents=True, exist_ok=True) with pytest.raises(InvalidPrerequisiteError, match=r"missing MANIFEST.json"): runtime_tmp.require_collection("foo.bar") def test_require_collection_install(runtime_tmp: Runtime) -> None: """Check that require collection successful install case, including upgrade path.""" runtime_tmp.install_collection("ansible.posix:==1.5.2") runtime_tmp.load_collections() collection = runtime_tmp.collections["ansible.posix"] assert collection.version == "1.5.2" runtime_tmp.require_collection(name="ansible.posix", version="1.5.4", install=True) runtime_tmp.load_collections() collection = runtime_tmp.collections["ansible.posix"] assert Version(collection.version) >= Version("1.5.4") @pytest.mark.parametrize( ("name", "version", "install"), ( ("fake_namespace.fake_name", None, True), ("fake_namespace.fake_name", "9999.9.9", True), ("fake_namespace.fake_name", None, False), ), ids=("a", "b", "c"), ) def test_require_collection_missing( name: str, version: str, install: bool, runtime: Runtime, ) -> None: """Tests behavior of require_collection, missing case.""" with pytest.raises(AnsibleCompatError) as pytest_wrapped_e: runtime.require_collection(name=name, version=version, install=install) assert pytest_wrapped_e.type == 
InvalidPrerequisiteError assert pytest_wrapped_e.value.code == INVALID_PREREQUISITES_RC def test_install_collection(runtime: Runtime) -> None: """Check that valid collection installs do not fail.""" runtime.install_collection("examples/reqs_v2/community-molecule-0.1.0.tar.gz") def test_install_collection_git(runtime: Runtime) -> None: """Check that valid collection installs do not fail.""" runtime.install_collection( "git+https://github.com/ansible-collections/ansible.posix,main", ) def test_install_collection_dest(runtime: Runtime, tmp_path: pathlib.Path) -> None: """Check that valid collection to custom destination passes.""" # Since Ansible 2.15.3 there is no guarantee that this will install the collection at requested path # as it might decide to not install anything if requirement is already present at another location. runtime.install_collection( "examples/reqs_v2/community-molecule-0.1.0.tar.gz", destination=tmp_path, ) runtime.load_collections() for collection in runtime.collections: if collection == "community.molecule": return msg = "Failed to find collection as installed." raise AssertionError(msg) def test_install_collection_fail(runtime: Runtime) -> None: """Check that invalid collection install fails.""" with pytest.raises(AnsibleCompatError) as pytest_wrapped_e: runtime.install_collection("community.molecule:>=9999.0") assert pytest_wrapped_e.type == InvalidPrerequisiteError assert pytest_wrapped_e.value.code == INVALID_PREREQUISITES_RC def test_install_galaxy_role(runtime_tmp: Runtime) -> None: """Check install role with empty galaxy file.""" pathlib.Path(f"{runtime_tmp.project_dir}/galaxy.yml").touch() pathlib.Path(f"{runtime_tmp.project_dir}/meta").mkdir() pathlib.Path(f"{runtime_tmp.project_dir}/meta/main.yml").touch() # this should only raise a warning runtime_tmp._install_galaxy_role(runtime_tmp.project_dir, role_name_check=1) # this should test the bypass role name check path runtime_tmp._install_galaxy_role(runtime_tmp.project_dir, role_name_check=2) # this should raise an error with pytest.raises( InvalidPrerequisiteError, match="does not follow current galaxy requirements", ): runtime_tmp._install_galaxy_role(runtime_tmp.project_dir, role_name_check=0) def test_install_galaxy_role_unlink( caplog: pytest.LogCaptureFixture, ) -> None: """Test ability to unlink incorrect symlinked roles.""" runtime_tmp = Runtime(verbosity=1, isolated=True) runtime_tmp.prepare_environment() assert runtime_tmp.cache_dir is not None pathlib.Path(f"{runtime_tmp.cache_dir}/roles").mkdir(parents=True, exist_ok=True) roledir = pathlib.Path(f"{runtime_tmp.cache_dir}/roles/acme.get_rich") if not roledir.exists(): roledir.symlink_to("/dev/null") pathlib.Path(f"{runtime_tmp.project_dir}/meta").mkdir(exist_ok=True) pathlib.Path(f"{runtime_tmp.project_dir}/meta/main.yml").write_text( """galaxy_info: role_name: get_rich namespace: acme """, encoding="utf-8", ) runtime_tmp._install_galaxy_role(runtime_tmp.project_dir) assert "symlink to current repository" in caplog.text pathlib.Path(f"{runtime_tmp.project_dir}/meta/main.yml").unlink() def test_install_galaxy_role_bad_namespace(runtime_tmp: Runtime) -> None: """Check install role with bad namespace in galaxy info.""" pathlib.Path(f"{runtime_tmp.project_dir}/meta").mkdir() pathlib.Path(f"{runtime_tmp.project_dir}/meta/main.yml").write_text( """galaxy_info: role_name: foo author: bar namespace: ["xxx"] """, encoding="utf-8", ) # this should raise an error regardless the role_name_check value with pytest.raises(AnsibleCompatError, match="Role namespace 
must be string, not"): runtime_tmp._install_galaxy_role(runtime_tmp.project_dir, role_name_check=1) def test_install_galaxy_role_no_meta(runtime_tmp: Runtime) -> None: """Check install role with missing meta/main.yml.""" # This should fail because meta/main.yml is missing with pytest.raises( FileNotFoundError, match=f"No such file or directory: '{runtime_tmp.project_dir.absolute()}/meta/main.yaml'", ): runtime_tmp._install_galaxy_role(runtime_tmp.project_dir) # But ignore_errors will return without doing anything runtime_tmp._install_galaxy_role(runtime_tmp.project_dir, ignore_errors=True) @pytest.mark.parametrize( "galaxy_info", ( """galaxy_info: role_name: foo-bar namespace: acme """, """galaxy_info: role_name: foo-bar """, ), ids=("bad-name", "bad-name-without-namespace"), ) def test_install_galaxy_role_name_role_name_check_equals_to_1( runtime_tmp: Runtime, galaxy_info: str, caplog: pytest.LogCaptureFixture, ) -> None: """Check install role with bad role name in galaxy info.""" caplog.set_level(logging.WARNING) pathlib.Path(f"{runtime_tmp.project_dir}/meta").mkdir() pathlib.Path(f"{runtime_tmp.project_dir}/meta/main.yml").write_text( galaxy_info, encoding="utf-8", ) runtime_tmp._install_galaxy_role(runtime_tmp.project_dir, role_name_check=1) assert "Computed fully qualified role name of " in caplog.text def test_install_galaxy_role_no_checks(runtime_tmp: Runtime) -> None: """Check install role with bad namespace in galaxy info.""" runtime_tmp.prepare_environment() pathlib.Path(f"{runtime_tmp.project_dir}/meta").mkdir() pathlib.Path(f"{runtime_tmp.project_dir}/meta/main.yml").write_text( """galaxy_info: role_name: foo author: bar namespace: acme """, encoding="utf-8", ) runtime_tmp._install_galaxy_role(runtime_tmp.project_dir, role_name_check=2) result = runtime_tmp.run(["ansible-galaxy", "list"]) assert "- acme.foo," in result.stdout assert result.returncode == 0, result def test_upgrade_collection(runtime_tmp: Runtime) -> None: """Check that collection upgrade is possible.""" # ensure that we inject our tmp folders in ansible paths runtime_tmp.prepare_environment() # we install specific outdated version of a collection runtime_tmp.install_collection("examples/reqs_v2/community-molecule-0.1.0.tar.gz") with pytest.raises( InvalidPrerequisiteError, match=r"Found community.molecule collection 0.1.0 but 9.9.9 or newer is required.", ): # we check that when install=False, we raise error runtime_tmp.require_collection("community.molecule", "9.9.9", install=False) # this should not fail, as we have this version runtime_tmp.require_collection("community.molecule", "0.1.0") def test_require_collection_not_isolated() -> None: """Check require_collection without a cache directory.""" runtime = Runtime(isolated=False) runtime.require_collection("community.molecule", "0.1.0", install=True) def test_runtime_env_ansible_library(monkeypatch: MonkeyPatch) -> None: """Verify that custom path specified using ANSIBLE_LIBRARY is not lost.""" path_name = "foo" monkeypatch.setenv("ANSIBLE_LIBRARY", path_name) path_name = os.path.realpath(path_name) runtime = Runtime() runtime.prepare_environment() assert path_name in runtime.config.default_module_path @pytest.mark.parametrize( ("lower", "upper", "expected"), ( ("1.0", "9999.0", True), (None, "9999.0", True), ("1.0", None, True), ("9999.0", None, False), (None, "1.0", False), ), ids=("1", "2", "3", "4", "5"), ) def test_runtime_version_in_range( lower: str | None, upper: str | None, expected: bool, ) -> None: """Validate functioning of version_in_range.""" 
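    # These cases assume the ambient ansible-core release sits between 1.0 and 9999.0,
    # so a lower bound above it or an upper bound below it must report False.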
runtime = Runtime() assert runtime.version_in_range(lower=lower, upper=upper) is expected @pytest.mark.parametrize( ("path", "scenario", "expected_collections"), ( pytest.param( "test/collections/acme.goodies", "default", [ "ansible.posix", # from tests/requirements.yml "ansible.utils", # from galaxy.yml "community.molecule", # from galaxy.yml "community.crypto", # from galaxy.yml as a git dependency ], id="normal", ), pytest.param( "test/collections/acme.goodies/roles/baz", "deep_scenario", ["community.molecule"], id="deep", ), ), ) def test_install_collection_from_disk( path: str, scenario: str, expected_collections: list[str], ) -> None: """Tests ability to install a local collection.""" # ensure we do not have acme.goodies installed in user directory as it may # produce false positives rmtree( pathlib.Path( "~/.ansible/collections/ansible_collections/acme/goodies", ).expanduser(), ignore_errors=True, ) with cwd(Path(path)): runtime = Runtime(isolated=True) # this should call install_collection_from_disk(".") runtime.prepare_environment(install_local=True) # that molecule converge playbook can be used without molecule and # should validate that the installed collection is available. result = runtime.run(["ansible-playbook", f"molecule/{scenario}/converge.yml"]) assert result.returncode == 0, result.stdout runtime.load_collections() for collection_name in expected_collections: assert ( collection_name in runtime.collections ), f"{collection_name} not found in {runtime.collections.keys()}" runtime.clean() @pytest.mark.parametrize( ("path", "expected_plugins"), ( pytest.param( "test/collections/acme.goodies", [ "ansible.posix.patch", # from tests/requirements.yml "community.crypto.acme_account", # from galaxy.yml as a git dependency ], id="modules", ), ), ) def test_load_plugins( path: str, expected_plugins: list[str], ) -> None: """Tests ability to load plugin from a collection installed by requirement.""" with cwd(Path(path)): runtime = Runtime(isolated=True, require_module=True) runtime.prepare_environment(install_local=True) for plugin_name in expected_plugins: assert ( plugin_name in runtime.plugins.module ), f"Unable to load module {plugin_name}" runtime.clean() def test_install_collection_from_disk_fail() -> None: """Tests that we fail to install a broken collection.""" with cwd(Path("test/collections/acme.broken")): runtime = Runtime(isolated=True) with pytest.raises(RuntimeError) as exc_info: runtime.prepare_environment(install_local=True) # based on version of Ansible used, we might get a different error, # but both errors should be considered acceptable assert exc_info.type in { RuntimeError, AnsibleCompatError, AnsibleCommandError, InvalidPrerequisiteError, } assert exc_info.match( "(is missing the following mandatory|Got 1 exit code while running: ansible-galaxy collection build)", ) def test_load_collections_failure(mocker: MockerFixture) -> None: """Tests for ansible-galaxy erroring.""" mocker.patch( "ansible_compat.runtime.Runtime.run", return_value=CompletedProcess( ["x"], returncode=1, stdout="There was an error", stderr="This is the error", ), autospec=True, ) runtime = Runtime() with pytest.raises(RuntimeError, match="Unable to list collections: "): runtime.load_collections() @pytest.mark.parametrize( "value", ("[]", '{"path": "bad data"}', '{"path": {"ansible.posix": 123}}'), ids=["list", "malformed_collection", "bad_collection_data"], ) def test_load_collections_garbage(value: str, mocker: MockerFixture) -> None: """Tests for ansible-galaxy returning bad data.""" 
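    # Each mocked listing payload below parses as JSON but has the wrong shape, so
    # load_collections() is expected to reject it with "Unexpected collection data".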
mocker.patch( "ansible_compat.runtime.Runtime.run", return_value=CompletedProcess( ["x"], returncode=0, stdout=value, stderr="", ), autospec=True, ) runtime = Runtime() with pytest.raises(TypeError, match="Unexpected collection data, "): runtime.load_collections() @pytest.mark.parametrize( "value", ("", '{"path": {123: 456}}'), ids=["nothing", "bad_collection_name"], ) def test_load_collections_invalid_json(value: str, mocker: MockerFixture) -> None: """Tests for ansible-galaxy returning bad data.""" mocker.patch( "ansible_compat.runtime.Runtime.run", return_value=CompletedProcess( ["x"], returncode=0, stdout=value, stderr="", ), autospec=True, ) runtime = Runtime() with pytest.raises( RuntimeError, match=f"Unable to parse galaxy output as JSON: {value}", ): runtime.load_collections() def test_prepare_environment_offline_role(caplog: pytest.LogCaptureFixture) -> None: """Ensure that we can make use of offline roles.""" with cwd(Path("test/roles/acme.missing_deps")): runtime = Runtime(isolated=True) runtime.prepare_environment(install_local=True, offline=True) assert ( "Skipped installing old role dependencies due to running in offline mode." in caplog.text ) assert ( "Skipped installing collection dependencies due to running in offline mode." in caplog.text ) def test_runtime_run(runtime: Runtime) -> None: """Check if tee and non tee mode return same kind of results.""" result1 = runtime.run(["seq", "10"]) result2 = runtime.run(["seq", "10"], tee=True) assert result1.returncode == result2.returncode assert result1.stderr == result2.stderr assert result1.stdout == result2.stdout def test_runtime_exec_cwd(runtime: Runtime) -> None: """Check if passing cwd works as expected.""" path = Path("/") result1 = runtime.run(["pwd"], cwd=path) result2 = runtime.run(["pwd"]) assert result1.stdout.rstrip() == str(path) assert result1.stdout != result2.stdout def test_runtime_exec_env(runtime: Runtime) -> None: """Check if passing env works.""" result = runtime.run(["printenv", "FOO"]) assert not result.stdout result = runtime.run(["printenv", "FOO"], env={"FOO": "bar"}) assert result.stdout.rstrip() == "bar" runtime.environ["FOO"] = "bar" result = runtime.run(["printenv", "FOO"]) assert result.stdout.rstrip() == "bar" def test_runtime_plugins(runtime: Runtime) -> None: """Tests ability to access detected plugins.""" assert len(runtime.plugins.cliconf) == 0 # ansible.netcommon.restconf might be in httpapi assert isinstance(runtime.plugins.httpapi, dict) # "ansible.netcommon.default" might be in runtime.plugins.netconf assert isinstance(runtime.plugins.netconf, dict) assert isinstance(runtime.plugins.role, dict) assert "become" in runtime.plugins.keyword assert "ansible.builtin.sudo" in runtime.plugins.become assert "ansible.builtin.memory" in runtime.plugins.cache assert "ansible.builtin.default" in runtime.plugins.callback assert "ansible.builtin.local" in runtime.plugins.connection assert "ansible.builtin.ini" in runtime.plugins.inventory assert "ansible.builtin.env" in runtime.plugins.lookup assert "ansible.builtin.sh" in runtime.plugins.shell assert "ansible.builtin.host_group_vars" in runtime.plugins.vars assert "ansible.builtin.file" in runtime.plugins.module assert "ansible.builtin.free" in runtime.plugins.strategy assert "ansible.builtin.is_abs" in runtime.plugins.test assert "ansible.builtin.bool" in runtime.plugins.filter @pytest.mark.parametrize( ("path", "result"), ( pytest.param( Path("test/assets/galaxy_paths"), [Path("test/assets/galaxy_paths/foo/galaxy.yml").resolve()], id="1", ), 
pytest.param( Path("test/collections"), [], # should find nothing because these folders are not valid namespaces id="2", ), pytest.param( Path("test/assets/galaxy_paths/foo"), [Path("test/assets/galaxy_paths/foo/galaxy.yml").resolve()], id="3", ), ), ) def test_galaxy_path(path: Path, result: list[Path]) -> None: """Check behavior of galaxy path search.""" assert search_galaxy_paths(path) == result @pytest.mark.parametrize( ("name", "result"), ( pytest.param( "foo", False, id="0", ), pytest.param( "git+git", True, id="1", ), pytest.param( "git@acme.com", True, id="2", ), ), ) def test_is_url(name: str, result: bool) -> None: """Checks functionality of is_url.""" assert is_url(name) == result @pytest.mark.parametrize( ("dest", "message"), ( ("/invalid/destination", "Collection is symlinked, but not pointing to"), (Path.cwd(), "Found symlinked collection, skipping its installation."), ), ids=["broken", "valid"], ) def test_prepare_environment_symlink( dest: str | Path, message: str, caplog: pytest.LogCaptureFixture, ) -> None: """Ensure avalid symlinks to collections are properly detected.""" project_dir = Path(__file__).parent / "collections" / "acme.minimal" runtime = Runtime(isolated=True, project_dir=project_dir) assert runtime.cache_dir acme = runtime.cache_dir / "collections" / "ansible_collections" / "acme" acme.mkdir(parents=True, exist_ok=True) goodies = acme / "minimal" rmtree(goodies, ignore_errors=True) goodies.unlink(missing_ok=True) goodies.symlink_to(dest) runtime.prepare_environment(install_local=True) assert message in caplog.text def test_get_galaxy_role_name_invalid() -> None: """Verifies that function returns empty string on invalid input.""" galaxy_infos = { "role_name": False, # <-- invalid data, should be string } assert not _get_galaxy_role_name(galaxy_infos) def test_runtime_has_playbook() -> None: """Tests has_playbook method.""" runtime = Runtime(require_module=True) runtime.prepare_environment( required_collections={"community.molecule": "0.1.0"}, install_local=True, ) assert not runtime.has_playbook("this-does-not-exist.yml") # call twice to ensure cache is used: assert not runtime.has_playbook("this-does-not-exist.yml") assert not runtime.has_playbook("this-does-not-exist.yml", basedir=Path()) # this is part of community.molecule collection assert runtime.has_playbook("community.molecule.validate.yml") def test_runtime_exception(monkeypatch: pytest.MonkeyPatch) -> None: """Asserts that we raise a runtime exception if unsupported environment variable is detected.""" monkeypatch.setenv("ANSIBLE_COLLECTIONS_PATHS", "foo") with pytest.raises( RuntimeError, match=r"ANSIBLE_COLLECTIONS_PATHS was detected, replace it with ANSIBLE_COLLECTIONS_PATH to continue.", ): Runtime() ansible-compat-25.1.4/test/test_runtime_example.py000066400000000000000000000016471475513774500223210ustar00rootroot00000000000000"""Sample use of Runtime class.""" from ansible_compat.runtime import Runtime def test_runtime_example() -> None: """Test basic functionality of Runtime class.""" # instantiate the runtime using isolated mode, so installing new # roles/collections do not pollute the default setup. 
runtime = Runtime(isolated=True, max_retries=3) # Print Ansible core version _ = runtime.version # 2.9.10 (Version object) # Get configuration info from runtime _ = runtime.config.collections_path # Detect if current project is a collection and install its requirements runtime.prepare_environment(install_local=True) # will retry 3 times if needed # Install a new collection (will retry 3 times if needed) runtime.install_collection("examples/reqs_v2/community-molecule-0.1.0.tar.gz") # Execute a command result = runtime.run(["ansible-doc", "--list"]) assert result.returncode == 0 ansible-compat-25.1.4/test/test_runtime_scan_path.py000066400000000000000000000103671475513774500226250ustar00rootroot00000000000000"""Test the scan path functionality of the runtime.""" import json import os import subprocess import textwrap from pathlib import Path import pytest from _pytest.monkeypatch import MonkeyPatch from ansible_compat.runtime import Runtime from .conftest import VirtualEnvironment V2_COLLECTION_TARBALL = Path("examples/reqs_v2/community-molecule-0.1.0.tar.gz") V2_COLLECTION_NAMESPACE = "community" V2_COLLECTION_NAME = "molecule" V2_COLLECTION_VERSION = "0.1.0" V2_COLLECTION_FULL_NAME = f"{V2_COLLECTION_NAMESPACE}.{V2_COLLECTION_NAME}" @pytest.mark.parametrize( ("scan", "raises_not_found"), ( pytest.param(False, True, id="disabled"), pytest.param(True, False, id="enabled"), ), ids=str, ) def test_scan_sys_path( venv_module: VirtualEnvironment, monkeypatch: MonkeyPatch, tmp_path: Path, scan: bool, raises_not_found: bool, ) -> None: """Confirm sys path is scanned for collections. Args: venv_module: Fixture for a virtual environment monkeypatch: Fixture for monkeypatching tmp_path: Fixture for a temporary directory scan: Whether to scan the sys path raises_not_found: Whether the collection is expected to be found """ # Isolated the test from the others, so ansible will not find collections # that might be installed by other tests. 
monkeypatch.setenv("VIRTUAL_ENV", venv_module.project.as_posix()) monkeypatch.setenv("ANSIBLE_HOME", tmp_path.as_posix()) # Set the sys scan path environment variable monkeypatch.setenv("ANSIBLE_COLLECTIONS_SCAN_SYS_PATH", str(scan)) # Set the ansible collections paths to avoid bleed from other tests monkeypatch.setenv("ANSIBLE_COLLECTIONS_PATH", str(tmp_path)) runtime_tmp = Runtime(project_dir=tmp_path, isolated=True) first_site_package_dir = venv_module.site_package_dirs()[0] installed_to = ( first_site_package_dir / "ansible_collections" / V2_COLLECTION_NAMESPACE / V2_COLLECTION_NAME ) if not installed_to.exists(): # Install the collection into the venv site packages directory, force # as of yet this test is not isolated from the rest of the system runtime_tmp.install_collection( collection=V2_COLLECTION_TARBALL, destination=first_site_package_dir, force=True, ) # Confirm the collection is installed assert installed_to.exists() script = textwrap.dedent( f""" import json; from ansible_compat.runtime import Runtime; r = Runtime(); fv, cp = r.require_collection(name="{V2_COLLECTION_FULL_NAME}", version="{V2_COLLECTION_VERSION}", install=False); print(json.dumps({{"found_version": str(fv), "collection_path": str(cp)}})); """, ) proc = venv_module.python_script_run(script) if raises_not_found: assert proc.returncode != 0, (proc.stdout, proc.stderr) assert "InvalidPrerequisiteError" in proc.stderr assert "'community.molecule' not found" in proc.stderr else: assert proc.returncode == 0, (proc.stdout, proc.stderr) result = json.loads(proc.stdout) assert result["found_version"] == V2_COLLECTION_VERSION assert result["collection_path"] == str(installed_to) runtime_tmp.clean() def test_ro_venv() -> None: """Tests behavior when the virtual environment is read-only. 
See Related https://github.com/ansible/ansible-compat/pull/470 """ tox_work_dir = os.environ.get("TOX_WORK_DIR", ".tox") venv_path = f"{tox_work_dir}/ro" commands = [ f"mkdir -p {venv_path}", f"chmod -R a+w {venv_path}", f"python -m venv --symlinks {venv_path}", f"{venv_path}/bin/python -m pip install -q -e .", f"chmod -R a-w {venv_path}", f"{venv_path}/bin/python -c \"from ansible_compat.runtime import Runtime; r = Runtime(); r.install_collection('ansible.posix:>=2.0.0')\"", ] for cmd in commands: result = subprocess.run( # noqa: S602 cmd, check=False, shell=True, text=True, capture_output=True, ) assert ( result.returncode == 0 ), f"Got {result.returncode} running {cmd}\n\tstderr: {result.stderr}\n\tstdout: {result.stdout}" ansible-compat-25.1.4/test/test_schema.py000066400000000000000000000052621475513774500203600ustar00rootroot00000000000000"""Tests for schema utilities.""" from __future__ import annotations import json from pathlib import Path from typing import TYPE_CHECKING, Any import pytest from ansible_compat.schema import JsonSchemaError, json_path, validate if TYPE_CHECKING: from ansible_compat.types import JSON expected_results = [ JsonSchemaError( message="False is not of type 'string'", data_path="environment.a", json_path="$.environment.a", schema_path="properties.environment.additionalProperties.type", relative_schema='{"type": "string"}', expected="string", validator="type", found="False", ), JsonSchemaError( message="True is not of type 'string'", data_path="environment.b", json_path="$.environment.b", schema_path="properties.environment.additionalProperties.type", relative_schema='{"type": "string"}', expected="string", validator="type", found="True", ), ] def json_from_asset(file_name: str) -> JSON: """Load a json file from disk.""" file = Path(__file__).parent / file_name with file.open(encoding="utf-8") as f: return json.load(f) # type: ignore[no-any-return] def jsonify(data: Any) -> JSON: # noqa: ANN401 """Convert object in JSON data structure.""" return json.loads(json.dumps(data, default=vars, sort_keys=True)) # type: ignore[no-any-return] @pytest.mark.parametrize("index", range(1)) def test_schema(index: int) -> None: """Test the schema validator.""" schema = json_from_asset(f"assets/validate{index}_schema.json") data = json_from_asset(f"assets/validate{index}_data.json") expected = json_from_asset(f"assets/validate{index}_expected.json") # ensure we produce consistent results between runs for _ in range(1, 100): found_errors = validate(schema=schema, data=data) # ensure returned results are already sorted, as we assume our class # knows how to sort itself assert sorted(found_errors) == found_errors, "multiple errors not sorted" found_errors_json = jsonify(found_errors) assert ( found_errors_json == expected ), f"inconsistent returns: {found_errors_json}" def test_json_path() -> None: """Test json_path function.""" assert json_path(["a", 1, "b"]) == "$.a[1].b" def test_validate_invalid_schema() -> None: """Test validate function error handling.""" schema = "[]" data = json_from_asset("assets/validate0_data.json") errors = validate(schema, data) assert len(errors) == 1 assert ( errors[0].to_friendly() == "In 'schema sanity check': Invalid schema, must be a mapping." 
) ansible-compat-25.1.4/test/test_types.py000066400000000000000000000003221475513774500202540ustar00rootroot00000000000000"""Tests for types module.""" import ansible_compat.types def test_types() -> None: """Tests that JSON types are exported.""" assert ansible_compat.types.JSON assert ansible_compat.types.JSON_ro ansible-compat-25.1.4/test/test_version.py000066400000000000000000000010161475513774500205760ustar00rootroot00000000000000"""Tests for _version module.""" def test_version_module() -> None: """Tests that _version exports are present.""" # import kept here to allow mypy/pylint to run when module is not installed # and the generated _version.py is missing. # pylint: disable=no-name-in-module,no-member import ansible_compat._version # type: ignore[import-not-found,unused-ignore] assert ansible_compat._version.__version__ assert ansible_compat._version.__version_tuple__ assert ansible_compat._version.version ansible-compat-25.1.4/tools/000077500000000000000000000000001475513774500156635ustar00rootroot00000000000000ansible-compat-25.1.4/tools/get-version.sh000077500000000000000000000005711475513774500204670ustar00rootroot00000000000000#!/bin/bash set -e { python3 -c "import setuptools_scm" >/dev/null || { if [[ "$VIRTUAL_ENV" != "" ]]; then PIPARGS="" else PIPARGS="--user" fi python3 -m pip install $PIPARGS setuptools-scm } } 1>&2 # redirect stdout to stderr to avoid polluting the output python3 -m setuptools_scm | \ sed 's/Guessed Version\([^+]\+\).*/\1/' ansible-compat-25.1.4/tools/smoke.py000077500000000000000000000031451475513774500173610ustar00rootroot00000000000000#!python3 """Runs downstream projects tests with current code from compat injected in them.""" import hashlib import logging import os import tempfile from pathlib import Path from subprocess import run # noqa: S404 logging.basicConfig( level=logging.DEBUG, format="%(levelname)s: %(message)s", ) logger = logging.getLogger() parent_project_dir = Path(__file__).parent.parent.resolve().as_posix() checksum = hashlib.sha256(parent_project_dir.encode("utf-8")).hexdigest()[:4] tmp_path = Path(tempfile.gettempdir()) / f"ansible-compat-smoke-{checksum}" logger.info("Using %s temporary directory...", tmp_path) for project in ("molecule", "ansible-lint"): logger.info("Running tests for %s", project) project_dir = tmp_path / project if (project_dir / ".git").exists(): run(["git", "-C", project_dir, "pull"], check=True) else: project_dir.mkdir(parents=True, exist_ok=True) run( [ "git", "clone", "--recursive", f"https://github.com/ansible/{project}", project_dir, ], check=True, ) os.chdir(project_dir) venv_dir = (project_dir / ".venv").as_posix() os.environ["VIRTUAL_ENV"] = venv_dir run( ["uv", "venv", "--seed", venv_dir], check=True, ) # creates .venv (implicit for next commands) run( ["uv", "pip", "install", "-e", f"{parent_project_dir}[test]", "-e", ".[test]"], check=True, ) run(["uv", "pip", "freeze"], check=True) run(["uv", "run", "pytest", "-v", "-n", "auto"], check=True) ansible-compat-25.1.4/tools/update-spec.sh000077500000000000000000000012721475513774500204360ustar00rootroot00000000000000#!/bin/bash DIR=$(dirname "$0") VERSION=$(./tools/get-version.sh) mkdir -p "${DIR}/../dist" sed -e "s/VERSION_PLACEHOLDER/${VERSION}/" \ "${DIR}/../.config/python3-ansible-compat.spec" \ > "${DIR}/../dist/python3-ansible-compat.spec" export LC_ALL=en_US.UTF-8 CHANGELOG=$(git log -n 20 --pretty="* %ad %an %ae \n- %s\n" --date=format:"%a %b %d %Y") NUM=$(grep -nr "%changelog" ${DIR}/../dist/python3-ansible-compat.spec|awk -F':' '{print 
$1}') let NUM_START=$NUM+1 NUM_END=$(awk '{print NR}' ${DIR}/../dist/python3-ansible-compat.spec|tail -n1) sed -i "${NUM_START},${NUM_END}d" ${DIR}/../dist/python3-ansible-compat.spec echo -e "$CHANGELOG" >> ${DIR}/../dist/python3-ansible-compat.spec ansible-compat-25.1.4/tox.ini000066400000000000000000000123231475513774500160370ustar00rootroot00000000000000[tox] envlist = lint pkg docs py py-devel py310-ansible216 py310-ansible217 py311-ansible216 py311-ansible217 py312-ansible216 py312-ansible217 py312-ansible218 py313-ansible218 isolated_build = true skip_missing_interpreters = True requires = tox >= 4.24.1 tox-uv >= 1.20.1 setuptools >= 65.3.0 # editable installs [testenv] description = Run the tests devel: ansible devel branch ansible216: ansible-core 2.16 ansible217: ansible-core 2.17 ansible218: ansible-core 2.18 deps = ansible216: ansible-core>=2.16,<2.17 ansible217: ansible-core>=2.17,<2.18 ansible218: ansible-core>=2.18,<2.19 devel: ansible-core @ git+https://github.com/ansible/ansible.git@devel # GPLv3+ # avoid installing ansible-core on -devel envs: !devel: ansible-core extras = test commands_pre = # safety measure to ensure we do not accidentally run tests with broken dependencies !{docs}: {envpython} -m pip check # cleaning needed to prevent errors between runs sh -c "rm -f {envdir}/.coverage.* 2>/dev/null || true" commands = sh -c "ansible --version | head -n 1" # We add coverage options but do not make them mandatory, as we do not want to force # pytest users to run coverage when they just want to run a single test with `pytest -k test` coverage run -m pytest {posargs:} # needed for upload to codecov.io {py,py310,py311,py312,py313}: sh -c "coverage combine -q --data-file={envdir}/.coverage {envdir}/.coverage.* && coverage xml --data-file={envdir}/.coverage -o {envdir}/coverage.xml --ignore-errors --fail-under=0 && COVERAGE_FILE={envdir}/.coverage coverage lcov --fail-under=0 --ignore-errors -q && COVERAGE_FILE={envdir}/.coverage coverage report --fail-under=0 --ignore-errors" # lcov needed for vscode integration due to https://github.com/ryanluker/vscode-coverage-gutters/issues/403 passenv = CURL_CA_BUNDLE # https proxies, https://github.com/tox-dev/tox/issues/1437 FORCE_COLOR HOME NO_COLOR PYTEST_* # allows developer to define their own preferences PYTEST_REQPASS # needed for CI PYTHON* # PYTHONPYCACHEPREFIX, PYTHONIOENCODING, PYTHONBREAKPOINT,...
PY_COLORS RTD_TOKEN REQUESTS_CA_BUNDLE # https proxies SETUPTOOLS_SCM_DEBUG SSL_CERT_FILE # https proxies SSH_AUTH_SOCK # may be needed by git LANG LC_* setenv = ANSIBLE_HOME = {envdir}/.ansible ANSIBLE_DEVEL_WARNING='false' COVERAGE_FILE = {env:COVERAGE_FILE:{envdir}/.coverage.{envname}} COVERAGE_PROCESS_START={toxinidir}/pyproject.toml PIP_DISABLE_PIP_VERSION_CHECK = 1 PIP_CONSTRAINT = {toxinidir}/.config/constraints.txt UV_CONSTRAINT = {toxinidir}/.config/constraints.txt PRE_COMMIT_COLOR = always FORCE_COLOR = 1 allowlist_externals = ansible git sh # https://tox.wiki/en/latest/upgrading.html#editable-mode package = editable uv_seed = true [testenv:lint] description = Run all linters # locked basepython is needed to keep constraints.txt predictable basepython = python3.10 deps = pip pre-commit>=4.0.1 pre-commit-uv>=1.15.0 skip_install = true usedevelop = false commands = pre-commit run -a --show-diff-on-failure {posargs:} pre-commit run -a pip-compile passenv = {[testenv]passenv} PRE_COMMIT_HOME setenv = {[testenv]setenv} PIP_CONSTRAINT = /dev/null UV_CONSTRAINT = /dev/null [testenv:deps] description = Bump all test dependencies basepython = {[testenv:lint]basepython} envdir = {toxworkdir}/lint deps = {[testenv:lint]deps} skip_install = true commands = pre-commit run -a --hook-stage manual pip-compile-upgrade {[testenv:lint]commands} setenv = {[testenv]setenv} PIP_CONSTRAINT = /dev/null UV_CONSTRAINT = /dev/null [testenv:pkg] description = Build package, verify metadata, install package and assert behavior when ansible is missing. deps = build >= 0.9.0 pip twine >= 4.0.1 skip_install = true # Ref: https://twitter.com/di_codes/status/1044358639081975813 commands = # build wheel and sdist using PEP-517 {envpython} -c 'import os.path, shutil, sys; \ dist_dir = os.path.join("{toxinidir}", "dist"); \ os.path.isdir(dist_dir) or sys.exit(0); \ print("Removing \{!s\} contents...".format(dist_dir), file=sys.stderr); \ shutil.rmtree(dist_dir)' {envpython} -m build \ --outdir {toxinidir}/dist/ \ {toxinidir} # Validate metadata using twine twine check --strict {toxinidir}/dist/* # Install the wheel sh -c "python3 -m pip install {toxinidir}/dist/*.whl" pip uninstall -y ansible-compat [testenv:py] description = Run the tests with {basepython} ansible-core 2.16+ deps = {[testenv]deps} ansible-core>=2.16 [testenv:rpm] description = Use packit to build RPM (requires an RPM-based Linux distro) deps = packitos commands = sh -c "packit build in-mock --root=fedora-40-$(arch)" [testenv:docs] description = Build docs commands = mkdocs {posargs:build --strict --site-dir=_readthedocs/html/} setenv = # https://squidfunk.github.io/mkdocs-material/plugins/requirements/image-processing/#troubleshooting DYLD_FALLBACK_LIBRARY_PATH = /opt/homebrew/lib:{env:LD_LIBRARY_PATH} extras = docs passenv = * [testenv:smoke] description = Run ansible-lint and molecule's own testing with current code from the compat library commands = python3 tools/smoke.py del_env = PIP_CONSTRAINT UV_CONSTRAINT editable = true skip_install = true
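# Illustrative usage only, not part of the packaged tox.ini: the environments
# defined above are invoked with the standard tox 4 CLI (tox >= 4.24.1 with
# tox-uv, per the requires section). For example:
#
#   tox run -e lint                # run all pre-commit based linters
#   tox run -e py312-ansible218    # test suite on Python 3.12 with ansible-core 2.18
#   tox run -e docs                # build the documentation with mkdocs
#   tox run -e smoke               # downstream ansible-lint/molecule smoke tests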