domdf_python_tools-3.10.0/  (commit be49b1a314bfcec6e5f138cc47c1a1eee2ec1742)

domdf_python_tools-3.10.0/.bumpversion.cfg
[bumpversion]
current_version = 3.10.0
commit = True
tag = True

[bumpversion:file:domdf_python_tools/__init__.py]
search = : str = "{current_version}"
replace = : str = "{new_version}"

[bumpversion:file:README.rst]

[bumpversion:file:doc-source/index.rst]

[bumpversion:file:repo_helper.yml]

[bumpversion:file:pyproject.toml]
search = version = "{current_version}"
replace = version = "{new_version}"

[bumpversion:file:.github/workflows/conda_ci.yml]
search = ={current_version}=py_1
replace = ={new_version}=py_1

domdf_python_tools-3.10.0/.github/ISSUE_TEMPLATE/bug_report.md
---
name: Bug report
about: Create a report to help us improve
labels: bug
assignees: domdfcoding
---

## Description

## Steps to Reproduce

1.
2.
3.

## Actual result:

## Expected result:

## Reproduces how often:

## Version

* Operating System:
* Python:
* domdf_python_tools:

## Installation source

## Other Additional Information:

domdf_python_tools-3.10.0/.github/ISSUE_TEMPLATE/feature_request.md
---
name: Feature request
about: Suggest an idea for this project
labels: "enhancement"
assignees: domdfcoding
---

## Description

## Version

* Operating System:
* Python:
* domdf_python_tools:

## Other Additional Information:

domdf_python_tools-3.10.0/.github/auto_assign.yml
# This file is managed by 'repo_helper'. Don't edit it directly.
---
addReviewers: true
addAssignees: true
reviewers:
  - domdfcoding
numberOfReviewers: 0
# more settings at https://github.com/marketplace/actions/auto-assign-action

domdf_python_tools-3.10.0/.github/dependabot.yml
# This file is managed by 'repo_helper'. Don't edit it directly.
---
version: 2
updates:
  - package-ecosystem: pip
    directory: /
    schedule:
      interval: weekly
    open-pull-requests-limit: 0
    reviewers:
      - domdfcoding

domdf_python_tools-3.10.0/.github/milestones.py
#!/usr/bin/env python

# stdlib
import os
import sys

# 3rd party
from github3 import GitHub
from github3.repos import Repository
from packaging.version import InvalidVersion, Version

latest_tag = os.environ["GITHUB_REF_NAME"]

try:
	current_version = Version(latest_tag)
except InvalidVersion:
	sys.exit()

gh: GitHub = GitHub(token=os.environ["GITHUB_TOKEN"])
repo: Repository = gh.repository(*os.environ["GITHUB_REPOSITORY"].split('/', 1))

for milestone in repo.milestones(state="open"):
	try:
		milestone_version = Version(milestone.title)
	except InvalidVersion:
		continue

	if milestone_version == current_version:
		sys.exit(not milestone.update(state="closed"))

domdf_python_tools-3.10.0/.github/stale.yml
# This file is managed by 'repo_helper'. Don't edit it directly.

# Configuration for probot-stale - https://github.com/probot/stale
---
# Number of days of inactivity before an Issue or Pull Request becomes stale
daysUntilStale: 180

# Number of days of inactivity before an Issue or Pull Request with the stale label is closed.
# Set to false to disable. If disabled, issues still need to be closed manually, but will remain marked as stale.
daysUntilClose: false

# Only issues or pull requests with all of these labels are checked if stale. Defaults to `[]` (disabled)
onlyLabels: []

# Issues or Pull Requests with these labels will never be considered stale. Set to `[]` to disable
exemptLabels:
  - pinned
  - security
  - "[Status] Maybe Later"

# Set to true to ignore issues in a project (defaults to false)
exemptProjects: false

# Set to true to ignore issues in a milestone (defaults to false)
exemptMilestones: false

# Set to true to ignore issues with an assignee (defaults to false)
exemptAssignees: false

# Label to use when marking as stale
staleLabel: stale

# Comment to post when marking as stale. Set to `false` to disable
markComment: false
# This issue has been automatically marked as stale because it has not had
# recent activity. It will be closed if no further activity occurs. Thank you
# for your contributions.

# Comment to post when removing the stale label.
# unmarkComment: >
#   Your comment here.

# Comment to post when closing a stale Issue or Pull Request.
# closeComment: >
#   Your comment here.

# Limit the number of actions per hour, from 1-30. Default is 30
limitPerRun: 30

# Limit to only `issues` or `pulls`
# only: issues

# Optionally, specify configuration settings that are specific to just 'issues' or 'pulls':
# pulls:
#   daysUntilStale: 30
#   markComment: >
#     This pull request has been automatically marked as stale because it has not had
#     recent activity. It will be closed if no further activity occurs. Thank you
#     for your contributions.
# issues:
#   exemptLabels:
#     - confirmed

domdf_python_tools-3.10.0/.github/workflows/codeql-analysis.yml
# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
#
name: "CodeQL"

on:
  push:
    branches: [ master ]
  pull_request:
    # The branches below must be a subset of the branches above
    branches: [ master ]

permissions:  # added using https://github.com/step-security/secure-workflows
  contents: read

jobs:
  analyze:
    permissions:
      actions: read  # for github/codeql-action/init to get workflow details
      contents: read  # for actions/checkout to fetch code
      security-events: write  # for github/codeql-action/autobuild to send a status report
    name: Analyze
    runs-on: ubuntu-latest

    strategy:
      fail-fast: false
      matrix:
        language: [ 'python' ]
        # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python' ]
        # Learn more:
        # https://docs.github.com/en/free-pro-team@latest/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#changing-the-languages-that-are-analyzed

    steps:
      - name: Checkout repository
        uses: actions/checkout@v3

      # Initializes the CodeQL tools for scanning.
      - name: Initialize CodeQL
        uses: github/codeql-action/init@v2
        with:
          languages: ${{ matrix.language }}
          # If you wish to specify custom queries, you can do so here or in a config file.
          # By default, queries listed here will override any specified in a config file.
          # Prefix the list here with "+" to use these queries and those in the config file.
          # queries: ./path/to/local/query, your-org/your-repo/queries@main

      # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
      # If this step fails, then you should remove it and run the build manually (see below)
      - name: Autobuild
        uses: github/codeql-action/autobuild@v2

      # ℹ️ Command-line programs to run using the OS shell.
      # 📚 https://git.io/JvXDl

      # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
      #    and modify them (or add more) to build your code if your project
      #    uses a compiled language

      #- run: |
      #   make bootstrap
      #   make release

      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@v2

domdf_python_tools-3.10.0/.github/workflows/conda_ci.yml
# This file is managed by 'repo_helper'. Don't edit it directly.
--- name: Conda Tests on: push: branches: ["master"] permissions: contents: read jobs: tests: name: "Conda" runs-on: ubuntu-22.04 defaults: run: shell: bash -l {0} steps: - name: Checkout ðŸ›Žï¸ uses: "actions/checkout@v4" - name: Setup Python ðŸ uses: "actions/setup-python@v5" with: python-version: "3.11" - name: Setup Conda uses: conda-incubator/setup-miniconda@v2.1.1 with: activate-environment: env conda-build-version: 3.28.4 miniconda-version: py311_24.1.2-0 python-version: "3.11" miniforge-variant: Mambaforge - name: Install dependencies 🔧 run: | python -VV python -m site python -m pip install --upgrade pip setuptools wheel python -m pip install --upgrade "whey-conda" "hatch-requirements-txt" # $CONDA is an environment variable pointing to the root of the miniconda directory $CONDA/bin/conda update -n base conda $CONDA/bin/conda config --add channels conda-forge $CONDA/bin/conda config --add channels domdfcoding - name: "Build and index channel" run: | python -m whey --builder whey_conda --out-dir conda-bld/noarch $CONDA/bin/conda index ./conda-bld || exit 1 - name: "Search for package" run: | $CONDA/bin/conda search -c file://$(pwd)/conda-bld domdf_python_tools $CONDA/bin/conda search -c file://$(pwd)/conda-bld --override-channels domdf_python_tools - name: "Install package" run: | $CONDA/bin/conda install -c file://$(pwd)/conda-bld domdf_python_tools=3.10.0=py_1 -y || exit 1 - name: "Run Tests" run: | rm -rf domdf_python_tools $CONDA/bin/conda install pytest coincidence || exit 1 pip install -r tests/requirements.txt pytest tests/ domdf_python_tools-3.10.0/.github/workflows/docs_test_action.yml000066400000000000000000000015501475315453000251610ustar00rootroot00000000000000# This file is managed by 'repo_helper'. Don't edit it directly. --- name: "Docs Check" on: push: branches-ignore: - 'repo-helper-update' - 'pre-commit-ci-update-config' - 'imgbot' pull_request: permissions: contents: read jobs: docs: runs-on: ubuntu-latest steps: - name: Checkout ðŸ›Žï¸ uses: "actions/checkout@v4" - name: Check for changed files uses: dorny/paths-filter@v2 id: changes with: list-files: "json" filters: | code: - '!tests/**' - name: Install and Build 🔧 uses: sphinx-toolbox/sphinx-action@sphinx-3.3.1 if: steps.changes.outputs.code == 'true' with: pre-build-command: python -m pip install tox docs-folder: "doc-source/" build-command: "tox -e docs -- -W " domdf_python_tools-3.10.0/.github/workflows/flake8.yml000066400000000000000000000023361475315453000230120ustar00rootroot00000000000000# This file is managed by 'repo_helper'. Don't edit it directly. 
--- name: Flake8 on: push: branches-ignore: - 'repo-helper-update' - 'pre-commit-ci-update-config' - 'imgbot' pull_request: permissions: contents: read jobs: Run: name: "Flake8" runs-on: "ubuntu-20.04" steps: - name: Checkout ðŸ›Žï¸ uses: "actions/checkout@v4" - name: Check for changed files uses: dorny/paths-filter@v2 id: changes with: list-files: "json" filters: | code: - '!(doc-source/**|CONTRIBUTING.rst|.imgbotconfig|.pre-commit-config.yaml|.pylintrc|.readthedocs.yml)' - name: Setup Python ðŸ if: steps.changes.outputs.code == 'true' uses: "actions/setup-python@v5" with: python-version: "3.8" - name: Install dependencies 🔧 if: steps.changes.outputs.code == 'true' run: | python -VV python -m site python -m pip install --upgrade pip setuptools wheel python -m pip install tox~=3.0 - name: "Run Flake8" if: steps.changes.outputs.code == 'true' run: "python -m tox -e lint -s false -- --format github" domdf_python_tools-3.10.0/.github/workflows/mypy.yml000066400000000000000000000024501475315453000226330ustar00rootroot00000000000000# This file is managed by 'repo_helper'. Don't edit it directly. --- name: mypy on: push: branches-ignore: - 'repo-helper-update' - 'pre-commit-ci-update-config' - 'imgbot' pull_request: permissions: contents: read jobs: Run: name: "mypy / ${{ matrix.os }}" runs-on: ${{ matrix.os }} strategy: matrix: os: ['ubuntu-20.04', 'windows-2019'] fail-fast: false steps: - name: Checkout ðŸ›Žï¸ uses: "actions/checkout@v4" - name: Check for changed files uses: dorny/paths-filter@v2 id: changes with: list-files: "json" filters: | code: - '!(doc-source/**|CONTRIBUTING.rst|.imgbotconfig|.pre-commit-config.yaml|.pylintrc|.readthedocs.yml)' - name: Setup Python ðŸ if: steps.changes.outputs.code == 'true' uses: "actions/setup-python@v5" with: python-version: "3.8" - name: Install dependencies 🔧 run: | python -VV python -m site python -m pip install --upgrade pip setuptools wheel python -m pip install --upgrade tox~=3.0 virtualenv!=20.16.0 - name: "Run mypy" if: steps.changes.outputs.code == 'true' run: "python -m tox -e mypy -s false" domdf_python_tools-3.10.0/.github/workflows/octocheese.yml000066400000000000000000000006251475315453000237600ustar00rootroot00000000000000# This file is managed by 'repo_helper'. Don't edit it directly. --- name: "GitHub Releases" on: schedule: - cron: 0 12 * * * jobs: Run: runs-on: ubuntu-latest steps: - uses: domdfcoding/octocheese@master with: pypi_name: "domdf_python_tools" env: GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}} if: startsWith(github.ref, 'refs/tags/') != true domdf_python_tools-3.10.0/.github/workflows/python_ci.yml000066400000000000000000000061571475315453000236410ustar00rootroot00000000000000# This file is managed by 'repo_helper'. Don't edit it directly. 
--- name: Windows on: push: branches-ignore: - 'repo-helper-update' - 'pre-commit-ci-update-config' - 'imgbot' pull_request: permissions: actions: write issues: write contents: read jobs: tests: name: "windows-2019 / Python ${{ matrix.config.python-version }}" runs-on: "windows-2019" continue-on-error: ${{ matrix.config.experimental }} env: USING_COVERAGE: '3.6,3.7,3.8,3.9,3.10,3.11,3.12,3.13,pypy-3.6,pypy-3.7,pypy-3.8,pypy-3.9,pypy-3.10' strategy: fail-fast: False matrix: config: - {python-version: "3.6", testenvs: "py36,build", experimental: False} - {python-version: "3.7", testenvs: "py37,build", experimental: False} - {python-version: "3.8", testenvs: "py38,build", experimental: False} - {python-version: "3.9", testenvs: "py39,build", experimental: False} - {python-version: "3.10", testenvs: "py310,build", experimental: False} - {python-version: "3.11", testenvs: "py311,build", experimental: False} - {python-version: "3.12", testenvs: "py312,build", experimental: False} - {python-version: "3.13", testenvs: "py313-dev,build", experimental: True} - {python-version: "pypy-3.6", testenvs: "pypy36", experimental: False} - {python-version: "pypy-3.7", testenvs: "pypy37,build", experimental: False} - {python-version: "pypy-3.8", testenvs: "pypy38,build", experimental: False} - {python-version: "pypy-3.9-v7.3.15", testenvs: "pypy39,build", experimental: True} - {python-version: "pypy-3.10-v7.3.15", testenvs: "pypy310,build", experimental: True} steps: - name: Checkout ðŸ›Žï¸ uses: "actions/checkout@v4" - name: Check for changed files if: startsWith(github.ref, 'refs/tags/') != true uses: dorny/paths-filter@v2 id: changes with: list-files: "json" filters: | code: - '!(doc-source/**|CONTRIBUTING.rst|.imgbotconfig|.pre-commit-config.yaml|.pylintrc|.readthedocs.yml)' - name: Setup Python ðŸ id: setup-python if: ${{ steps.changes.outputs.code == 'true' || steps.changes.outcome == 'skipped' }} uses: "actions/setup-python@v5" with: python-version: "${{ matrix.config.python-version }}" - name: Install dependencies 🔧 if: steps.setup-python.outcome == 'success' run: | python -VV python -m site python -m pip install --upgrade pip setuptools wheel python -m pip install --upgrade tox~=3.0 virtualenv!=20.16.0 - name: "Run Tests for Python ${{ matrix.config.python-version }}" if: steps.setup-python.outcome == 'success' run: python -m tox -e "${{ matrix.config.testenvs }}" -s false - name: "Upload Coverage 🚀" uses: actions/upload-artifact@v4 if: ${{ always() && steps.setup-python.outcome == 'success' }} with: name: "coverage-${{ matrix.config.python-version }}" path: .coverage domdf_python_tools-3.10.0/.github/workflows/python_ci_alt_linux.yml000066400000000000000000000036271475315453000257170ustar00rootroot00000000000000# This file is managed by 'repo_helper'. Don't edit it directly. 
--- name: ALT Linux on: push: branches-ignore: - 'repo-helper-update' - 'pre-commit-ci-update-config' - 'imgbot' tags: - '*' pull_request: permissions: actions: write issues: write contents: read jobs: tests: name: "alt-linux / Python ${{ matrix.config.python-version }}" runs-on: "ubuntu-20.04" container: image: ghcr.io/domdfcoding/alt-linux-python:latest continue-on-error: ${{ matrix.config.experimental }} env: USING_COVERAGE: '3.10' strategy: fail-fast: False matrix: config: - {python-version: "3.10", testenvs: "py310,build", experimental: False} steps: - name: Checkout ðŸ›Žï¸ uses: "actions/checkout@v3" - name: "Configure" run: git config --global --add safe.directory /__w/${{ github.event.repository.name }}/${{ github.event.repository.name }} - name: Check for changed files if: startsWith(github.ref, 'refs/tags/') != true uses: dorny/paths-filter@v2 id: changes with: list-files: "json" filters: | code: - '!(doc-source/**|CONTRIBUTING.rst|.imgbotconfig|.pre-commit-config.yaml|.pylintrc|.readthedocs.yml)' - name: Install dependencies 🔧 id: setup-python if: ${{ steps.changes.outputs.code == 'true' || steps.changes.outcome == 'skipped' }} run: | python3 -VV python3 -m site python3 -m pip install --upgrade pip setuptools wheel python3 -m pip install --upgrade tox~=3.0 virtualenv!=20.16.0 python3 -m pip install --upgrade coverage_pyver_pragma - name: "Run Tests for Python ${{ matrix.config.python-version }}" if: steps.setup-python.outcome == 'success' run: python3 -m tox -e "${{ matrix.config.testenvs }}" -s false domdf_python_tools-3.10.0/.github/workflows/python_ci_linux.yml000066400000000000000000000176331475315453000250610ustar00rootroot00000000000000# This file is managed by 'repo_helper'. Don't edit it directly. --- name: Linux on: push: branches-ignore: - 'repo-helper-update' - 'pre-commit-ci-update-config' - 'imgbot' tags: - '*' pull_request: permissions: actions: write issues: write contents: read jobs: tests: name: "ubuntu-20.04 / Python ${{ matrix.config.python-version }}" runs-on: "ubuntu-20.04" continue-on-error: ${{ matrix.config.experimental }} env: USING_COVERAGE: '3.6,3.7,3.8,3.9,3.10,3.11,3.12,3.13,pypy-3.6,pypy-3.7,pypy-3.8,pypy-3.9,pypy-3.10' strategy: fail-fast: False matrix: config: - {python-version: "3.6", testenvs: "py36,build", experimental: False} - {python-version: "3.7", testenvs: "py37,build", experimental: False} - {python-version: "3.8", testenvs: "py38,build", experimental: False} - {python-version: "3.9", testenvs: "py39,build", experimental: False} - {python-version: "3.10", testenvs: "py310,build", experimental: False} - {python-version: "3.11", testenvs: "py311,build", experimental: False} - {python-version: "3.12", testenvs: "py312,build", experimental: False} - {python-version: "3.13", testenvs: "py313-dev,build", experimental: True} - {python-version: "pypy-3.6", testenvs: "pypy36,build", experimental: False} - {python-version: "pypy-3.7", testenvs: "pypy37,build", experimental: False} - {python-version: "pypy-3.8", testenvs: "pypy38,build", experimental: False} - {python-version: "pypy-3.9", testenvs: "pypy39,build", experimental: True} - {python-version: "pypy-3.10", testenvs: "pypy310,build", experimental: True} steps: - name: Checkout ðŸ›Žï¸ uses: "actions/checkout@v4" - name: Check for changed files if: startsWith(github.ref, 'refs/tags/') != true uses: dorny/paths-filter@v2 id: changes with: list-files: "json" filters: | code: - '!(doc-source/**|CONTRIBUTING.rst|.imgbotconfig|.pre-commit-config.yaml|.pylintrc|.readthedocs.yml)' - name: Setup 
Python ðŸ id: setup-python if: ${{ steps.changes.outputs.code == 'true' || steps.changes.outcome == 'skipped' }} uses: "actions/setup-python@v5" with: python-version: "${{ matrix.config.python-version }}" - name: Install dependencies 🔧 if: steps.setup-python.outcome == 'success' run: | python -VV python -m site python -m pip install --upgrade pip setuptools wheel python -m pip install --upgrade tox~=3.0 virtualenv!=20.16.0 python -m pip install --upgrade coverage_pyver_pragma - name: "Run Tests for Python ${{ matrix.config.python-version }}" if: steps.setup-python.outcome == 'success' run: python -m tox -e "${{ matrix.config.testenvs }}" -s false - name: "Upload Coverage 🚀" uses: actions/upload-artifact@v4 if: ${{ always() && steps.setup-python.outcome == 'success' }} with: name: "coverage-${{ matrix.config.python-version }}" path: .coverage Coverage: needs: tests runs-on: "ubuntu-20.04" steps: - name: Checkout ðŸ›Žï¸ uses: "actions/checkout@v4" - name: Setup Python ðŸ uses: "actions/setup-python@v5" with: python-version: 3.8 - name: Install dependencies 🔧 run: | python -m pip install --upgrade pip setuptools wheel python -m pip install --upgrade "coveralls>=3.0.0" coverage_pyver_pragma - name: "Download Coverage 🪂" uses: actions/download-artifact@v4 with: path: coverage - name: Display structure of downloaded files id: show run: ls -R working-directory: coverage continue-on-error: true - name: Combine Coverage 👷 if: ${{ steps.show.outcome != 'failure' }} run: | shopt -s globstar python -m coverage combine coverage/**/.coverage - name: "Upload Combined Coverage Artefact 🚀" if: ${{ steps.show.outcome != 'failure' }} uses: actions/upload-artifact@v4 with: name: "combined-coverage" path: .coverage - name: "Upload Combined Coverage to Coveralls" if: ${{ steps.show.outcome != 'failure' }} env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: | coveralls --service=github Deploy: needs: tests runs-on: "ubuntu-20.04" steps: - name: Checkout ðŸ›Žï¸ uses: "actions/checkout@v4" if: startsWith(github.ref, 'refs/tags/') - name: Setup Python ðŸ uses: "actions/setup-python@v5" if: startsWith(github.ref, 'refs/tags/') with: python-version: 3.8 - name: Install dependencies 🔧 if: startsWith(github.ref, 'refs/tags/') run: | python -m pip install --upgrade pip setuptools wheel python -m pip install --upgrade tox~=3.0 - name: Build distributions 📦 if: startsWith(github.ref, 'refs/tags/') run: | tox -e build - name: Upload distribution to PyPI 🚀 if: startsWith(github.ref, 'refs/tags/') uses: pypa/gh-action-pypi-publish@v1.4.2 with: user: __token__ password: ${{ secrets.PYPI_TOKEN }} skip_existing: true - name: Close milestone 🚪 if: startsWith(github.ref, 'refs/tags/') run: | python -m pip install --upgrade github3.py packaging python .github/milestones.py env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} Conda: needs: deploy runs-on: ubuntu-22.04 if: startsWith(github.ref, 'refs/tags/') || (startsWith(github.event.head_commit.message, 'Bump version') != true) steps: - name: Checkout ðŸ›Žï¸ uses: "actions/checkout@v4" - name: Setup Python ðŸ uses: "actions/setup-python@v5" with: python-version: 3.11 - name: Setup Conda uses: conda-incubator/setup-miniconda@v2.1.1 with: activate-environment: env conda-build-version: 3.28.4 miniconda-version: py311_24.1.2-0 python-version: "3.11" miniforge-variant: Mambaforge - name: Install dependencies 🔧 run: | python -VV python -m site python -m pip install --upgrade pip setuptools wheel python -m pip install --upgrade "mkrecipe" "hatch-requirements-txt" # $CONDA is an environment 
variable pointing to the root of the miniconda directory $CONDA/bin/conda config --set always_yes yes --set changeps1 no $CONDA/bin/conda update -n base conda $CONDA/bin/conda info -a $CONDA/bin/conda config --add channels conda-forge $CONDA/bin/conda config --add channels domdfcoding $CONDA/bin/conda config --remove channels defaults - name: Build Conda Package 📦 run: | python -m mkrecipe --type wheel || exit 1 $CONDA/bin/conda build conda -c conda-forge -c domdfcoding --output-folder conda/dist - name: Deploy Conda Package 🚀 if: startsWith(github.ref, 'refs/tags/') run: | $CONDA/bin/conda config --set always_yes yes --set changeps1 no $CONDA/bin/conda install anaconda-client $CONDA/bin/conda info -a for f in conda/dist/noarch/domdf_python_tools-*.tar.bz2; do [ -e "$f" ] || continue echo "$f" conda install "$f" || exit 1 echo "Deploying to Anaconda.org..." $CONDA/bin/anaconda -t "$ANACONDA_TOKEN" upload "$f" || exit 1 echo "Successfully deployed to Anaconda.org." done env: ANACONDA_TOKEN: ${{ secrets.ANACONDA_TOKEN }} domdf_python_tools-3.10.0/.github/workflows/python_ci_macos.yml000066400000000000000000000057721475315453000250250ustar00rootroot00000000000000# This file is managed by 'repo_helper'. Don't edit it directly. --- name: macOS on: push: branches-ignore: - 'repo-helper-update' - 'pre-commit-ci-update-config' - 'imgbot' pull_request: permissions: actions: write issues: write contents: read jobs: tests: name: "macos-13 / Python ${{ matrix.config.python-version }}" runs-on: "macos-13" continue-on-error: ${{ matrix.config.experimental }} env: USING_COVERAGE: '3.6,3.7,3.8,3.9,3.10,3.11,3.12,3.13,pypy-3.7,pypy-3.8,pypy-3.9,pypy-3.10' strategy: fail-fast: False matrix: config: - {python-version: "3.6", testenvs: "py36,build", experimental: False} - {python-version: "3.7", testenvs: "py37,build", experimental: False} - {python-version: "3.8", testenvs: "py38,build", experimental: False} - {python-version: "3.9", testenvs: "py39,build", experimental: False} - {python-version: "3.10", testenvs: "py310,build", experimental: False} - {python-version: "3.11", testenvs: "py311,build", experimental: False} - {python-version: "3.12", testenvs: "py312,build", experimental: False} - {python-version: "3.13", testenvs: "py313-dev,build", experimental: True} - {python-version: "pypy-3.7", testenvs: "pypy37,build", experimental: False} - {python-version: "pypy-3.8", testenvs: "pypy38,build", experimental: False} - {python-version: "pypy-3.9", testenvs: "pypy39,build", experimental: True} - {python-version: "pypy-3.10", testenvs: "pypy310,build", experimental: True} steps: - name: Checkout ðŸ›Žï¸ uses: "actions/checkout@v4" - name: Check for changed files if: startsWith(github.ref, 'refs/tags/') != true uses: dorny/paths-filter@v2 id: changes with: list-files: "json" filters: | code: - '!(doc-source/**|CONTRIBUTING.rst|.imgbotconfig|.pre-commit-config.yaml|.pylintrc|.readthedocs.yml)' - name: Setup Python ðŸ id: setup-python if: ${{ steps.changes.outputs.code == 'true' || steps.changes.outcome == 'skipped' }} uses: "actions/setup-python@v5" with: python-version: "${{ matrix.config.python-version }}" - name: Install dependencies 🔧 if: steps.setup-python.outcome == 'success' run: | python -VV python -m site python -m pip install --upgrade pip setuptools wheel python -m pip install --upgrade tox~=3.0 virtualenv!=20.16.0 - name: "Run Tests for Python ${{ matrix.config.python-version }}" if: steps.setup-python.outcome == 'success' run: python -m tox -e "${{ matrix.config.testenvs }}" -s false - name: 
"Upload Coverage 🚀" uses: actions/upload-artifact@v4 if: ${{ always() && steps.setup-python.outcome == 'success' }} with: name: "coverage-${{ matrix.config.python-version }}" path: .coverage domdf_python_tools-3.10.0/.gitignore000066400000000000000000000020611475315453000175030ustar00rootroot00000000000000# This file is managed by 'repo_helper'. Don't edit it directly. __pycache__/ *.py[cod] *$py.class *.so .Python build/ develop-eggs/ dist/ downloads/ eggs/ .eggs/ lib/ lib64/ parts/ sdist/ var/ wheels/ *.egg-info/ .installed.cfg *.egg *.egg* *.manifest *.spec pip-log.txt pip-delete-this-directory.txt htmlcov/ .tox/ .coverage .coverage.* .cache nosetests.xml coverage.xml *.cover .hypothesis/ .pytest_cache/ cover/ *.mo *.pot *.log local_settings.py db.sqlite3 instance/ .webassets-cache .scrapy docs/_build/ doc/build target/ .ipynb_checkpoints .python-version celerybeat-schedule celerybeat.pid *.sage.py .env .venv env/ venv/ ENV/ env.bak/ venv.bak/ .spyderproject .spyproject .ropeproject /site .mypy_cache/ .dmypy.json dmypy.json *.iml *.ipr cmake-build-*/ .idea/**/mongoSettings.xml *.iws out/ atlassian-ide-plugin.xml com_crashlytics_export_strings.xml crashlytics.properties crashlytics-build.properties fabric.properties .idea build **/__pycache__ **/conda __pypackages__/ profile_default/ ipython_config.py Pipfile.lock .pyre/ demo.py frozendict.py graaltest.sh domdf_python_tools-3.10.0/.imgbotconfig000066400000000000000000000001151475315453000201610ustar00rootroot00000000000000{ "schedule": "weekly", "ignoredFiles": [ "**/*.svg" ] } domdf_python_tools-3.10.0/.pre-commit-config.yaml000066400000000000000000000043601475315453000220000ustar00rootroot00000000000000# This file is managed by 'repo_helper'. Don't edit it directly. --- exclude: ^domdf_python_tools/compat/importlib_resources.py$ ci: autoupdate_schedule: quarterly repos: - repo: https://github.com/repo-helper/pyproject-parser rev: v0.11.1 hooks: - id: reformat-pyproject - repo: https://github.com/pre-commit/pre-commit-hooks rev: v3.4.0 hooks: - id: check-added-large-files - id: check-ast - id: fix-byte-order-marker - id: check-byte-order-marker - id: check-case-conflict - id: check-executables-have-shebangs - id: check-json - id: check-toml - id: check-yaml - id: check-merge-conflict - id: check-symlinks - id: check-vcs-permalinks - id: detect-private-key - id: trailing-whitespace - id: mixed-line-ending - id: end-of-file-fixer - repo: https://github.com/domdfcoding/pre-commit-hooks rev: v0.4.0 hooks: - id: requirements-txt-sorter args: - --allow-git - id: check-docstring-first exclude: ^(doc-source/conf|__pkginfo__|setup|tests/.*)\.py$ - id: bind-requirements - repo: https://github.com/python-formate/flake8-dunder-all rev: v0.4.1 hooks: - id: ensure-dunder-all files: ^domdf_python_tools/.*\.py$ - repo: https://github.com/domdfcoding/flake2lint rev: v0.4.3 hooks: - id: flake2lint - repo: https://github.com/pre-commit/pygrep-hooks rev: v1.10.0 hooks: - id: python-no-eval - id: rst-backticks - id: rst-directive-colons - id: rst-inline-touching-normal - repo: https://github.com/asottile/pyupgrade rev: v2.12.0 hooks: - id: pyupgrade args: - --py36-plus - --keep-runtime-typing - repo: https://github.com/Lucas-C/pre-commit-hooks rev: v1.5.1 hooks: - id: remove-crlf - id: forbid-crlf - repo: https://github.com/python-formate/snippet-fmt rev: v0.1.5 hooks: - id: snippet-fmt - repo: https://github.com/python-formate/formate rev: v0.8.0 hooks: - id: formate exclude: ^(doc-source/conf|__pkginfo__|setup)\.(_)?py$ - repo: 
https://github.com/python-coincidence/dep_checker rev: v0.8.0 hooks: - id: dep_checker args: - domdf_python_tools # Custom hooks can be added below this comment domdf_python_tools-3.10.0/.pylintrc000066400000000000000000000346201475315453000173660ustar00rootroot00000000000000[MASTER] # Specify a configuration file. #rcfile= # Python code to execute, usually for sys.path manipulation such as # pygtk.require(). #init-hook= # Add files or directories to the blacklist. They should be base names, not # paths. ignore=CVS # Add files or directories matching the regex patterns to the blacklist. The # regex matches against base names, not paths. ignore-patterns= # Pickle collected data for later comparisons. persistent=yes # List of plugins (as comma separated values of python modules names) to load, # usually to register additional checkers. load-plugins= # Use multiple processes to speed up Pylint. jobs=1 # Allow loading of arbitrary C extensions. Extensions are imported into the # active Python interpreter and may run arbitrary code. unsafe-load-any-extension=no # A comma-separated list of package or module names from where C extensions may # be loaded. Extensions are loading into the active Python interpreter and may # run arbitrary code extension-pkg-whitelist= # Allow optimization of some AST trees. This will activate a peephole AST # optimizer, which will apply various small optimizations. For instance, it can # be used to obtain the result of joining multiple strings with the addition # operator. Joining a lot of strings can lead to a maximum recursion error in # Pylint and this flag can prevent that. It has one side effect, the resulting # AST will be different than the one from reality. This option is deprecated # and it will be removed in Pylint 2.0. optimize-ast=no [MESSAGES CONTROL] # Only show warnings with the listed confidence levels. Leave empty to show # all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED confidence= # Enable the message, report, category or checker with the given id(s). You can # either give multiple identifier separated by comma (,) or put this option # multiple time (only on the command line, not in the configuration file where # it should appear only once). See also the "--disable" option for examples. #enable= # Disable the message, report, category or checker with the given id(s). You # can either give multiple identifiers separated by comma (,) or put this # option multiple times (only on the command line, not in the configuration # file where it should appear only once).You can also use "--disable=all" to # disable everything first and then reenable specific checks. For example, if # you want to run only the similarities checker, you can use "--disable=all # --enable=similarities". 
If you want to run only the classes checker, but have # no Warning level messages displayed, use"--disable=all --enable=classes # --disable=W" disable=all enable=assert-on-tuple,astroid-error,bad-except-order,bad-inline-option,bad-option-value,bad-reversed-sequence,bare-except,binary-op-exception,boolean-datetime,catching-non-exception,cell-var-from-loop,confusing-with-statement,consider-merging-isinstance,consider-using-enumerate,consider-using-ternary,continue-in-finally,deprecated-pragma,django-not-available,duplicate-except,duplicate-key,eval-used,exec-used,expression-not-assigned,fatal,file-ignored,fixme,global-at-module-level,global-statement,global-variable-not-assigned,global-variable-undefined,http-response-with-content-type-json,http-response-with-json-dumps,invalid-all-object,invalid-characters-in-docstring,len-as-condition,literal-comparison,locally-disabled,locally-enabled,lost-exception,lowercase-l-suffix,misplaced-bare-raise,missing-kwoa,mixed-line-endings,model-has-unicode,model-missing-unicode,model-no-explicit-unicode,model-unicode-not-callable,multiple-imports,new-db-field-with-default,non-ascii-bytes-literals,nonexistent-operator,not-in-loop,notimplemented-raised,overlapping-except,parse-error,pointless-statement,pointless-string-statement,raising-bad-type,raising-non-exception,raw-checker-failed,redefine-in-handler,redefined-argument-from-local,redefined-builtin,redundant-content-type-for-json-response,reimported,relative-import,return-outside-function,simplifiable-if-statement,singleton-comparison,syntax-error,trailing-comma-tuple,trailing-newlines,unbalanced-tuple-unpacking,undefined-all-variable,undefined-loop-variable,unexpected-line-ending-format,unidiomatic-typecheck,unnecessary-lambda,unnecessary-pass,unnecessary-semicolon,unneeded-not,unpacking-non-sequence,unreachable,unrecognized-inline-option,used-before-assignment,useless-else-on-loop,using-constant-test,wildcard-import,yield-outside-function,useless-return [REPORTS] # Set the output format. Available formats are text, parseable, colorized, msvs # (visual studio) and html. You can also give a reporter class, eg # mypackage.mymodule.MyReporterClass. output-format=text # Put messages in a separate file for each module / package specified on the # command line instead of printing them on stdout. Reports (if any) will be # written in a file name "pylint_global.[txt|html]". This option is deprecated # and it will be removed in Pylint 2.0. files-output=no # Tells whether to display a full report or only the messages reports=no # Python expression which should return a note less than 10 (10 is the highest # note). You have access to the variables errors warning, statement which # respectively contain the number of errors / warnings messages and the total # number of statements analyzed. This is used by the global evaluation report # (RP0004). evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) # Template used to display messages. This is a python new-style format string # used to format the message information. See doc for all details #msg-template= [BASIC] # Good variable names which should always be accepted, separated by a comma good-names=i,j,k,ex,Run,_ # Bad variable names which should always be refused, separated by a comma bad-names=foo,bar,baz,toto,tutu,tata # Colon-delimited sets of names that determine each other's naming style when # the name regexes allow several styles. 
name-group= # Include a hint for the correct naming format with invalid-name include-naming-hint=no # List of decorators that produce properties, such as abc.abstractproperty. Add # to this list to register other decorators that produce valid properties. property-classes=abc.abstractproperty # Regular expression matching correct function names function-rgx=[a-z_][a-z0-9_]{2,30}$ # Naming hint for function names function-name-hint=[a-z_][a-z0-9_]{2,30}$ # Regular expression matching correct variable names variable-rgx=[a-z_][a-z0-9_]{2,30}$ # Naming hint for variable names variable-name-hint=[a-z_][a-z0-9_]{2,30}$ # Regular expression matching correct constant names const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$ # Naming hint for constant names const-name-hint=(([A-Z_][A-Z0-9_]*)|(__.*__))$ # Regular expression matching correct attribute names attr-rgx=[a-z_][a-z0-9_]{2,30}$ # Naming hint for attribute names attr-name-hint=[a-z_][a-z0-9_]{2,30}$ # Regular expression matching correct argument names argument-rgx=[a-z_][a-z0-9_]{2,30}$ # Naming hint for argument names argument-name-hint=[a-z_][a-z0-9_]{2,30}$ # Regular expression matching correct class attribute names class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$ # Naming hint for class attribute names class-attribute-name-hint=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$ # Regular expression matching correct inline iteration names inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$ # Naming hint for inline iteration names inlinevar-name-hint=[A-Za-z_][A-Za-z0-9_]*$ # Regular expression matching correct class names class-rgx=[A-Z_][a-zA-Z0-9]+$ # Naming hint for class names class-name-hint=[A-Z_][a-zA-Z0-9]+$ # Regular expression matching correct module names module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$ # Naming hint for module names module-name-hint=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$ # Regular expression matching correct method names method-rgx=[a-z_][a-z0-9_]{2,30}$ # Naming hint for method names method-name-hint=[a-z_][a-z0-9_]{2,30}$ # Regular expression which should only match function or class names that do # not require a docstring. no-docstring-rgx=^_ # Minimum line length for functions/classes that require docstrings, shorter # ones are exempt. docstring-min-length=-1 [ELIF] # Maximum number of nested blocks for function / method body max-nested-blocks=5 [FORMAT] # Maximum number of characters on a single line. max-line-length=159 # Regexp for a line that is allowed to be longer than the limit. ignore-long-lines=^\s*(# )??$ # Allow the body of an if to be on the same line as the test if there is no # else. single-line-if-stmt=no # List of optional constructs for which whitespace checking is disabled. `dict- # separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}. # `trailing-comma` allows a space between comma and closing bracket: (a, ). # `empty-line` allows space-only lines. no-space-check=trailing-comma,dict-separator # Maximum number of lines in a module max-module-lines=1000 # String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 # tab). indent-string=' ' # Number of spaces of indent required inside a hanging or continued line. indent-after-paren=4 # Expected format of line ending, e.g. empty (any line ending), LF or CRLF. expected-line-ending-format= [LOGGING] # Logging modules to check that the string format arguments are in logging # function parameter format logging-modules=logging [MISCELLANEOUS] # List of note tags to take in consideration, separated by a comma. 
notes=FIXME,XXX,TODO [SIMILARITIES] # Minimum lines number of a similarity. min-similarity-lines=4 # Ignore comments when computing similarities. ignore-comments=yes # Ignore docstrings when computing similarities. ignore-docstrings=yes # Ignore imports when computing similarities. ignore-imports=no [SPELLING] # Spelling dictionary name. Available dictionaries: none. To make it working # install python-enchant package. spelling-dict= # List of comma separated words that should not be checked. spelling-ignore-words= # A path to a file that contains private dictionary; one word per line. spelling-private-dict-file= # Tells whether to store unknown words to indicated private dictionary in # --spelling-private-dict-file option instead of raising a message. spelling-store-unknown-words=no [TYPECHECK] # Tells whether missing members accessed in mixin class should be ignored. A # mixin class is detected if its name ends with "mixin" (case insensitive). ignore-mixin-members=yes # List of module names for which member attributes should not be checked # (useful for modules/projects where namespaces are manipulated during runtime # and thus existing member attributes cannot be deduced by static analysis. It # supports qualified module names, as well as Unix pattern matching. ignored-modules= # List of class names for which member attributes should not be checked (useful # for classes with dynamically set attributes). This supports the use of # qualified names. ignored-classes=optparse.Values,thread._local,_thread._local # List of members which are set dynamically and missed by pylint inference # system, and so shouldn't trigger E1101 when accessed. Python regular # expressions are accepted. generated-members= # List of decorators that produce context managers, such as # contextlib.contextmanager. Add to this list to register other decorators that # produce valid context managers. contextmanager-decorators=contextlib.contextmanager [VARIABLES] # Tells whether we should check for unused import in __init__ files. init-import=no # A regular expression matching the name of dummy variables (i.e. expectedly # not used). dummy-variables-rgx=(_+[a-zA-Z0-9]*?$)|dummy # List of additional names supposed to be defined in builtins. Remember that # you should avoid to define new builtins when possible. additional-builtins= # List of strings which can identify a callback function by name. A callback # name must start or end with one of those strings. callbacks=cb_,_cb # List of qualified module names which can have objects that can redefine # builtins. redefining-builtins-modules=six.moves,future.builtins [CLASSES] # List of method names used to declare (i.e. assign) instance attributes. defining-attr-methods=__init__,__new__,setUp # List of valid names for the first argument in a class method. valid-classmethod-first-arg=cls # List of valid names for the first argument in a metaclass class method. valid-metaclass-classmethod-first-arg=mcs # List of member names, which should be excluded from the protected access # warning. exclude-protected=_asdict,_fields,_replace,_source,_make [DESIGN] # Maximum number of arguments for function / method max-args=5 # Argument names that match this expression will be ignored. 
Default to name # with leading underscore ignored-argument-names=_.* # Maximum number of locals for function / method body max-locals=15 # Maximum number of return / yield for function / method body max-returns=6 # Maximum number of branch for function / method body max-branches=12 # Maximum number of statements in function / method body max-statements=60 # Maximum number of parents for a class (see R0901). max-parents=7 # Maximum number of attributes for a class (see R0902). max-attributes=7 # Minimum number of public methods for a class (see R0903). min-public-methods=2 # Maximum number of public methods for a class (see R0904). max-public-methods=20 # Maximum number of boolean expressions in a if statement max-bool-expr=5 [IMPORTS] # Deprecated modules which should not be used, separated by a comma deprecated-modules=regsub,TERMIOS,Bastion,rexec # Create a graph of every (i.e. internal and external) dependencies in the # given file (report RP0402 must not be disabled) import-graph= # Create a graph of external dependencies in the given file (report RP0402 must # not be disabled) ext-import-graph= # Create a graph of internal dependencies in the given file (report RP0402 must # not be disabled) int-import-graph= # Force import order to recognize a module as part of the standard # compatibility libraries. known-standard-library= # Force import order to recognize a module as part of a third party library. known-third-party=enchant # Analyse import fallback blocks. This can be used to support both Python 2 and # 3 compatible code, which means that the block might have code that exists # only in one or another interpreter, leading to false positives when analysed. analyse-fallback-blocks=no [EXCEPTIONS] # Exceptions that will emit a warning when being caught. Defaults to # "Exception" overgeneral-exceptions=Exception domdf_python_tools-3.10.0/.readthedocs.yml000066400000000000000000000012071475315453000206020ustar00rootroot00000000000000# This file is managed by 'repo_helper'. Don't edit it directly. # Read the Docs configuration file --- version: 2 sphinx: builder: html configuration: doc-source/conf.py formats: - pdf - htmlzip python: install: - requirements: requirements.txt - requirements: doc-source/requirements.txt build: os: ubuntu-20.04 tools: python: '3.9' jobs: post_create_environment: - pip install .[all] post_install: - pip install sphinxcontrib-applehelp==1.0.4 sphinxcontrib-devhelp==1.0.2 sphinxcontrib-htmlhelp==2.0.1 sphinxcontrib-jsmath==1.0.1 sphinxcontrib-qthelp==1.0.3 sphinxcontrib-serializinghtml==1.1.5 domdf_python_tools-3.10.0/.style.yapf000066400000000000000000000272001475315453000176140ustar00rootroot00000000000000[style] # Align closing bracket with visual indentation. align_closing_bracket_with_visual_indent=True # Allow dictionary keys to exist on multiple lines. For example: # # x = { # ('this is the first element of a tuple', # 'this is the second element of a tuple'): # value, # } allow_multiline_dictionary_keys=True # Allow lambdas to be formatted on more than one line. allow_multiline_lambdas=False # Allow splitting before a default / named assignment in an argument list. allow_split_before_default_or_named_assigns=True # Allow splits before the dictionary value. allow_split_before_dict_value=True # Let spacing indicate operator precedence. 
For example: # # a = 1 * 2 + 3 / 4 # b = 1 / 2 - 3 * 4 # c = (1 + 2) * (3 - 4) # d = (1 - 2) / (3 + 4) # e = 1 * 2 - 3 # f = 1 + 2 + 3 + 4 # # will be formatted as follows to indicate precedence: # # a = 1*2 + 3/4 # b = 1/2 - 3*4 # c = (1+2) * (3-4) # d = (1-2) / (3+4) # e = 1*2 - 3 # f = 1 + 2 + 3 + 4 # arithmetic_precedence_indication=False # Number of blank lines surrounding top-level function and class # definitions. blank_lines_around_top_level_definition=2 # Insert a blank line before a class-level docstring. blank_line_before_class_docstring=False # Insert a blank line before a module docstring. blank_line_before_module_docstring=False # Insert a blank line before a 'def' or 'class' immediately nested # within another 'def' or 'class'. For example: # # class Foo: # # <------ this blank line # def method(): # ... blank_line_before_nested_class_or_def=True # Do not split consecutive brackets. Only relevant when # dedent_closing_brackets is set. For example: # # call_func_that_takes_a_dict( # { # 'key1': 'value1', # 'key2': 'value2', # } # ) # # would reformat to: # # call_func_that_takes_a_dict({ # 'key1': 'value1', # 'key2': 'value2', # }) coalesce_brackets=True # The column limit. column_limit=115 # The style for continuation alignment. Possible values are: # # - SPACE: Use spaces for continuation alignment. This is default behavior. # - FIXED: Use fixed number (CONTINUATION_INDENT_WIDTH) of columns # (ie: CONTINUATION_INDENT_WIDTH/INDENT_WIDTH tabs or # CONTINUATION_INDENT_WIDTH spaces) for continuation alignment. # - VALIGN-RIGHT: Vertically align continuation lines to multiple of # INDENT_WIDTH columns. Slightly right (one tab or a few spaces) if # cannot vertically align continuation lines with indent characters. continuation_align_style=VALIGN-RIGHT # Indent width used for line continuations. continuation_indent_width=8 # Put closing brackets on a separate line, dedented, if the bracketed # expression can't fit in a single line. Applies to all kinds of brackets, # including function definitions and calls. For example: # # config = { # 'key1': 'value1', # 'key2': 'value2', # } # <--- this bracket is dedented and on a separate line # # time_series = self.remote_client.query_entity_counters( # entity='dev3246.region1', # key='dns.query_latency_tcp', # transform=Transformation.AVERAGE(window=timedelta(seconds=60)), # start_ts=now()-timedelta(days=3), # end_ts=now(), # ) # <--- this bracket is dedented and on a separate line dedent_closing_brackets=False # Disable the heuristic which places each list element on a separate line # if the list is comma-terminated. disable_ending_comma_heuristic=False # Place each dictionary entry onto its own line. each_dict_entry_on_separate_line=False # Require multiline dictionary even if it would normally fit on one line. # For example: # # config = { # 'key1': 'value1' # } force_multiline_dict=False # The regex for an i18n comment. The presence of this comment stops # reformatting of that line, because the comments are required to be # next to the string they translate. ;i18n_comment= # The i18n function call names. The presence of this function stops # reformattting on that line, because the string it has cannot be moved # away from the i18n comment. ;i18n_function_call= # Indent blank lines. indent_blank_lines=False # Put closing brackets on a separate line, indented, if the bracketed # expression can't fit in a single line. Applies to all kinds of brackets, # including function definitions and calls. 
For example: # # config = { # 'key1': 'value1', # 'key2': 'value2', # } # <--- this bracket is indented and on a separate line # # time_series = self.remote_client.query_entity_counters( # entity='dev3246.region1', # key='dns.query_latency_tcp', # transform=Transformation.AVERAGE(window=timedelta(seconds=60)), # start_ts=now()-timedelta(days=3), # end_ts=now(), # ) # <--- this bracket is indented and on a separate line indent_closing_brackets=True # Indent the dictionary value if it cannot fit on the same line as the # dictionary key. For example: # # config = { # 'key1': # 'value1', # 'key2': value1 + # value2, # } indent_dictionary_value=True # The number of columns to use for indentation. indent_width=4 # Join short lines into one line. E.g., single line 'if' statements. join_multiple_lines=False # Do not include spaces around selected binary operators. For example: # # 1 + 2 * 3 - 4 / 5 # # will be formatted as follows when configured with "*,/": # # 1 + 2*3 - 4/5 ;no_spaces_around_selected_binary_operators= # Use spaces around default or named assigns. spaces_around_default_or_named_assign=False # Adds a space after the opening '{' and before the ending '}' dict delimiters. # # {1: 2} # # will be formatted as: # # { 1: 2 } spaces_around_dict_delimiters=False # Adds a space after the opening '[' and before the ending ']' list delimiters. # # [1, 2] # # will be formatted as: # # [ 1, 2 ] spaces_around_list_delimiters=False # Use spaces around the power operator. spaces_around_power_operator=False # Use spaces around the subscript / slice operator. For example: # # my_list[1 : 10 : 2] spaces_around_subscript_colon=False # Adds a space after the opening '(' and before the ending ')' tuple delimiters. # # (1, 2, 3) # # will be formatted as: # # ( 1, 2, 3 ) spaces_around_tuple_delimiters=False # The number of spaces required before a trailing comment. # This can be a single value (representing the number of spaces # before each trailing comment) or list of values (representing # alignment column values; trailing comments within a block will # be aligned to the first column value that is greater than the maximum # line length within the block). For example: # # With spaces_before_comment=5: # # 1 + 1 # Adding values # # will be formatted as: # # 1 + 1 # Adding values <-- 5 spaces between the end of the statement and comment # # With spaces_before_comment=15, 20: # # 1 + 1 # Adding values # two + two # More adding # # longer_statement # This is a longer statement # short # This is a shorter statement # # a_very_long_statement_that_extends_beyond_the_final_column # Comment # short # This is a shorter statement # # will be formatted as: # # 1 + 1 # Adding values <-- end of line comments in block aligned to col 15 # two + two # More adding # # longer_statement # This is a longer statement <-- end of line comments in block aligned to col 20 # short # This is a shorter statement # # a_very_long_statement_that_extends_beyond_the_final_column # Comment <-- the end of line comments are aligned based on the line length # short # This is a shorter statement # spaces_before_comment=2 # Insert a space between the ending comma and closing bracket of a list, # etc. space_between_ending_comma_and_closing_bracket=True # Use spaces inside brackets, braces, and parentheses. 
For example: # # method_call( 1 ) # my_dict[ 3 ][ 1 ][ get_index( *args, **kwargs ) ] # my_set = { 1, 2, 3 } space_inside_brackets=False # Split before arguments split_all_comma_separated_values=False # Split before arguments, but do not split all subexpressions recursively # (unless needed). split_all_top_level_comma_separated_values=True # Split before arguments if the argument list is terminated by a # comma. split_arguments_when_comma_terminated=False # Set to True to prefer splitting before '+', '-', '*', '/', '//', or '@' # rather than after. split_before_arithmetic_operator=True # Set to True to prefer splitting before '&', '|' or '^' rather than # after. split_before_bitwise_operator=True # Split before the closing bracket if a list or dict literal doesn't fit on # a single line. split_before_closing_bracket=True # Split before a dictionary or set generator (comp_for). For example, note # the split before the 'for': # # foo = { # variable: 'Hello world, have a nice day!' # for variable in bar if variable != 42 # } split_before_dict_set_generator=True # Split before the '.' if we need to split a longer expression: # # foo = ('This is a really long string: {}, {}, {}, {}'.format(a, b, c, d)) # # would reformat to something like: # # foo = ('This is a really long string: {}, {}, {}, {}' # .format(a, b, c, d)) split_before_dot=False # Split after the opening paren which surrounds an expression if it doesn't # fit on a single line. split_before_expression_after_opening_paren=True # If an argument / parameter list is going to be split, then split before # the first argument. split_before_first_argument=False # Set to True to prefer splitting before 'and' or 'or' rather than # after. split_before_logical_operator=True # Split named assignments onto individual lines. split_before_named_assigns=True # Set to True to split list comprehensions and generators that have # non-trivial expressions and multiple clauses before each of these # clauses. For example: # # result = [ # a_long_var + 100 for a_long_var in xrange(1000) # if a_long_var % 10] # # would reformat to something like: # # result = [ # a_long_var + 100 # for a_long_var in xrange(1000) # if a_long_var % 10] split_complex_comprehension=True # The penalty for splitting right after the opening bracket. split_penalty_after_opening_bracket=100 # The penalty for splitting the line after a unary operator. split_penalty_after_unary_operator=10000 # The penalty of splitting the line around the '+', '-', '*', '/', '//', # ``%``, and '@' operators. split_penalty_arithmetic_operator=300 # The penalty for splitting right before an if expression. split_penalty_before_if_expr=0 # The penalty of splitting the line around the '&', '|', and '^' # operators. split_penalty_bitwise_operator=300 # The penalty for splitting a list comprehension or generator # expression. split_penalty_comprehension=80 # The penalty for characters over the column limit. split_penalty_excess_character=7000 # The penalty incurred by adding a line split to the unwrapped line. The # more line splits added the higher the penalty. split_penalty_for_added_line_split=30 # The penalty of splitting a list of "import as" names. 
For example: # # from a_very_long_or_indented_module_name_yada_yad import (long_argument_1, # long_argument_2, # long_argument_3) # # would reformat to something like: # # from a_very_long_or_indented_module_name_yada_yad import ( # long_argument_1, long_argument_2, long_argument_3) split_penalty_import_names=0 # The penalty of splitting the line around the 'and' and 'or' # operators. split_penalty_logical_operator=300 # Use the Tab character for indentation. use_tabs=True domdf_python_tools-3.10.0/CONTRIBUTING.rst000066400000000000000000000025131475315453000201560ustar00rootroot00000000000000============== Contributing ============== .. This file based on https://github.com/PyGithub/PyGithub/blob/master/CONTRIBUTING.md ``domdf_python_tools`` uses `tox `_ to automate testing and packaging, and `pre-commit `_ to maintain code quality. Install ``pre-commit`` with ``pip`` and install the git hook: .. code-block:: bash $ python -m pip install pre-commit $ pre-commit install Coding style -------------- `formate `_ is used for code formatting. It can be run manually via ``pre-commit``: .. code-block:: bash $ pre-commit run formate -a Or, to run the complete autoformatting suite: .. code-block:: bash $ pre-commit run -a Automated tests ------------------- Tests are run with ``tox`` and ``pytest``. To run tests for a specific Python version, such as Python 3.6: .. code-block:: bash $ tox -e py36 To run tests for all Python versions, simply run: .. code-block:: bash $ tox Type Annotations ------------------- Type annotations are checked using ``mypy``. Run ``mypy`` using ``tox``: .. code-block:: bash $ tox -e mypy Build documentation locally ------------------------------ The documentation is powered by Sphinx. A local copy of the documentation can be built with ``tox``: .. code-block:: bash $ tox -e docs domdf_python_tools-3.10.0/LICENSE000066400000000000000000000020551475315453000165230ustar00rootroot00000000000000Copyright (c) 2019-2022 Dominic Davis-Foster Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. domdf_python_tools-3.10.0/README.rst000066400000000000000000000130261475315453000172050ustar00rootroot00000000000000===================== domdf_python_tools ===================== .. start short_desc **Helpful functions for Python 🐍 🛠️** .. end short_desc .. start shields ..
list-table:: :stub-columns: 1 :widths: 10 90 * - Docs - |docs| |docs_check| * - Tests - |actions_linux| |actions_windows| |actions_macos| |coveralls| * - PyPI - |pypi-version| |supported-versions| |supported-implementations| |wheel| * - Anaconda - |conda-version| |conda-platform| * - Activity - |commits-latest| |commits-since| |maintained| |pypi-downloads| * - QA - |codefactor| |actions_flake8| |actions_mypy| * - Other - |license| |language| |requires| .. |docs| image:: https://img.shields.io/readthedocs/domdf-python-tools/latest?logo=read-the-docs :target: https://domdf-python-tools.readthedocs.io/en/latest :alt: Documentation Build Status .. |docs_check| image:: https://github.com/domdfcoding/domdf_python_tools/workflows/Docs%20Check/badge.svg :target: https://github.com/domdfcoding/domdf_python_tools/actions?query=workflow%3A%22Docs+Check%22 :alt: Docs Check Status .. |actions_linux| image:: https://github.com/domdfcoding/domdf_python_tools/workflows/Linux/badge.svg :target: https://github.com/domdfcoding/domdf_python_tools/actions?query=workflow%3A%22Linux%22 :alt: Linux Test Status .. |actions_windows| image:: https://github.com/domdfcoding/domdf_python_tools/workflows/Windows/badge.svg :target: https://github.com/domdfcoding/domdf_python_tools/actions?query=workflow%3A%22Windows%22 :alt: Windows Test Status .. |actions_macos| image:: https://github.com/domdfcoding/domdf_python_tools/workflows/macOS/badge.svg :target: https://github.com/domdfcoding/domdf_python_tools/actions?query=workflow%3A%22macOS%22 :alt: macOS Test Status .. |actions_flake8| image:: https://github.com/domdfcoding/domdf_python_tools/workflows/Flake8/badge.svg :target: https://github.com/domdfcoding/domdf_python_tools/actions?query=workflow%3A%22Flake8%22 :alt: Flake8 Status .. |actions_mypy| image:: https://github.com/domdfcoding/domdf_python_tools/workflows/mypy/badge.svg :target: https://github.com/domdfcoding/domdf_python_tools/actions?query=workflow%3A%22mypy%22 :alt: mypy status .. |requires| image:: https://dependency-dash.repo-helper.uk/github/domdfcoding/domdf_python_tools/badge.svg :target: https://dependency-dash.repo-helper.uk/github/domdfcoding/domdf_python_tools/ :alt: Requirements Status .. |coveralls| image:: https://img.shields.io/coveralls/github/domdfcoding/domdf_python_tools/master?logo=coveralls :target: https://coveralls.io/github/domdfcoding/domdf_python_tools?branch=master :alt: Coverage .. |codefactor| image:: https://img.shields.io/codefactor/grade/github/domdfcoding/domdf_python_tools?logo=codefactor :target: https://www.codefactor.io/repository/github/domdfcoding/domdf_python_tools :alt: CodeFactor Grade .. |pypi-version| image:: https://img.shields.io/pypi/v/domdf_python_tools :target: https://pypi.org/project/domdf_python_tools/ :alt: PyPI - Package Version .. |supported-versions| image:: https://img.shields.io/pypi/pyversions/domdf_python_tools?logo=python&logoColor=white :target: https://pypi.org/project/domdf_python_tools/ :alt: PyPI - Supported Python Versions .. |supported-implementations| image:: https://img.shields.io/pypi/implementation/domdf_python_tools :target: https://pypi.org/project/domdf_python_tools/ :alt: PyPI - Supported Implementations .. |wheel| image:: https://img.shields.io/pypi/wheel/domdf_python_tools :target: https://pypi.org/project/domdf_python_tools/ :alt: PyPI - Wheel .. 
|conda-version| image:: https://img.shields.io/conda/v/domdfcoding/domdf_python_tools?logo=anaconda :target: https://anaconda.org/domdfcoding/domdf_python_tools :alt: Conda - Package Version .. |conda-platform| image:: https://img.shields.io/conda/pn/domdfcoding/domdf_python_tools?label=conda%7Cplatform :target: https://anaconda.org/domdfcoding/domdf_python_tools :alt: Conda - Platform .. |license| image:: https://img.shields.io/github/license/domdfcoding/domdf_python_tools :target: https://github.com/domdfcoding/domdf_python_tools/blob/master/LICENSE :alt: License .. |language| image:: https://img.shields.io/github/languages/top/domdfcoding/domdf_python_tools :alt: GitHub top language .. |commits-since| image:: https://img.shields.io/github/commits-since/domdfcoding/domdf_python_tools/v3.10.0 :target: https://github.com/domdfcoding/domdf_python_tools/pulse :alt: GitHub commits since tagged version .. |commits-latest| image:: https://img.shields.io/github/last-commit/domdfcoding/domdf_python_tools :target: https://github.com/domdfcoding/domdf_python_tools/commit/master :alt: GitHub last commit .. |maintained| image:: https://img.shields.io/maintenance/yes/2025 :alt: Maintenance .. |pypi-downloads| image:: https://img.shields.io/pypi/dm/domdf_python_tools :target: https://pypi.org/project/domdf_python_tools/ :alt: PyPI - Downloads .. end shields **Note:** Before version 3 ``domdf_python_tools`` was licensed under the LGPLv3+. Version 3 and later are licensed under the MIT License. .. start installation ``domdf_python_tools`` can be installed from PyPI or Anaconda. To install with ``pip``: .. code-block:: bash $ python -m pip install domdf_python_tools To install with ``conda``: * First add the required channels .. code-block:: bash $ conda config --add channels https://conda.anaconda.org/conda-forge $ conda config --add channels https://conda.anaconda.org/domdfcoding * Then install .. code-block:: bash $ conda install domdf_python_tools .. end installation domdf_python_tools-3.10.0/__pkginfo__.py000066400000000000000000000002661475315453000203230ustar00rootroot00000000000000# This file is managed by 'repo_helper'. Don't edit it directly. __all__ = ["extras_require"] extras_require = {"dates": ["pytz>=2019.1"], "testing": [], "all": ["pytz>=2019.1"]} domdf_python_tools-3.10.0/doc-source/000077500000000000000000000000001475315453000175575ustar00rootroot00000000000000domdf_python_tools-3.10.0/doc-source/404.rst000066400000000000000000000003271475315453000206220ustar00rootroot00000000000000:orphan: =============== 404 =============== We looked everywhere but we couldn't find that page! .. image:: not-found.png :align: center Try using the links in the sidebar to find what you are looking for. domdf_python_tools-3.10.0/doc-source/Source.rst000066400000000000000000000026251475315453000215560ustar00rootroot00000000000000========================= Downloading source code ========================= The ``domdf_python_tools`` source code is available on GitHub, and can be accessed from the following URL: https://github.com/domdfcoding/domdf_python_tools If you have ``git`` installed, you can clone the repository with the following command: .. prompt:: bash git clone https://github.com/domdfcoding/domdf_python_tools .. parsed-literal:: Cloning into 'domdf_python_tools'... remote: Enumerating objects: 47, done. remote: Counting objects: 100% (47/47), done. remote: Compressing objects: 100% (41/41), done. 
remote: Total 173 (delta 16), reused 17 (delta 6), pack-reused 126 Receiving objects: 100% (173/173), 126.56 KiB | 678.00 KiB/s, done. Resolving deltas: 100% (66/66), done. | Alternatively, the code can be downloaded in a 'zip' file by clicking: | :guilabel:`Clone or download` --> :guilabel:`Download Zip` .. figure:: git_download.png :alt: Downloading a 'zip' file of the source code. Downloading a 'zip' file of the source code Building from source ----------------------- The recommended way to build ``domdf_python_tools`` is to use `tox `_: .. prompt:: bash tox -e build The source and wheel distributions will be in the directory ``dist``. If you wish, you may also use `pep517.build `_ or another :pep:`517`-compatible build tool. domdf_python_tools-3.10.0/doc-source/_static/000077500000000000000000000000001475315453000212055ustar00rootroot00000000000000domdf_python_tools-3.10.0/doc-source/_static/style.css000066400000000000000000000001571475315453000230620ustar00rootroot00000000000000/* This file is managed by 'repo_helper'. Don't edit it directly. */ .longtable.autosummary { width: 100%; } domdf_python_tools-3.10.0/doc-source/_templates/000077500000000000000000000000001475315453000217145ustar00rootroot00000000000000domdf_python_tools-3.10.0/doc-source/_templates/layout.html000066400000000000000000000003441475315453000241200ustar00rootroot00000000000000 {% extends "!layout.html" %} {% block extrahead %} {% endblock %} domdf_python_tools-3.10.0/doc-source/api/000077500000000000000000000000001475315453000203305ustar00rootroot00000000000000domdf_python_tools-3.10.0/doc-source/api/bases.rst000066400000000000000000000036521475315453000221650ustar00rootroot00000000000000================================== :mod:`~domdf_python_tools.bases` ================================== .. autosummary-widths:: 5/16 .. automodule:: domdf_python_tools.bases :autosummary-members: :no-members: Type Variables ------------------ .. autotypevar:: domdf_python_tools.bases._F .. autotypevar:: domdf_python_tools.bases._LU .. autotypevar:: domdf_python_tools.bases._S .. raw:: latex \begin{multicols}{2} .. autotypevar:: domdf_python_tools.bases._T .. autotypevar:: domdf_python_tools.bases._V .. raw:: latex \end{multicols} Dictable --------- .. autoclass:: domdf_python_tools.bases.Dictable :inherited-members: :special-members: UserList --------- .. autoclass:: domdf_python_tools.bases.UserList :inherited-members: :special-members: NamedList ---------- Both :class:`~.NamedList` and :func:`~.namedlist` can be used to create a named list. :func:`~.namedlist` can be used as follows: .. code-block:: python >>> ShoppingList = namedlist("ShoppingList") >>> shopping_list = ShoppingList(["egg and bacon", "egg sausage and bacon", "egg and spam", "egg bacon and spam"]) >>> If you wish to create a subclass with additional features it is recommended to subclass from :class:`NamedList` rather than from :func:`~.namedlist`. For example, do this: .. code-block:: python >>> class ShoppingList(NamedList): ... pass >>> and not this: .. code-block:: python >>> class ShoppingList(namedlist()) ... pass >>> This avoids any potential issues with :github:repo:`mypy `. .. autoclass:: domdf_python_tools.bases.NamedList :no-autosummary: :exclude-members: __repr__,__str__ .. autofunction:: domdf_python_tools.bases.namedlist UserFloat ------------ .. autoclass:: domdf_python_tools.bases.UserFloat :inherited-members: :special-members: :exclude-members: __ceil__,__floor__,conjugate,imag,real .. latex:clearpage:: Lineup --------- .. 
autoclass:: domdf_python_tools.bases.Lineup domdf_python_tools-3.10.0/doc-source/api/compat.rst000066400000000000000000000004211475315453000223420ustar00rootroot00000000000000================================= :mod:`~domdf_python_tools.compat` ================================= .. automodule:: domdf_python_tools.compat :no-autosummary: :no-special-members: :exclude-members: PYPY,PYPY36,PYPY37,PYPY38,PYPY39,PYPY37_PLUS,PYPY38_PLUS,PYPY39_PLUS domdf_python_tools-3.10.0/doc-source/api/count_demo.py000066400000000000000000000004351475315453000230400ustar00rootroot00000000000000# stdlib import itertools # this package import domdf_python_tools.iterative print(domdf_python_tools.iterative.count(14.5, 0.1)) for val in zip(domdf_python_tools.iterative.count(14.5, 0.1), itertools.count(14.5, 0.1)): print(val) print(val[0] % 0.1, val[1] % 0.1) input(">>>") domdf_python_tools-3.10.0/doc-source/api/dates.rst000066400000000000000000000012741475315453000221660ustar00rootroot00000000000000================================= :mod:`~domdf_python_tools.dates` ================================= .. autosummary-widths:: 13/32 .. automodule:: domdf_python_tools.dates :undoc-members: :exclude-members: months,month_full_names,month_short_names .. autovariable:: domdf_python_tools.dates.months :no-value: Essentially: .. code-block:: python months = { Jan="January", Feb="February", Mar="March", Apr="April", May="May", ..., Dec="December", } .. autovariable:: domdf_python_tools.dates.month_full_names :value: ('January', 'February', ..., 'December') .. autovariable:: domdf_python_tools.dates.month_short_names :value: ('Jan', 'Feb', 'Mar', ..., 'Dec') domdf_python_tools-3.10.0/doc-source/api/delegators.rst000066400000000000000000000003461475315453000232160ustar00rootroot00000000000000===================================== :mod:`~domdf_python_tools.delegators` ===================================== .. autosummary-widths:: 1/2 .. automodule:: domdf_python_tools.delegators :members: _C,delegate_kwargs,delegates domdf_python_tools-3.10.0/doc-source/api/doctools.rst000066400000000000000000000003031475315453000227040ustar00rootroot00000000000000=================================== :mod:`~domdf_python_tools.doctools` =================================== .. latex:vspace:: -20px .. automodule:: domdf_python_tools.doctools :undoc-members: domdf_python_tools-3.10.0/doc-source/api/getters.rst000066400000000000000000000004061475315453000225370ustar00rootroot00000000000000======================================= :mod:`~domdf_python_tools.getters` ======================================= .. autosummary-widths:: 45/100 .. automodule:: domdf_python_tools.getters :no-show-inheritance: :exclude-members: __call__,__reduce__,__repr__ domdf_python_tools-3.10.0/doc-source/api/import_tools.rst000066400000000000000000000003271475315453000236160ustar00rootroot00000000000000======================================= :mod:`~domdf_python_tools.import_tools` ======================================= .. autosummary-widths:: 1/2 .. automodule:: domdf_python_tools.import_tools :undoc-members: domdf_python_tools-3.10.0/doc-source/api/iterative.rst000066400000000000000000000002731475315453000230600ustar00rootroot00000000000000==================================== :mod:`~domdf_python_tools.iterative` ==================================== .. autosummary-widths:: 7/16 .. 
automodule:: domdf_python_tools.iterative domdf_python_tools-3.10.0/doc-source/api/pagesizes/000077500000000000000000000000001475315453000223225ustar00rootroot00000000000000domdf_python_tools-3.10.0/doc-source/api/pagesizes/classes.rst000066400000000000000000000004211475315453000245060ustar00rootroot00000000000000============================================ :mod:`~domdf_python_tools.pagesizes.classes` ============================================ .. autosummary-widths:: 7/16 .. automodule:: domdf_python_tools.pagesizes.classes :undoc-members: :exclude-members: __slots__,__str__ domdf_python_tools-3.10.0/doc-source/api/pagesizes/index.rst000066400000000000000000000003461475315453000241660ustar00rootroot00000000000000========================================= :mod:`~domdf_python_tools.pagesizes` ========================================= .. automodule:: domdf_python_tools.pagesizes :no-members: .. toctree:: :caption: Submodules :glob: * domdf_python_tools-3.10.0/doc-source/api/pagesizes/sizes.rst000066400000000000000000000004431475315453000242120ustar00rootroot00000000000000=========================================== :mod:`~domdf_python_tools.pagesizes.sizes` =========================================== .. automodule:: domdf_python_tools.pagesizes.sizes :no-members: :autosummary-members: :autosummary-undoc-members: :autosummary-no-titles: :undoc-members: domdf_python_tools-3.10.0/doc-source/api/pagesizes/units.rst000066400000000000000000000010411475315453000242120ustar00rootroot00000000000000=========================================== :mod:`~domdf_python_tools.pagesizes.units` =========================================== .. autosummary-widths:: 1/2 .. automodule:: domdf_python_tools.pagesizes.units :autosummary-members: :no-members: .. autoclass:: Unit :undoc-members: .. autounit:: Unitpt .. autounit:: UnitInch .. autounit:: Unitcm .. autounit:: Unitmm .. autounit:: Unitum .. autounit:: Unitpc .. autovariable:: pt .. autovariable:: inch .. autovariable:: cm .. autovariable:: mm .. autovariable:: um .. autovariable:: pc domdf_python_tools-3.10.0/doc-source/api/pagesizes/utils.rst000066400000000000000000000003461475315453000242170ustar00rootroot00000000000000=========================================== :mod:`~domdf_python_tools.pagesizes.utils` =========================================== .. autosummary-widths:: 7/16 .. automodule:: domdf_python_tools.pagesizes.utils :undoc-members: domdf_python_tools-3.10.0/doc-source/api/paths.rst000066400000000000000000000004351475315453000222030ustar00rootroot00000000000000================================= :mod:`~domdf_python_tools.paths` ================================= .. autosummary-widths:: 45/100 .. automodule:: domdf_python_tools.paths :undoc-members: :exclude-members: __enter__,__exit__,__slots__,methodmap,phase3,phase4,group,is_mount,owner domdf_python_tools-3.10.0/doc-source/api/pretty_print.rst000066400000000000000000000003061475315453000236240ustar00rootroot00000000000000======================================= :mod:`~domdf_python_tools.pretty_print` ======================================= .. autosummary-widths:: 1/2 .. automodule:: domdf_python_tools.pretty_print domdf_python_tools-3.10.0/doc-source/api/secrets.rst000066400000000000000000000003061475315453000225310ustar00rootroot00000000000000================================== :mod:`~domdf_python_tools.secrets` ================================== .. autosummary-widths:: 4/16 .. 
automodule:: domdf_python_tools.secrets :special-members: domdf_python_tools-3.10.0/doc-source/api/stringlist.rst000066400000000000000000000012361475315453000232660ustar00rootroot00000000000000===================================== :mod:`~domdf_python_tools.stringlist` ===================================== .. autosummary-widths:: 51/128 .. automodule:: domdf_python_tools.stringlist :no-members: :autosummary-members: .. autoclass:: domdf_python_tools.stringlist.DelimitedList :special-members: .. latex:clearpage:: .. autoclass:: domdf_python_tools.stringlist.Indent :special-members: .. latex:clearpage:: .. autoclass:: domdf_python_tools.stringlist.StringList :special-members: .. autofunction:: domdf_python_tools.stringlist.splitlines .. autofunction:: domdf_python_tools.stringlist.joinlines .. autotypevar:: domdf_python_tools.stringlist._SL domdf_python_tools-3.10.0/doc-source/api/terminal.rst000066400000000000000000000004011475315453000226700ustar00rootroot00000000000000=================================== :mod:`~domdf_python_tools.terminal` =================================== .. autosummary-widths:: 13/32 .. automodule:: domdf_python_tools.terminal :undoc-members: :special-members: :exclude-members: get_terminal_size domdf_python_tools-3.10.0/doc-source/api/typing.rst000066400000000000000000000045151475315453000224010ustar00rootroot00000000000000================================= :mod:`~domdf_python_tools.typing` ================================= Various type annotation aids. .. module:: domdf_python_tools.typing Type Hints ------------ .. autosummary2:: ~domdf_python_tools.typing.PathLike ~domdf_python_tools.typing.PathType ~domdf_python_tools.typing.AnyNumber ~domdf_python_tools.typing.WrapperDescriptorType, The type of methods of some built-in data types and base classes. ~domdf_python_tools.typing.MethodWrapperType, The type of *bound* methods of some built-in data types and base classes. ~domdf_python_tools.typing.MethodDescriptorType, The type of methods of some built-in data types. ~domdf_python_tools.typing.ClassMethodDescriptorType, The type of *unbound* class methods of some built-in data types. .. autogenericalias:: PathLike .. autotypevar:: PathType .. autogenericalias:: AnyNumber .. data:: WrapperDescriptorType The type of methods of some built-in data types and base classes, such as :meth:`object.__init__` or :meth:`object.__lt__`. .. versionadded:: 0.8.0 .. data:: MethodWrapperType The type of *bound* methods of some built-in data types and base classes. For example, it is the type of :code:`object().__str__`. .. versionadded:: 0.8.0 .. data:: MethodDescriptorType The type of methods of some built-in data types, such as :meth:`str.join`. .. versionadded:: 0.8.0 .. data:: ClassMethodDescriptorType The type of *unbound* class methods of some built-in data types, such as ``dict.__dict__['fromkeys']``. .. versionadded:: 0.8.0 Protocols ------------ .. autosummary:: ~domdf_python_tools.typing.JsonLibrary ~domdf_python_tools.typing.HasHead ~domdf_python_tools.typing.String ~domdf_python_tools.typing.FrameOrSeries ~domdf_python_tools.typing.SupportsIndex ~domdf_python_tools.typing.SupportsLessThan ~domdf_python_tools.typing.SupportsLessEqual ~domdf_python_tools.typing.SupportsGreaterThan ~domdf_python_tools.typing.SupportsGreaterEqual .. autoprotocol:: JsonLibrary .. autoprotocol:: HasHead .. autoprotocol:: String .. autoprotocol:: FrameOrSeries .. autoprotocol:: SupportsIndex .. autoprotocol:: SupportsLessThan .. autoprotocol:: SupportsLessEqual .. 
autoprotocol:: SupportsGreaterThan .. autoprotocol:: SupportsGreaterEqual .. latex:clearpage:: Utility Functions --------------------- .. autofunction:: check_membership domdf_python_tools-3.10.0/doc-source/api/utils.rst000066400000000000000000000002531475315453000222220ustar00rootroot00000000000000================================= :mod:`~domdf_python_tools.utils` ================================= .. autosummary-widths:: 1/2 .. automodule:: domdf_python_tools.utils domdf_python_tools-3.10.0/doc-source/api/versions.rst000066400000000000000000000003551475315453000227350ustar00rootroot00000000000000=================================== :mod:`~domdf_python_tools.versions` =================================== .. autosummary-widths:: 1/2 .. automodule:: domdf_python_tools.versions :undoc-members: :special-members: :private-members: domdf_python_tools-3.10.0/doc-source/api/words.rst000066400000000000000000000057541475315453000222330ustar00rootroot00000000000000================================= :mod:`~domdf_python_tools.words` ================================= .. automodule:: domdf_python_tools.words :no-members: Constants ------------ .. automodulesumm:: domdf_python_tools.words :autosummary-members: CR,LF,TAB,ascii_digits,greek_uppercase,greek_lowercase .. raw:: latex \begin{multicols}{2} .. autovariable:: domdf_python_tools.words.CR .. autovariable:: domdf_python_tools.words.LF .. autovariable:: domdf_python_tools.words.TAB .. autovariable:: domdf_python_tools.words.ascii_digits :no-value: .. autovariable:: domdf_python_tools.words.greek_uppercase :no-value: .. autovariable:: domdf_python_tools.words.greek_lowercase :no-value: .. raw:: latex \end{multicols} Fonts ------ .. autosummary-widths:: 13/32 .. automodulesumm:: domdf_python_tools.words :autosummary-members: make_font,Font,SERIF_BOLD_LETTERS,SERIF_ITALIC_LETTERS,SERIF_BOLD_ITALIC_LETTERS,SANS_SERIF_LETTERS,SANS_SERIF_BOLD_LETTERS,SANS_SERIF_ITALIC_LETTERS,SANS_SERIF_BOLD_ITALIC_LETTERS,SCRIPT_LETTERS,FRAKTUR_LETTERS,MONOSPACE_LETTERS,DOUBLESTRUCK_LETTERS .. autofunction:: domdf_python_tools.words.make_font .. autoclass:: domdf_python_tools.words.Font :special-members: .. raw:: latex \begin{multicols}{2} .. autovariable:: domdf_python_tools.words.SERIF_BOLD_LETTERS :no-value: :no-type: .. autovariable:: domdf_python_tools.words.SERIF_ITALIC_LETTERS :no-value: :no-type: .. autovariable:: domdf_python_tools.words.SERIF_BOLD_ITALIC_LETTERS :no-value: :no-type: .. autovariable:: domdf_python_tools.words.SANS_SERIF_LETTERS :no-value: :no-type: .. autovariable:: domdf_python_tools.words.SANS_SERIF_BOLD_LETTERS :no-value: :no-type: .. autovariable:: domdf_python_tools.words.SANS_SERIF_ITALIC_LETTERS :no-value: :no-type: .. autovariable:: domdf_python_tools.words.SANS_SERIF_BOLD_ITALIC_LETTERS :no-value: :no-type: .. autovariable:: domdf_python_tools.words.SCRIPT_LETTERS :no-value: :no-type: .. autovariable:: domdf_python_tools.words.FRAKTUR_LETTERS :no-value: :no-type: .. autovariable:: domdf_python_tools.words.MONOSPACE_LETTERS :no-value: :no-type: .. autovariable:: domdf_python_tools.words.DOUBLESTRUCK_LETTERS :no-value: :no-type: .. raw:: latex \end{multicols} Functions ------------- .. autosummary-widths:: 8/16 .. automodulesumm:: domdf_python_tools.words :autosummary-members: alpha_sort,get_words_list,get_random_word,as_text,word_join .. autofunction:: domdf_python_tools.words.alpha_sort .. autofunction:: domdf_python_tools.words.as_text .. autofunction:: domdf_python_tools.words.get_words_list .. 
autofunction:: domdf_python_tools.words.get_random_word .. autofunction:: domdf_python_tools.words.word_join .. latex:clearpage:: .. autofunction:: domdf_python_tools.words.truncate_string Classes ------------- .. autosummary-widths:: 8/16 .. automodulesumm:: domdf_python_tools.words :autosummary-members: Plural,PluralPhrase .. autoclass:: domdf_python_tools.words.Plural .. autonamedtuple:: domdf_python_tools.words.PluralPhrase domdf_python_tools-3.10.0/doc-source/conf.py000066400000000000000000000064421475315453000210640ustar00rootroot00000000000000#!/usr/bin/env python3 # This file is managed by 'repo_helper'. Don't edit it directly. # stdlib import os import re import sys # 3rd party import domdf_python_tools domdf_python_tools.__docs = True # 3rd party from sphinx_pyproject import SphinxConfig sys.path.append('.') config = SphinxConfig(globalns=globals()) project = config["project"] author = config["author"] documentation_summary = config.description github_url = "https://github.com/{github_username}/{github_repository}".format_map(config) rst_prolog = f""".. |pkgname| replace:: domdf_python_tools .. |pkgname2| replace:: ``domdf_python_tools`` .. |browse_github| replace:: `Browse the GitHub Repository <{github_url}>`__ """ slug = re.sub(r'\W+', '-', project.lower()) release = version = config.version sphinx_builder = os.environ.get("SPHINX_BUILDER", "html").lower() todo_include_todos = int(os.environ.get("SHOW_TODOS", 0)) and sphinx_builder != "latex" intersphinx_mapping = { "python": ("https://docs.python.org/3/", None), "sphinx": ("https://www.sphinx-doc.org/en/stable/", None), "pandas": ("https://pandas.pydata.org/docs/", None), "consolekit": ("https://consolekit.readthedocs.io/en/latest/", None), "pytest": ("https://docs.pytest.org/en/stable", None), "pytest-regressions": ("https://pytest-regressions.readthedocs.io/en/latest/", None), } html_theme_options = {"logo_only": False} html_context = { "display_github": True, "github_user": "domdfcoding", "github_repo": "domdf_python_tools", "github_version": "master", "conf_py_path": "/doc-source/", } htmlhelp_basename = slug latex_documents = [("index", f'{slug}.tex', project, author, "manual")] man_pages = [("index", slug, project, [author], 1)] texinfo_documents = [("index", slug, project, author, slug, project, "Miscellaneous")] toctree_plus_types = set(config["toctree_plus_types"]) autodoc_default_options = { "members": None, # Include all members (methods). 
"special-members": None, "autosummary": None, "show-inheritance": None, "exclude-members": ','.join(config["autodoc_exclude_members"]), } latex_elements = { "printindex": "\\begin{flushleft}\n\\printindex\n\\end{flushleft}", "tableofcontents": "\\pdfbookmark[0]{\\contentsname}{toc}\\sphinxtableofcontents", } # Fix for pathlib issue with sphinxemoji on Python 3.9 and Sphinx 4.x def copy_asset_files(app, exc): # 3rd party from domdf_python_tools.compat import importlib_resources from sphinx.util.fileutil import copy_asset if exc: return asset_files = ["twemoji.js", "twemoji.css"] for path in asset_files: path_str = os.fspath(importlib_resources.files("sphinxemoji") / path) copy_asset(path_str, os.path.join(app.outdir, "_static")) def setup(app): # 3rd party from sphinx_toolbox.latex import better_header_layout from sphinxemoji import sphinxemoji app.connect("config-inited", lambda app, config: better_header_layout(config)) app.connect("build-finished", copy_asset_files) app.add_js_file("https://unpkg.com/twemoji@latest/dist/twemoji.min.js") app.add_js_file("twemoji.js") app.add_css_file("twemoji.css") app.add_transform(sphinxemoji.EmojiSubstitutions) manpages_url = "https://manpages.debian.org/{path}" toctree_plus_types.add("fixture") latex_elements["preamble"] = "\\usepackage{textgreek}\\usepackage{multicol}" needspace_amount = r"5\baselineskip" domdf_python_tools-3.10.0/doc-source/contributing.rst000066400000000000000000000024071475315453000230230ustar00rootroot00000000000000Overview --------- .. This file based on https://github.com/PyGithub/PyGithub/blob/master/CONTRIBUTING.md ``domdf_python_tools`` uses `tox `_ to automate testing and packaging, and `pre-commit `_ to maintain code quality. Install ``pre-commit`` with ``pip`` and install the git hook: .. prompt:: bash python -m pip install pre-commit pre-commit install Coding style -------------- `formate `_ is used for code formatting. It can be run manually via ``pre-commit``: .. prompt:: bash pre-commit run formate -a Or, to run the complete autoformatting suite: .. prompt:: bash pre-commit run -a Automated tests ------------------- Tests are run with ``tox`` and ``pytest``. To run tests for a specific Python version, such as Python 3.6: .. prompt:: bash tox -e py36 To run tests for all Python versions, simply run: .. prompt:: bash tox Type Annotations ------------------- Type annotations are checked using ``mypy``. Run ``mypy`` using ``tox``: .. prompt:: bash tox -e mypy Build documentation locally ------------------------------ The documentation is powered by Sphinx. A local copy of the documentation can be built with ``tox``: .. 
prompt:: bash tox -e docs domdf_python_tools-3.10.0/doc-source/docutils.conf000066400000000000000000000000501475315453000222470ustar00rootroot00000000000000[restructuredtext parser] tab_width : 4 domdf_python_tools-3.10.0/doc-source/git_download.png [binary PNG image data omitted (tar header and image bytes); screenshot "Downloading a 'zip' file of the source code", referenced from doc-source/Source.rst] domdf_python_tools-3.10.0/doc-source/index.rst000066400000000000000000000101431475315453000214170ustar00rootroot00000000000000==================== domdf_python_tools ==================== .. start short_desc .. documentation-summary:: :meta: .. end short_desc .. start shields .. only:: html .. list-table:: :stub-columns: 1 :widths: 10 90 * - Docs - |docs| |docs_check| * - Tests - |actions_linux| |actions_windows| |actions_macos| |coveralls| * - PyPI - |pypi-version| |supported-versions| |supported-implementations| |wheel| * - Anaconda - |conda-version| |conda-platform| * - Activity - |commits-latest| |commits-since| |maintained| |pypi-downloads| * - QA - |codefactor| |actions_flake8| |actions_mypy| * - Other - |license| |language| |requires| .. |docs| rtfd-shield:: :project: domdf-python-tools :alt: Documentation Build Status .. |docs_check| actions-shield:: :workflow: Docs Check :alt: Docs Check Status .. |actions_linux| actions-shield:: :workflow: Linux :alt: Linux Test Status .. |actions_windows| actions-shield:: :workflow: Windows :alt: Windows Test Status .. |actions_macos| actions-shield:: :workflow: macOS :alt: macOS Test Status .. |actions_flake8| actions-shield:: :workflow: Flake8 :alt: Flake8 Status .. |actions_mypy| actions-shield:: :workflow: mypy :alt: mypy status .. |requires| image:: https://dependency-dash.repo-helper.uk/github/domdfcoding/domdf_python_tools/badge.svg :target: https://dependency-dash.repo-helper.uk/github/domdfcoding/domdf_python_tools/ :alt: Requirements Status .. |coveralls| coveralls-shield:: :alt: Coverage .. |codefactor| codefactor-shield:: :alt: CodeFactor Grade .. |pypi-version| pypi-shield:: :project: domdf_python_tools :version: :alt: PyPI - Package Version .. |supported-versions| pypi-shield:: :project: domdf_python_tools :py-versions: :alt: PyPI - Supported Python Versions .. |supported-implementations| pypi-shield:: :project: domdf_python_tools :implementations: :alt: PyPI - Supported Implementations .. |wheel| pypi-shield:: :project: domdf_python_tools :wheel: :alt: PyPI - Wheel .. |conda-version| image:: https://img.shields.io/conda/v/domdfcoding/domdf_python_tools?logo=anaconda :target: https://anaconda.org/domdfcoding/domdf_python_tools :alt: Conda - Package Version ..
|conda-platform| image:: https://img.shields.io/conda/pn/domdfcoding/domdf_python_tools?label=conda%7Cplatform :target: https://anaconda.org/domdfcoding/domdf_python_tools :alt: Conda - Platform .. |license| github-shield:: :license: :alt: License .. |language| github-shield:: :top-language: :alt: GitHub top language .. |commits-since| github-shield:: :commits-since: v3.10.0 :alt: GitHub commits since tagged version .. |commits-latest| github-shield:: :last-commit: :alt: GitHub last commit .. |maintained| maintained-shield:: 2025 :alt: Maintenance .. |pypi-downloads| pypi-shield:: :project: domdf_python_tools :downloads: month :alt: PyPI - Downloads .. end shields .. only:: html .. note:: Before version 3 ``domdf_python_tools`` was licensed under the LGPLv3+. Version 3 and later are licensed under the MIT License. Installation ------------- .. start installation .. installation:: domdf_python_tools :pypi: :github: :anaconda: :conda-channels: conda-forge, domdfcoding .. end installation Highlights --------------- .. api-highlights:: :module: domdf_python_tools :colours: blue,green,red,orange .stringlist.StringList .stringlist.DelimitedList .paths.PathPlus .paths.TemporaryPathPlus .iterative.groupfloats .iterative.count .words.Plural .words.word_join .utils.strtobool .utils.head Contents ---------- .. html-section:: .. toctree:: :hidden: Home .. toctree:: :maxdepth: 1 :caption: API Reference :glob: api/* api/*/index .. toctree:: :maxdepth: 2 :caption: Contributing contributing Source license .. sidebar-links:: :caption: Links :github: :pypi: domdf_python_tools .. start links .. only:: html View the :ref:`Function Index ` or browse the `Source Code <_modules/index.html>`__. :github:repo:`Browse the GitHub Repository ` .. end links domdf_python_tools-3.10.0/doc-source/latex_unicode.py000066400000000000000000000235051475315453000227610ustar00rootroot00000000000000# TemporaryDirectorySubclassDocumenter and Autosummary based on Sphinx # https://github.com/sphinx-doc/sphinx # # Copyright (c) 2007-2021 by the Sphinx team. # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # # * Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
# # stdlib import re import sys from tempfile import TemporaryDirectory from types import ModuleType from typing import Any, List, Optional, Tuple # 3rd party from docutils import nodes from docutils.nodes import Node from docutils.statemachine import StringList from sphinx import addnodes from sphinx.application import Sphinx, logger from sphinx.builders.latex import LaTeXTranslator from sphinx.errors import PycodeError from sphinx.ext.autodoc import Documenter, Options from sphinx.ext.autodoc.directive import DocumenterBridge from sphinx.ext.autosummary import extract_summary, get_import_prefixes_from_env, mangle_signature from sphinx.locale import _, __, admonitionlabels from sphinx.pycode import ModuleAnalyzer from sphinx.util.docutils import SphinxDirective, SphinxRole, unescape from sphinx_toolbox import latex from sphinx_toolbox.more_autodoc.typehints import default_preprocessors, format_annotation from sphinx_toolbox.more_autosummary import PatchedAutoSummClassDocumenter from sphinx_toolbox.more_autosummary.column_widths import AutosummaryWidths # this package from domdf_python_tools.paths import PathPlus def replace_emoji(app: Sphinx, exception: Optional[Exception] = None): if exception: return if app.builder.name.lower() != "latex": return output_file = PathPlus(app.builder.outdir) / f"{app.builder.titles[0][1]}.tex" output_content = output_file.read_text() # Documentation summary emoji output_content = output_content.replace(" 🐍\u2002🛠\ufe0f", '') output_content = output_content.replace('🐍', '') output_content = output_content.replace('🛠', '') output_content = output_content.replace('\ufe0f', '') # Variation Selector-16 output_content = output_content.replace('≈', r" $\approx$ ") # coming in sphinx-toolbox 2.12 output_content = output_content.replace('μ', r"\textmu ") # fixed in sphinx-toolbox 2.12 output_content = output_content.replace(r"\textmum", r"\textmu m") # fixed in sphinx-toolbox 2.12 output_content = output_content.replace('\u205f', r"\medspace ") # medium mathematical space # in words.py output_content = output_content.replace(r'A\sphinxhyphen{}Ω', r"A\sphinxhyphen{}\textOmega") output_content = output_content.replace(r'α\sphinxhyphen{}ϖ', r"\textalpha\sphinxhyphen{}\textomega") output_file.write_clean(output_content) class InlineRole(SphinxRole): """ Sphinx role for showing inline code (monospaced) which contains backticks. """ def run(self): return [nodes.literal('', unescape(self.text))], [] class TemporaryDirectorySubclassDocumenter(PatchedAutoSummClassDocumenter): """ Modified class documenter for documenting :class:`domdf_python_tools.paths.TemporaryDirectory`.
Can be removed with sphinx-toolbox 2.12.0 """ priority = PatchedAutoSummClassDocumenter.priority + 2 objtype = "td-class" directivetype = "class" @classmethod def can_document_member( cls, member: Any, membername: str, isattr: bool, parent: Any, ) -> bool: if not isinstance(member, type): return False if not issubclass(member, TemporaryDirectory): return False return super().can_document_member(member, membername, isattr, parent) def add_directive_header(self, sig: str) -> None: sourcename = self.get_sourcename() if self.doc_as_attr: self.directivetype = "attribute" Documenter.add_directive_header(self, sig) if self.analyzer and '.'.join(self.objpath) in self.analyzer.finals: self.add_line(" :final:", sourcename) # add inheritance info, if wanted if not self.doc_as_attr and self.options.show_inheritance: sourcename = self.get_sourcename() self.add_line('', sourcename) if hasattr(self.object, "__bases__") and len(self.object.__bases__): bases = [] for b in self.object.__bases__: if b is TemporaryDirectory: bases.append(":py:obj:`~tempfile.TemporaryDirectory`") elif b.__module__ in ("__builtin__", "builtins"): bases.append(f':class:`{b.__name__}`') else: bases.append(format_annotation(b)) self.add_line(" " + _("Bases: %s") % ", ".join(bases), sourcename) class Autosummary(AutosummaryWidths): """ Modified autosummary directive which allows the summary of objects to be customised. """ def get_items(self, names: List[str]) -> List[Tuple[str, str, str, str]]: """Try to import the given names, and return a list of ``[(name, signature, summary_string, real_name), ...]``. """ prefixes = get_import_prefixes_from_env(self.env) items: List[Tuple[str, str, str, str]] = [] max_item_chars = 50 for name in names: summary = None if ',' in name: name, summary = name.split(',') name = name.strip().split()[0] summary = summary.strip() display_name = name if name.startswith('~'): name = name[1:] display_name = name.split('.')[-1] try: real_name, obj, parent, modname = self.import_by_name(name, prefixes=prefixes) except ImportError: logger.warning(__("autosummary: failed to import %s"), name, location=self.get_source_info()) continue self.bridge.result = StringList() # initialize for each documenter full_name = real_name if not isinstance(obj, ModuleType): # give explicitly separated module name, so that members # of inner classes can be documented full_name = modname + "::" + full_name[len(modname) + 1:] # NB. using full_name here is important, since Documenters # handle module prefixes slightly differently documenter = self.create_documenter(self.env.app, obj, parent, full_name) if not documenter.parse_name(): logger.warning(__("failed to parse name %s"), real_name, location=self.get_source_info()) items.append((display_name, '', '', real_name)) continue if not documenter.import_object(): logger.warning(__("failed to import object %s"), real_name, location=self.get_source_info()) items.append((display_name, '', '', real_name)) continue if documenter.options.members and not documenter.check_module(): continue # try to also get a source code analyzer for attribute docs try: documenter.analyzer = ModuleAnalyzer.for_module(documenter.get_real_modname()) # parse right now, to get PycodeErrors on parsing (results will # be cached anyway) documenter.analyzer.find_attr_docs() except PycodeError as err: logger.debug("[autodoc] module analyzer failed: %s", err) # no source file -- e.g. 
for builtin and C modules documenter.analyzer = None # -- Grab the signature try: sig = documenter.format_signature(show_annotation=False) except TypeError: # the documenter does not support ``show_annotation`` option sig = documenter.format_signature() if not sig: sig = '' else: max_chars = max(10, max_item_chars - len(display_name)) sig = mangle_signature(sig, max_chars=max_chars) # -- Grab the summary documenter.add_content(None) if summary is None: summary = extract_summary(self.bridge.result.data[:], self.state.document) items.append((display_name, sig, summary, real_name)) return items def run(self) -> List[Node]: self.bridge = DocumenterBridge(self.env, self.state.document.reporter, Options(), self.lineno, self.state) names = [x.strip() for x in self.content if x.strip() and re.search(r'^[~a-zA-Z_]', x.strip()[0])] items = self.get_items(names) nodes = self.get_table(items) if "caption" in self.options: logger.warning(__("A captioned autosummary requires :toctree: option. ignored."), location=nodes[-1]) return nodes class AutoUnitDirective(SphinxDirective): required_arguments = 1 def run(self) -> List[nodes.Node]: content = [f".. autoclass:: {self.arguments[0]}", " :no-autosummary:"] content_node = nodes.paragraph(rawsource='\n'.join(content)) self.state.nested_parse(StringList(content), self.content_offset, content_node) return content_node.children def setup(app: Sphinx): app.connect("build-finished", replace_emoji) app.connect("build-finished", latex.replace_unknown_unicode, priority=550) app.add_autodocumenter(TemporaryDirectorySubclassDocumenter) app.add_role("inline-code", InlineRole()) app.add_directive("autounit", AutoUnitDirective) app.add_directive("autosummary2", Autosummary, override=True) class SysStdout: def __repr__(self) -> str: return "sys.stdout" class SysStderr: def __repr__(self) -> str: return "sys.stderr" default_preprocessors.append((lambda x: x is sys.stdout, lambda d: SysStdout())) default_preprocessors.append((lambda x: x is sys.stderr, lambda d: SysStderr())) domdf_python_tools-3.10.0/doc-source/license.rst000066400000000000000000000002361475315453000217340ustar00rootroot00000000000000========= License ========= ``domdf_python_tools`` is licensed under the :choosealicense:`MIT` .. license-info:: MIT .. 
license:: :py: domdf_python_tools domdf_python_tools-3.10.0/doc-source/not-found.png000066400000000000000000001352371475315453000222110ustar00rootroot00000000000000‰PNG  IHDR\r¨f'¸zTXtRaw profile type exifxÚ­œi’7’…ÿãsìp«ÙÜ`Ž?ßC)J¢š”͈M»*3w‹ÃƒîüÏ_÷_ü×S4—K³Úkõü—{îqðóŸÿúû3øüþüö_øúóOßwßÿùšøš>?hãë]ƒï—?Þðí3Âüó÷}ý$Ú×…¾}òד>9ò—ýã"ù~ü|?ä¯ õóùKíÖ~\ꌟ¯ëë…o)_¿S{—þ~ý÷ã7rc—váU)Æ“BòïÏüYAúüü.ü“éuü|¤”ŠãKHßî• ùÓíý±Á?nÐO7ßýu÷ÿióãøú~úË^ÖoQ«?ÿA(?ßü·Å?|pú¾¢ø—Œ˜þv;_¿ïÝvïùÜÝÈ•­_õ=ÞexádËÓ{[åWãwáïíýêü2?ü"8Û/?ùµB‘¨\rØa„ÎûºÂb‰9žØøã",úž¥{\DŒ(êW¸±¥žv2"·âq„.§ø}-á}nŸ·‚ñÉ;ðÒ¸Xà-ÿøËý§þ›_îÞ¥- Þ¾ïëŠÊ\–¡ÈéO^E@ÂýŠ[yüí×÷¢õ?6Áò¶Ù¸Ááç糄?r+½8'^Wøú©ŠàÚþº[Ägð•„5øc }44XyL9N"J‰›EÆœR®EJ†Ïæ=-¼×ÆkÔ·Á&QRMØô4VÎ…üiÙÈ¡QRÉ¥”ZZ1Wz5Õ\K­µUÜh©åVZm­YëmX²lŪ53ë6zì ,½öÖ­÷>Ftƒ\kðúÁwfœiæYfmÚìs,ÒgåUV]mÙêkì¸Ó&vÝmÛî{œàHqò)§žvìô3.¹vÓÍ·ÜzÛµÛïøµ¯¨þí׿ˆZøŠZ|‘ÒëÚ÷¨ñ]×Ú·KÁIQ̈Ẍ7E€„ŽŠ™·sTä3ߣð,²È¢Ø¸1B˜Oˆå†ï±û#r¿7Wì·â9§ÐýDκ¿Çí'QÛâ¹õ"ö©Bí©OT??6\´!Rÿøu¿Ó\¡Œ[ 5³Špòèmö»÷¡úÒvþ®•[]Ààí`c-¥µ[çõs°¾ë¹%YeS@°E"t_Ón1Ë—YêXí:¿@Ô[äŽÊ -êaß39rAÉ †³;ìNi|Șí®8)ØvÙ n›Û;îªú+ÅẳZXeî³On mݕܜÃNë¬Ü×fë¶ð®<µüÞ +šZxeço “ï¥Îm7°¢ÙÍ‹ ªmÞQZšlŠ‚¹ãl©¬bËvˆ‡PÕ•ÜMãìr¸ÍpÉvÞGŸ¸;Øü7¿ºßxaá¶<·Ë§Ä0o¨—ÔU§Ak¥Sl7wœb„iuÜ?ÙæÚqðu°ìä8Æ ©Iƒ]ç;ÌÆ^ëÔÈ$·N¿7-Ê4ôu¹Ç½ØÕÅ~²ÓlMÛ'øëÓ"gÖ;j™‰xŽc¡²¨z \RÅqÚô7s¡Ê™m¤k‘MccCÚ·—ÙûⵉeÏrWš÷$¾MÒõy­¥å·5úÈlv<~ôiqJ?^ÒùšÉéChÚ G"mÜs3;²Á'»;+}œÜ%ž¡|✠?Ë—:UQÑqsÍYÚ<þH5²´Êöqq½2ØÈ6ÖräÔÍN©Ôó̸°£bj”‘eŠ–zë$ͲRE´õ*B ”K,(,·kY„”=ÊvÏñJ»» Òž:©“Ø•¨ îžl õ¬£[I£ ?Wñ uHm€Ù¬.´ ’¥ÙÙ„×ê| ‚X\~Úm0ˆÄŸ”ýá•\Ó7 'û¾B蕸 pä¼Ò-zM½þ–Ò6ÙS !8CõšŽçëÉA[Ê%@¾ËÜ©€Ê¼O”8š@ïŠä {Dn’ÖÀÄ,”2a˜‡÷(ÙŸyJ6O¡M;››%vÐÞ™È!p+B‘ó9à³þ·Ø`R“ð(ú¨qöèí¦?ì¾|ЭØÅ‰LäN}\ȧ®VÆ!‡Iß=›()…R'÷Ë,`b«r¢c±¾`ê1Z&H,ØN)KPѸñ¸å²ÔN ÕX›B6&n ,Þƒrtž@WbI@äYZ½ò!´Á-S`ðËIƒ•à qCkK•]õò}¥MANYFêî‘1H„C£—‡&Dá×_Ýß~²ÓÄbßÀ Q³Ã`÷´#NZ=‚¨¬²àå<ŸNY½¯‘kÄ:²-AQIh2¶½Dpá(9»‘L¾äï\™ "ÎÜ ¸x”"1P¡»ŽÌChÆvFª1ÇÆø\ !ßN]d¾·w7(†¾x1D°SO#oaCÒ‘ªç†JáÚ¼” d"r€]ßÿHˆüHw¨WPmÃk²"t"w% ƒÜ¸ûÕ<аaši¬=Oou\n}³*êÊ7•ü1ÄfuYí¸ØçºqùØ+xSX·1à&È”,37’Öó=xP¨cWÛT$É|CøŠPͱfy*Mâ¦DÀé”Q"x$æ­ª»´fŸa”4Íòô²’ànïNƒÌ¢>¢i’—žè`߈wÓýA/&uµ)KÛD°>@髺½Ë`8žË ,-©4Ø"‡XšÕ—ßøI„ ¿¥kßZ òwQåQ"ÄapÒä¦R=¢i&( D uúM¼á· —¡%+][ `”CvZnÃ(´±ß&_p9ø1t¼ÈCª¦‘ D4Ü5î[²ÞÍKo0„JžÛÍJØAý:ÛK€üArHkl‘]]ÄK°Ÿ,ÄÎ`µ0JãË5‘Ýîú À‚‹#uÞt!50æIe­3¨oâç2jj®@yA‰eM½²Ì<Þõ¨]”„•=“8C#'È,½Ô” Í²3Þ9Kjâ‘S9~7€~£p²³…Bó8MÊfô•zˆ441*'3‹¸ˆ$©:D*\ˆÒ¤[Øm9¸ùå8eAP}*D§µòD¡g—PXÀ@¤Ãw•qµEE6–À½¥¢D駃!%‰>ŽŠË´aãPÂFzHd-ÿ-¤CèZd^Ä|ÏR¨éç!Áš1}Ø^$@u@cÜ5ò€G Ý;±UlÕRà·À륊£4ãàüSä‰Ì`w(}äÏ­"+«7¢&z¥dGRr&uoþ&㣜µLqò‚IØAÆL ËCàúÉwLM ô¯5$J«S´m¥~kØ2R "Z¹\‚Ky•~¿-é6¹ª Qíx'µ-ÔaO…€ Þ¥¬û¬»>dÎÆãl=Šw‰L $,å¡9ÊÚwº}Ù®Ž$ºDøÂË<ú˜ ÖàÙcP Öô„éhضøc"Ño”Ž †ì+8fýìPbè6P¡ÉGQ™û TÐ.D舄|î(óp;˜â¨W*œFÝæ-´ë`¿vCÈ^0¶$”Âs(ºÃr®ñ=H(uX ñ mrèŸ{Uî¤+ºÄìÛ`½Øz+,{?X|! 
µ|íz~µ¦¹Î€_oRØ(~`q,Ùˆ×ëý€ëø}¿®iÚ'=zz­VýåúÖ“ÖjE_©n¹;²Î@Z­¿Û¾ÝÏ[©àZ8 ¯¥™€Ë—/§u]ÿn4-BÖðþ¨PJ¿Ïçÿ™gž™[kà¯õ ½Öªø†™¼BÒX+£«ÿ«ÕÖe`èõð#¼ë]ïêõù|Dù ÿ*¾Ï2€¿SUõË–$Ÿ5þzÝ@×Ú ·‘í÷ëeç“ øv^w-œ†×œî¾ûîßïÿW„ÿ‰ò.¸¤ÛW{ÏRJ¤(Ê?>õÔS™eþ†ÖÄð×sÅ_«ßŒ¬Üëü·‚FÐðøÖ[o•b±ØM„=„mXtŠê„,¥ô ¥ôT±X<þ /Ô×{µ¿ž*äF]ѯ…o½ˆ`3™+þZƒÿZh ëùÞMKkÈ뵚_o°“ ò»Òuz]ÇóÑl +¸ž· `~­ÕùµNÞÙL«þzþZƒ#úðv"€Íø`ç“ öâX)ØßR€·ËFê ¼Ñ»È:µ8£. ¤-ÀIÛ.]Àé:½F¿ézû6šF°aWÝ,›YÅßLa¾kEâ×JXkaS~3ÀZ™¬òóVöõ ó‘ öæXoØ\¤°’×®” –ûÝoæßèí¢ Co bXkÕ­Õÿü»]àZ'½-€ñv’hç“ ü›n§àµÐÞöèÈÆ±ó;&Àúi›¼ó¬ë÷÷Vrú­‡fp-4‚Žt`S}ÏdýÆk ,zÿvG:Ðù6VБŽt¤#éHG:Ò‘Žt¤#éHG:Ò‘Žt¤#éHG:Ò‘Žt¤#éHG:Ò‘Žt¤#éHG:Ò‘Žt¤#i-ÿ?_=0.6.0 domdf-sphinx-theme>=0.3.0 extras-require>=0.5.0 html-section>=0.3.0 pandas>=1.1.2 pytest>=6.2.0 pytest-regressions>=2.0.2 pytz>=2019.1 seed-intersphinx-mapping>=1.2.2 sphinx>=3.0.3 sphinx-autofixture>=0.2.1 sphinx-copybutton>=0.2.12 sphinx-debuginfo>=0.2.2 sphinx-highlights>=0.1.0 sphinx-licenseinfo>=0.3.1 sphinx-notfound-page>=0.7.1 sphinx-pyproject>=0.1.0 sphinx-toolbox>=3.5.0 sphinxcontrib-applehelp==1.0.4 sphinxcontrib-devhelp==1.0.2 sphinxcontrib-htmlhelp==2.0.1 sphinxcontrib-httpdomain>=1.7.0 sphinxcontrib-jsmath==1.0.1 sphinxcontrib-qthelp==1.0.3 sphinxcontrib-serializinghtml==1.1.5 sphinxemoji>=0.1.6 toctree-plus>=0.6.1 domdf_python_tools-3.10.0/domdf_python_tools/000077500000000000000000000000001475315453000214265ustar00rootroot00000000000000domdf_python_tools-3.10.0/domdf_python_tools/__init__.py000066400000000000000000000026651475315453000235500ustar00rootroot00000000000000#!/usr/bin/env python # # __init__.py """ Helpful functions for Python ðŸâ€‚ðŸ› ï¸ . """ # # Copyright © 2018-2020 by Dominic Davis-Foster # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. # IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, # DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR # OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE # OR OTHER DEALINGS IN THE SOFTWARE. # __author__: str = "Dominic Davis-Foster" __copyright__: str = "2014-2020 Dominic Davis-Foster" __license__: str = "MIT" __version__: str = "3.10.0" __email__: str = "dominic@davis-foster.co.uk" __docs = False domdf_python_tools-3.10.0/domdf_python_tools/_is_match.py000066400000000000000000000042071475315453000237310ustar00rootroot00000000000000# From https://github.com/dgilland/pydash # Stripped back to the bare minimum. 
# # MIT License # # Copyright (c) 2020 Derrick Gilland # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. # IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, # DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR # OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE # OR OTHER DEALINGS IN THE SOFTWARE. # # stdlib from typing import Iterable, Iterator __all__ = ["is_match_with", "iterator"] def is_match_with(obj, source): # noqa: D103 if ( isinstance(obj, dict) and isinstance(source, dict) or isinstance(obj, list) and isinstance(source, list) or isinstance(obj, tuple) and isinstance(source, tuple) ): # Set equal to True if source is empty, otherwise, False and then allow # deep comparison to determine equality. equal = not source # Walk a/b to determine equality. for key, value in iterator(source): try: equal = is_match_with(obj[key], value) except Exception: # pylint: disable=broad-except equal = False if not equal: break else: equal = obj == source return equal def iterator(obj) -> Iterator: # noqa: D103 if isinstance(obj, dict) or hasattr(obj, "items"): return iter(obj.items()) elif isinstance(obj, Iterable): return enumerate(obj) else: # pragma: no cover return iter(getattr(obj, "__dict__", {}).items()) domdf_python_tools-3.10.0/domdf_python_tools/bases.py000066400000000000000000000424721475315453000231060ustar00rootroot00000000000000# !/usr/bin/env python # # bases.py """ Useful base classes. """ # # Copyright © 2020 Dominic Davis-Foster # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. # IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, # DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR # OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE # OR OTHER DEALINGS IN THE SOFTWARE. # # UserList based on CPython. # Licensed under the Python Software Foundation License Version 2. # Copyright © 2001-2020 Python Software Foundation. All rights reserved. # Copyright © 2000 BeOpen.com. All rights reserved. 
# Copyright © 1995-2000 Corporation for National Research Initiatives. All rights reserved. # Copyright © 1991-1995 Stichting Mathematisch Centrum. All rights reserved. # # stdlib from abc import abstractmethod from numbers import Real from pprint import pformat from typing import ( Any, Dict, Iterable, Iterator, List, MutableSequence, Optional, SupportsFloat, Tuple, Type, TypeVar, Union, overload ) # this package from domdf_python_tools._is_match import is_match_with from domdf_python_tools.doctools import prettify_docstrings from domdf_python_tools.typing import SupportsIndex __all__ = [ "Dictable", "NamedList", "namedlist", "UserList", "UserFloat", "Lineup", "_V", "_LU", "_T", "_S", "_F", ] _F = TypeVar("_F", bound="UserFloat") _LU = TypeVar("_LU", bound="Lineup") _S = TypeVar("_S", bound="UserList") _T = TypeVar("_T") _V = TypeVar("_V") @prettify_docstrings class Dictable(Iterable[Tuple[str, _V]]): """ The basic structure of a class that can be converted into a dictionary. """ @abstractmethod def __init__(self, *args, **kwargs): pass def __repr__(self) -> str: return super().__repr__() def __str__(self) -> str: return self.__repr__() def __iter__(self) -> Iterator[Tuple[str, _V]]: """ Iterate over the attributes of the class. """ yield from self.__dict__.items() def __getstate__(self) -> Dict[str, _V]: return self.__dict__ def __setstate__(self, state): self.__init__(**state) # type: ignore[misc] def __copy__(self): return self.__class__(**self.__dict__) def __deepcopy__(self, memodict={}): return self.__copy__() @property @abstractmethod def __dict__(self): return dict() # pragma: no cover (abc) def __eq__(self, other) -> bool: if isinstance(other, self.__class__): return is_match_with(other.__dict__, self.__dict__) return NotImplemented @prettify_docstrings class UserList(MutableSequence[_T]): """ Typed version of :class:`collections.UserList`. Class that simulates a list. The instance’s contents are kept in a regular list, which is accessible via the :attr:`~.UserList.data` attribute of :class:`~.UserList` instances. The instance’s contents are initially set to a copy of list, defaulting to the empty list ``[]``. .. versionadded:: 0.10.0 :param initlist: The initial values to populate the :class:`~.UserList` with. :default initlist: ``[]`` .. latex:clearpage:: .. admonition:: Subclassing requirements Subclasses of :class:`~.UserList` are expected to offer a constructor which can be called with either no arguments or one argument. List operations which return a new sequence attempt to create an instance of the actual implementation class. To do so, it assumes that the constructor can be called with a single parameter, which is a sequence object used as a data source. If a derived class does not wish to comply with this requirement, all of the special methods supported by this class will need to be overridden; please consult the sources for information about the methods which need to be provided in that case. """ #: A real list object used to store the contents of the :class:`~domdf_python_tools.bases.UserList`. data: List[_T] def __init__(self, initlist: Optional[Iterable[_T]] = None): self.data = [] if initlist is not None: # XXX should this accept an arbitrary sequence? 
if type(initlist) is type(self.data): # noqa: E721 self.data[:] = initlist elif isinstance(initlist, UserList): self.data[:] = initlist.data[:] else: self.data = list(initlist) def __repr__(self) -> str: return repr(self.data) def __lt__(self, other: object) -> bool: return self.data < self.__cast(other) def __le__(self, other: object) -> bool: return self.data <= self.__cast(other) def __eq__(self, other: object) -> bool: return self.data == self.__cast(other) def __gt__(self, other: object) -> bool: return self.data > self.__cast(other) def __ge__(self, other: object) -> bool: return self.data >= self.__cast(other) @staticmethod def __cast(other): return other.data if isinstance(other, UserList) else other def __contains__(self, item: object) -> bool: return item in self.data def __len__(self) -> int: return len(self.data) def __iter__(self) -> Iterator[_T]: yield from self.data @overload def __getitem__(self, i: int) -> _T: ... @overload def __getitem__(self, i: slice) -> MutableSequence[_T]: ... def __getitem__(self, i: Union[int, slice]) -> Union[_T, MutableSequence[_T]]: if isinstance(i, slice): return self.__class__(self.data[i]) else: return self.data[i] @overload def __setitem__(self, i: int, o: _T) -> None: ... @overload def __setitem__(self, i: slice, o: Iterable[_T]) -> None: ... def __setitem__(self, i: Union[int, slice], item: Union[_T, Iterable[_T]]) -> None: self.data[i] = item # type: ignore[index, assignment] def __delitem__(self, i: Union[int, slice]): del self.data[i] def __add__(self: _S, other: Iterable[_T]) -> _S: if isinstance(other, UserList): return self.__class__(self.data + other.data) elif isinstance(other, type(self.data)): return self.__class__(self.data + other) return self.__class__(self.data + list(other)) def __radd__(self, other): if isinstance(other, UserList): return self.__class__(other.data + self.data) elif isinstance(other, type(self.data)): return self.__class__(other + self.data) return self.__class__(list(other) + self.data) def __iadd__(self: _S, other: Iterable[_T]) -> _S: if isinstance(other, UserList): self.data += other.data elif isinstance(other, type(self.data)): self.data += other else: self.data += list(other) return self def __mul__(self: _S, n: int) -> _S: return self.__class__(self.data * n) __rmul__ = __mul__ def __imul__(self: _S, n: int) -> _S: self.data *= n return self def __copy__(self): inst = self.__class__.__new__(self.__class__) inst.__dict__.update(self.__dict__) # Create a copy and avoid triggering descriptors inst.__dict__["data"] = self.__dict__["data"][:] return inst def append(self, item: _T) -> None: """ Append ``item`` to the end of the :class:`~.domdf_python_tools.bases.UserList`. """ self.data.append(item) def insert(self, i: int, item: _T) -> None: """ Insert ``item`` at position ``i`` in the :class:`~.domdf_python_tools.bases.UserList`. """ self.data.insert(i, item) def pop(self, i: int = -1) -> _T: """ Removes and returns the item at index ``i``. :raises IndexError: if list is empty or index is out of range. """ return self.data.pop(i) def remove(self, item: _T) -> None: """ Removes the first occurrence of ``item`` from the list. :param item: :rtype: :raises ValueError: if the item is not present. .. latex:clearpage:: """ self.data.remove(item) def clear(self) -> None: """ Remove all items from the :class:`~.domdf_python_tools.bases.UserList`. """ self.data.clear() def copy(self: _S) -> _S: """ Returns a copy of the :class:`~.domdf_python_tools.bases.UserList`. 
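A short, illustrative example (the variable names are arbitrary):

.. code-block:: python

    >>> original = UserList([1, 2, 3])
    >>> duplicate = original.copy()
    >>> duplicate.append(4)
    >>> original
    [1, 2, 3]
    >>> duplicate
    [1, 2, 3, 4]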
""" return self.__class__(self) def count(self, item: _T) -> int: """ Returns the number of occurrences of ``item`` in the :class:`~.domdf_python_tools.bases.UserList`. """ return self.data.count(item) def index(self, item: _T, *args: Any) -> int: """ Returns the index of the fist element matching ``item``. :param item: :param args: :raises ValueError: if the item is not present. """ return self.data.index(item, *args) def reverse(self) -> None: """ Reverse the list in place. """ self.data.reverse() def sort(self, *, key=None, reverse: bool = False) -> None: """ Sort the list in ascending order and return :py:obj:`None`. The sort is in-place (i.e. the list itself is modified) and stable (i.e. the order of two equal elements is maintained). If a key function is given, apply it once to each list item and sort them, ascending or descending, according to their function values. The reverse flag can be set to sort in descending order. """ self.data.sort(key=key, reverse=reverse) def extend(self, other: Iterable[_T]) -> None: """ Extend the :class:`~.domdf_python_tools.bases.NamedList` by appending elements from ``other``. :param other: """ if isinstance(other, UserList): self.data.extend(other.data) else: self.data.extend(other) @prettify_docstrings class UserFloat(Real): """ Class which simulates a float. .. versionadded:: 1.6.0 :param value: The values to initialise the :class:`~domdf_python_tools.bases.UserFloat` with. """ def __init__(self, value: Union[SupportsFloat, SupportsIndex, str, bytes, bytearray] = 0.0): self._value = (float(value), ) def as_integer_ratio(self) -> Tuple[int, int]: """ Returns the float as a fraction. """ return float(self).as_integer_ratio() def hex(self) -> str: # noqa: A003 # pylint: disable=redefined-builtin """ Returns the hexadecimal (base 16) representation of the float. """ return float(self).hex() def is_integer(self) -> bool: """ Returns whether the float is an integer. """ return float(self).is_integer() @classmethod def fromhex(cls: Type[_F], string: str) -> _F: """ Create a floating-point number from a hexadecimal string. 
:param string: """ return cls(float.fromhex(string)) def __add__(self: _F, other: float) -> _F: return self.__class__(float(self).__add__(other)) def __sub__(self: _F, other: float) -> _F: return self.__class__(float(self).__sub__(other)) def __mul__(self: _F, other: float) -> _F: return self.__class__(float(self).__mul__(other)) def __floordiv__(self: _F, other: float) -> _F: # type: ignore[override] return self.__class__(float(self).__floordiv__(other)) def __truediv__(self: _F, other: float) -> _F: return self.__class__(float(self).__truediv__(other)) def __mod__(self: _F, other: float) -> _F: return self.__class__(float(self).__mod__(other)) def __divmod__(self: _F, other: float) -> Tuple[_F, _F]: return tuple(self.__class__(x) for x in float(self).__divmod__(other)) # type: ignore[return-value] def __pow__(self: _F, other: float, mod=None) -> _F: return self.__class__(float(self).__pow__(other, mod)) def __radd__(self: _F, other: float) -> _F: return self.__class__(float(self).__radd__(other)) def __rsub__(self: _F, other: float) -> _F: return self.__class__(float(self).__rsub__(other)) def __rmul__(self: _F, other: float) -> _F: return self.__class__(float(self).__rmul__(other)) def __rfloordiv__(self: _F, other: float) -> _F: # type: ignore[override] return self.__class__(float(self).__rfloordiv__(other)) def __rtruediv__(self: _F, other: float) -> _F: return self.__class__(float(self).__rtruediv__(other)) def __rmod__(self: _F, other: float) -> _F: return self.__class__(float(self).__rmod__(other)) def __rdivmod__(self: _F, other: float) -> Tuple[_F, _F]: return tuple(self.__class__(x) for x in float(self).__rdivmod__(other)) # type: ignore def __rpow__(self: _F, other: float, mod=None) -> _F: return self.__class__(float(self).__rpow__(other, mod)) def __getnewargs__(self) -> Tuple[float]: return self._value def __trunc__(self) -> int: """ Truncates the float to an integer. """ return float(self).__trunc__() def __round__(self, ndigits: Optional[int] = None) -> Union[int, float]: # type: ignore """ Round the :class:`~.UserFloat` to ``ndigits`` decimal places, defaulting to ``0``. If ``ndigits`` is omitted or :py:obj:`None`, returns an :class:`int`, otherwise returns a :class:`float`. Rounds half toward even. 
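A short example of the half-to-even behaviour (the value is delegated to the builtin :func:`round`):

.. code-block:: python

    >>> round(UserFloat(0.5))
    0
    >>> round(UserFloat(1.5))
    2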
:param ndigits: """ return float(self).__round__(ndigits) def __eq__(self, other: object) -> bool: if isinstance(other, UserFloat) and not isinstance(other, float): return self._value == other._value else: return float(self).__eq__(other) def __ne__(self, other: object) -> bool: if isinstance(other, UserFloat) and not isinstance(other, float): return self._value != other._value else: return float(self).__ne__(other) def __lt__(self, other: Union[float, "UserFloat"]) -> bool: if isinstance(other, UserFloat) and not isinstance(other, float): return self._value < other._value else: return float(self).__lt__(other) def __le__(self, other: Union[float, "UserFloat"]) -> bool: if isinstance(other, UserFloat) and not isinstance(other, float): return self._value <= other._value else: return float(self).__le__(other) def __gt__(self, other: Union[float, "UserFloat"]) -> bool: if isinstance(other, UserFloat) and not isinstance(other, float): return self._value > other._value else: return float(self).__gt__(other) def __ge__(self, other: Union[float, "UserFloat"]) -> bool: if isinstance(other, UserFloat) and not isinstance(other, float): return self._value >= other._value else: return float(self).__ge__(other) def __neg__(self: _F) -> _F: return self.__class__(float(self).__neg__()) def __pos__(self: _F) -> _F: return self.__class__(float(self).__pos__()) def __str__(self) -> str: return str(float(self)) def __int__(self) -> int: return int(float(self)) def __float__(self) -> float: return self._value[0] def __abs__(self: _F) -> _F: return self.__class__(float(self).__abs__()) def __hash__(self) -> int: return float(self).__hash__() def __repr__(self) -> str: return str(self) def __ceil__(self): raise NotImplementedError def __floor__(self): raise NotImplementedError def __bool__(self) -> bool: """ Return ``self != 0``. """ return super().__bool__() def __complex__(self) -> complex: """ Return :class:`complex(self) `. .. code-block:: python complex(self) == complex(float(self), 0) """ return super().__complex__() @prettify_docstrings class NamedList(UserList[_T]): """ A list with a name. The name of the list is taken from the name of the subclass. .. versionchanged:: 0.10.0 :class:`~.NamedList` now subclasses :class:`.UserList` rather than :class:`collections.UserList`. """ def __repr__(self) -> str: return f"{super().__repr__()}" def __str__(self) -> str: return f"{self.__class__.__name__}{pformat(list(self))}" def namedlist(name: str = "NamedList") -> Type[NamedList]: """ A factory function to return a custom list subclass with a name. :param name: The name of the list. """ class cls(NamedList): pass cls.__name__ = name return cls class Lineup(UserList[_T]): """ List-like type with fluent methods and some star players. .. latex:vspace:: -10px """ def replace(self: _LU, what: _T, with_: _T) -> _LU: r""" Replace the first instance of ``what`` with ``with_``. :param what: The object to find and replace. :param with\_: The new value for the position in the list. """ self[self.index(what)] = with_ return self def sort( # type: ignore self: _LU, *, key=None, reverse: bool = False, ) -> _LU: """ Sort the list in ascending order and return the self. The sort is in-place (i.e. the list itself is modified) and stable (i.e. the order of two equal elements is maintained). If a key function is given, apply it once to each list item and sort them, ascending or descending, according to their function values. The reverse flag can be set to sort in descending order. 
""" super().sort(key=key, reverse=reverse) return self def reverse(self: _LU) -> _LU: # type: ignore # noqa: D102 super().reverse() return self def append( # type: ignore # noqa: D102 self: _LU, item: _T, ) -> _LU: super().append(item) return self def extend( # type: ignore # noqa: D102 self: _LU, other: Iterable[_T], ) -> _LU: super().extend(other) return self def insert( # type: ignore # noqa: D102 self: _LU, i: int, item: _T, ) -> _LU: super().insert(i, item) return self def remove( # type: ignore # noqa: D102 self: _LU, item: _T, ) -> _LU: super().remove(item) return self def clear(self: _LU) -> _LU: # type: ignore # noqa: D102 super().clear() return self domdf_python_tools-3.10.0/domdf_python_tools/compat/000077500000000000000000000000001475315453000227115ustar00rootroot00000000000000domdf_python_tools-3.10.0/domdf_python_tools/compat/__init__.py000066400000000000000000000130411475315453000250210ustar00rootroot00000000000000# !/usr/bin/env python # # compat.py r""" Cross-version compatibility helpers. .. versionadded :: 0.12.0 ----- Provides the following: .. autovariable:: domdf_python_tools.compat.PYPY :no-value: .. raw:: latex \begin{multicols}{2} .. autovariable:: domdf_python_tools.compat.PYPY36 :no-value: .. autovariable:: domdf_python_tools.compat.PYPY37 :no-value: .. autovariable:: domdf_python_tools.compat.PYPY37_PLUS :no-value: .. autovariable:: domdf_python_tools.compat.PYPY38 :no-value: .. autovariable:: domdf_python_tools.compat.PYPY38_PLUS :no-value: .. autovariable:: domdf_python_tools.compat.PYPY39 :no-value: .. autovariable:: domdf_python_tools.compat.PYPY39_PLUS :no-value: .. raw:: latex \end{multicols} .. py:data:: importlib_resources `importlib_resources `_ on Python 3.6; :mod:`importlib.resources` on Python 3.7 and later. .. py:data:: importlib_metadata `importlib_metadata `_ on Python 3.8 and earlier; :mod:`importlib.metadata` on Python 3.9 and later. .. versionadded:: 1.1.0 .. versionchanged:: 2.5.0 `importlib_metadata `__ is now used on Python 3.8 in place of the stdlib version. """ # # Copyright © 2020-2021 Dominic Davis-Foster # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. # IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, # DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR # OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE # OR OTHER DEALINGS IN THE SOFTWARE. 
# # stdlib import platform import sys from typing import TYPE_CHECKING, ContextManager, Optional, TypeVar # this package import domdf_python_tools __all__ = [ "importlib_resources", "importlib_metadata", "nullcontext", "PYPY", "PYPY36", "PYPY37", "PYPY37_PLUS", "PYPY38", "PYPY38_PLUS", "PYPY39", "PYPY39_PLUS", ] if TYPE_CHECKING: # pragma: no cover # stdlib from contextlib import nullcontext elif sys.version_info[:2] < (3, 7) or domdf_python_tools.__docs: # pragma: no cover (py37+) _T = TypeVar("_T") class nullcontext(ContextManager[Optional[_T]]): """ Context manager that does no additional processing. Used as a stand-in for a normal context manager, when a particular block of code is only sometimes used with a normal context manager: .. code-block:: python cm = optional_cm if condition else nullcontext() with cm: # Perform operation, using optional_cm if condition is True .. versionadded:: 2.1.0 In Python 3.7 and above the `version from the standard library`_ is used instead of this one, but the implementations are identical. .. _version from the standard library: https://docs.python.org/3/library/contextlib.html#contextlib.nullcontext :param enter_result: An optional value to return when entering the context. """ # From CPython # Licensed under the Python Software Foundation License Version 2. # Copyright © 2001-2020 Python Software Foundation. All rights reserved. # Copyright © 2000 BeOpen.com. All rights reserved. # Copyright © 1995-2000 Corporation for National Research Initiatives. All rights reserved. # Copyright © 1991-1995 Stichting Mathematisch Centrum. All rights reserved. def __init__(self, enter_result: Optional[_T] = None): self.enter_result: Optional[_T] = enter_result def __enter__(self) -> Optional[_T]: return self.enter_result def __exit__(self, *excinfo): pass else: # pragma: no cover (= (3, 10): def packages_distributions() -> Mapping[str, List[str]]: ... class PackageNotFoundError(ModuleNotFoundError): ... class _EntryPointBase(NamedTuple): name: str value: str group: str class EntryPoint(_EntryPointBase): def load(self) -> Any: ... # Callable[[], Any] or an importable module @property def extras(self) -> List[str]: ... class PackagePath(pathlib.PurePosixPath): def read_text(self, encoding: str = ...) -> str: ... def read_binary(self) -> bytes: ... def locate(self) -> PathLike[str]: ... # The following attributes are not defined on PackagePath, but are dynamically added by Distribution.files: hash: Optional[FileHash] # noqa: A003 # pylint: disable=redefined-builtin size: Optional[int] dist: Distribution class FileHash: mode: str value: str def __init__(self, spec: str) -> None: ... class Distribution: @abc.abstractmethod def read_text(self, filename: str) -> Optional[str]: ... @abc.abstractmethod def locate_file(self, path: StrPath) -> PathLike[str]: ... @classmethod def from_name(cls, name: str) -> Distribution: ... @overload @classmethod def discover(cls, *, context: DistributionFinder.Context) -> Iterable[Distribution]: ... @overload @classmethod def discover( cls, *, context: None = ..., name: Optional[str] = ..., path: List[str] = ..., **kwargs: Any, ) -> Iterable[Distribution]: ... @staticmethod def at(path: StrPath) -> PathDistribution: ... @property def metadata(self) -> Message: ... @property def version(self) -> str: ... @property def entry_points(self) -> List[EntryPoint]: ... @property def files(self) -> Optional[List[PackagePath]]: ... @property def requires(self) -> Optional[List[str]]: ... 
class DistributionFinder(MetaPathFinder): class Context: name: Optional[str] def __init__(self, *, name: Optional[str] = ..., path: List[str] = ..., **kwargs: Any) -> None: ... @property def path(self) -> List[str]: ... @abc.abstractmethod def find_distributions(self, context: DistributionFinder.Context = ...) -> Iterable[Distribution]: ... class MetadataPathFinder(DistributionFinder): @classmethod def find_distributions(cls, context: DistributionFinder.Context = ...) -> Iterable[PathDistribution]: ... class PathDistribution(Distribution): def __init__(self, path: Path) -> None: ... def read_text(self, filename: StrPath) -> str: ... def locate_file(self, path: StrPath) -> PathLike[str]: ... def distribution(distribution_name: str) -> Distribution: ... @overload def distributions(*, context: DistributionFinder.Context) -> Iterable[Distribution]: ... @overload def distributions( *, context: None = ..., name: Optional[str] = ..., path: List[str] = ..., **kwargs: Any, ) -> Iterable[Distribution]: ... def metadata(distribution_name: str) -> Message: ... def version(distribution_name: str) -> str: ... def entry_points() -> Dict[str, Tuple[EntryPoint, ...]]: ... def files(distribution_name: str) -> Optional[List[PackagePath]]: ... def requires(distribution_name: str) -> Optional[List[str]]: ... domdf_python_tools-3.10.0/domdf_python_tools/compat/importlib_resources.py000066400000000000000000000034001475315453000273530ustar00rootroot00000000000000# noqa: D100,DALL000 # stdlib import os import sys from typing import Any, BinaryIO, TextIO if sys.version_info[:2] < (3, 9): # pragma: no cover (py39+) # 3rd party import importlib_resources globals().update(importlib_resources.__dict__) else: # pragma: no cover ( str: """ Normalize a path by ensuring it is a string. If the resulting string contains path separators, an exception is raised. """ parent, file_name = os.path.split(str(path)) if parent: raise ValueError(f'{path!r} must be only a file name') return file_name def open_binary(package: "Package", resource: "Resource") -> BinaryIO: """ Return a file-like object opened for binary reading of the resource. """ return (files(package) / _normalize_path(resource)).open("rb") def read_binary(package: "Package", resource: "Resource") -> bytes: """ Return the binary contents of the resource. """ return (files(package) / _normalize_path(resource)).read_bytes() def open_text( package: "Package", resource: "Resource", encoding: str = "utf-8", errors: str = "strict", ) -> TextIO: """ Return a file-like object opened for text reading of the resource. """ return (files(package) / _normalize_path(resource)).open( 'r', encoding=encoding, errors=errors, ) def read_text( package: "Package", resource: "Resource", encoding: str = "utf-8", errors: str = "strict", ) -> str: """ Return the decoded string of the resource. """ with open_text(package, resource, encoding, errors) as fp: return fp.read() domdf_python_tools-3.10.0/domdf_python_tools/compat/importlib_resources.pyi000066400000000000000000000020721475315453000275300ustar00rootroot00000000000000# From https://github.com/python/typeshed # Apache-2.0 Licensed # stdlib import os import sys from pathlib import Path from types import ModuleType from typing import Any, BinaryIO, ContextManager, Iterator, TextIO, Union Package = Union[str, ModuleType] Resource = Union[str, os.PathLike[Any]] def open_binary(package: Package, resource: Resource) -> BinaryIO: ... def open_text(package: Package, resource: Resource, encoding: str = ..., errors: str = ...) -> TextIO: ... 
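# Usage sketch for the importlib_resources shim (illustrative; assumes the bundled
# word list ships as package data, as it does in this repository):
#
#     from domdf_python_tools.compat import importlib_resources
#
#     words = importlib_resources.read_text(
#             "domdf_python_tools",
#             "google-10000-english-no-swears.txt",
#             ).splitlines()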
def read_binary(package: Package, resource: Resource) -> bytes: ... def read_text(package: Package, resource: Resource, encoding: str = ..., errors: str = ...) -> str: ... def path(package: Package, resource: Resource) -> ContextManager[Path]: ... def is_resource(package: Package, name: str) -> bool: ... def contents(package: Package) -> Iterator[str]: ... if sys.version_info >= (3, 9): # stdlib from contextlib import AbstractContextManager from importlib.abc import Traversable def files(package: Package) -> Traversable: ... def as_file(path: Traversable) -> AbstractContextManager[Path]: ... domdf_python_tools-3.10.0/domdf_python_tools/dates.py000066400000000000000000000251521475315453000231050ustar00rootroot00000000000000# !/usr/bin/env python # # dates.py """ Utilities for working with dates and times. .. extras-require:: dates :pyproject: **Data:** .. autosummary:: ~domdf_python_tools.dates.months ~domdf_python_tools.dates.month_full_names ~domdf_python_tools.dates.month_short_names """ # # Copyright © 2020 Dominic Davis-Foster # # Parts of the docstrings based on the Python 3.8.2 Documentation # Licensed under the Python Software Foundation License Version 2. # Copyright © 2001-2020 Python Software Foundation. All rights reserved. # Copyright © 2000 BeOpen.com. All rights reserved. # Copyright © 1995-2000 Corporation for National Research Initiatives. All rights reserved. # Copyright © 1991-1995 Stichting Mathematisch Centrum. All rights reserved. # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. # IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, # DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR # OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE # OR OTHER DEALINGS IN THE SOFTWARE. # # calc_easter from https://code.activestate.com/recipes/576517-calculate-easter-western-given-a-year/ # Copyright © 2008 Martin Diers # Licensed under the MIT License # # stdlib import datetime import sys import time import typing from collections import OrderedDict from types import ModuleType from typing import Optional, Union __all__ = [ "current_tzinfo", "set_timezone", "utc_timestamp_to_datetime", "months", "parse_month", "get_month_number", "check_date", "calc_easter", "month_short_names", "month_full_names", "is_bst", ] def current_tzinfo() -> Optional[datetime.tzinfo]: """ Returns a tzinfo object for the current timezone. 
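For example (a minimal sketch; the exact :class:`~datetime.tzinfo` returned depends
on the platform's local settings):

.. code-block:: python

	tz = current_tzinfo()
	now_local = datetime.datetime.now(tz=tz)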
""" return datetime.datetime.now().astimezone().tzinfo # pragma: no cover (hard to test) # # def datetime_to_utc_timestamp(datetime, current_tzinfo=None): # """ # Convert a :class:`datetime.datetime` object to seconds since UNIX epoch, in UTC time # # :param datetime: # :type datetime: :class:`datetime.datetime` # :param current_tzinfo: A tzinfo object representing the current timezone. # If None it will be inferred. # :type current_tzinfo: :class:`datetime.tzinfo` # # :return: Timestamp in UTC timezone # :rtype: float # """ # # return datetime.astimezone(current_tzinfo).timestamp() # def set_timezone(obj: datetime.datetime, tzinfo: datetime.tzinfo) -> datetime.datetime: """ Sets the timezone / tzinfo of the given :class:`datetime.datetime` object. This will not convert the time (i.e. the hours will stay the same). Use :meth:`datetime.datetime.astimezone` to accomplish that. :param obj: :param tzinfo: """ return obj.replace(tzinfo=tzinfo) def utc_timestamp_to_datetime( utc_timestamp: Union[float, int], output_tz: Optional[datetime.tzinfo] = None, ) -> datetime.datetime: """ Convert UTC timestamp (seconds from UNIX epoch) to a :class:`datetime.datetime` object. If ``output_tz`` is :py:obj:`None` the timestamp is converted to the platform’s local date and time, and the local timezone is inferred and set for the object. If ``output_tz`` is not :py:obj:`None`, it must be an instance of a :class:`datetime.tzinfo` subclass, and the timestamp is converted to ``output_tz``’s time zone. :param utc_timestamp: The timestamp to convert to a datetime object :param output_tz: The timezone to output the datetime object for. If :py:obj:`None` it will be inferred. :return: The timestamp as a datetime object. :raises OverflowError: if the timestamp is out of the range of values supported by the platform C localtime() or gmtime() functions, and OSError on localtime() or gmtime() failure. It’s common for this to be restricted to years in 1970 through 2038. """ new_datetime = datetime.datetime.fromtimestamp(utc_timestamp, output_tz) return new_datetime.astimezone(output_tz) if sys.version_info <= (3, 7, 2): # pragma: no cover (py37+) MonthsType = OrderedDict else: # pragma: no cover ( str: """ Converts an integer or shorthand month into the full month name. :param month: The month number or shorthand name :return: The full name of the month """ error_text = f"The given month ({month!r}) is not recognised." try: month = int(month) except ValueError: try: return months[month.capitalize()[:3]] # type: ignore except KeyError: raise ValueError(error_text) # Only get here if first try succeeded if 0 < month <= 12: return list(months.values())[month - 1] else: raise ValueError(error_text) def get_month_number(month: Union[str, int]) -> int: """ Returns the number of the given month. If ``month`` is already a number between 1 and 12 it will be returned immediately. :param month: The month to convert to a number :return: The number of the month """ if isinstance(month, int): if 0 < month <= 12: return month else: raise ValueError(f"The given month ({month!r}) is not recognised.") else: month = parse_month(month) return list(months.values()).index(month) + 1 def check_date(month: Union[str, int], day: int, leap_year: bool = True) -> bool: """ Returns :py:obj:`True` if the day number is valid for the given month. .. note:: This function will return :py:obj:`True` for the 29th Feb. If you don't want this behaviour set ``leap_year`` to :py:obj:`False`. .. latex:vspace:: -10px :param month: The month to test. 
:param day: The day number to test. :param leap_year: Whether to return :py:obj:`True` for 29th Feb. """ # Ensure day is an integer day = int(day) month = get_month_number(month) year = 2020 if leap_year else 2019 try: datetime.date(year, month, day) return True except ValueError: return False def calc_easter(year: int) -> datetime.date: """ Returns the date of Easter in the given year. .. versionadded:: 1.4.0 :param year: """ a = year % 19 b = year // 100 c = year % 100 d = (19 * a + b - b // 4 - ((b - (b + 8) // 25 + 1) // 3) + 15) % 30 e = (32 + 2 * (b % 4) + 2 * (c // 4) - d - (c % 4)) % 7 f = d + e - 7 * ((a + 11 * d + 22 * e) // 451) + 114 month = f // 31 day = f % 31 + 1 return datetime.date(year, month, day) def get_utc_offset( tz: Union[datetime.tzinfo, str], date: Optional[datetime.datetime] = None, ) -> Optional[datetime.timedelta]: """ Returns the offset between UTC and the requested timezone on the given date. If ``date`` is :py:obj:`None` then the current date is used. :param tz: ``pytz.timezone`` or a string representing the timezone :param date: The date to obtain the UTC offset for """ if date is None: date = datetime.datetime.now(pytz.utc) timezone: Optional[datetime.tzinfo] if isinstance(tz, str): timezone = get_timezone(tz, date) else: timezone = tz # pragma: no cover (hard to test) return date.replace(tzinfo=pytz.utc).astimezone(timezone).utcoffset() def get_timezone(tz: str, date: Optional[datetime.datetime] = None) -> Optional[datetime.tzinfo]: """ Returns a localized ``pytz.timezone`` object for the given date. If ``date`` is :py:obj:`None` then the current date is used. .. latex:vspace:: -10px :param tz: A string representing a pytz timezone :param date: The date to obtain the timezone for """ if date is None: # pragma: no cover (hard to test) date = datetime.datetime.now(pytz.utc) d = date.replace(tzinfo=None) return pytz.timezone(tz).localize(d).tzinfo def is_bst(the_date: Union[time.struct_time, datetime.date]) -> bool: """ Calculates whether the given day falls within British Summer Time. This function should also be applicable to other timezones which change to summer time on the same date (e.g. Central European Summer Time). .. note:: This function does not consider the time of day, and therefore does not handle the fact that the time changes at 1 AM GMT. It also does not account for historic deviations from the current norm. .. versionadded:: 3.5.0 :param the_date: A :class:`time.struct_time`, :class:`datetime.date` or :class:`datetime.datetime` representing the target date. :returns: :py:obj:`True` if the date falls within British Summer Time, :py:obj:`False` otherwise. """ if isinstance(the_date, datetime.date): the_date = the_date.timetuple() day, month, dow = the_date.tm_mday, the_date.tm_mon, (the_date.tm_wday + 1) % 7 if 3 > month > 10: return False elif 3 < month < 10: return True elif month == 3: return day - dow >= 25 elif month == 10: return day - dow < 25 else: return False _pytz_functions = ["get_utc_offset", "get_timezone"] try: # 3rd party import pytz __all__.extend(_pytz_functions) except ImportError as e: if __name__ == "__main__": # stdlib import warnings # this package from domdf_python_tools.words import word_join warnings.warn( f"""\ '{word_join(_pytz_functions)}' require pytz (https://pypi.org/project/pytz/), but it could not be imported. The error was: {e}. 
""" ) else: _actual_module = sys.modules[__name__] class SelfWrapper(ModuleType): def __getattr__(self, name): if name in _pytz_functions: raise ImportError( f"{name!r} requires pytz (https://pypi.org/project/pytz/), but it could not be imported." ) else: return getattr(_actual_module, name) sys.modules[__name__] = SelfWrapper(__name__) domdf_python_tools-3.10.0/domdf_python_tools/delegators.py000066400000000000000000000105201475315453000241270ustar00rootroot00000000000000#!/usr/bin/env python # cython: language_level=3 # # delegators.py """ Decorators for functions that delegate parts of their functionality to other functions. .. versionadded:: 0.10.0 """ # # Copyright © 2020 Dominic Davis-Foster # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. # IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, # DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR # OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE # OR OTHER DEALINGS IN THE SOFTWARE. # # delegate_kwargs based on https://github.com/fastai/fastcore # | Licensed under the Apache License, Version 2.0 (the "License"); you may # | not use this file except in compliance with the License. You may obtain # | a copy of the License at # | # | http://www.apache.org/licenses/LICENSE-2.0 # | # | Unless required by applicable law or agreed to in writing, software # | distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # | WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # | License for the specific language governing permissions and limitations # | under the License. # # stdlib import inspect from typing import Callable, TypeVar, get_type_hints __all__ = ["delegate_kwargs", "delegates"] _C = TypeVar("_C", bound="Callable") def delegate_kwargs(to: Callable, *except_: str) -> Callable[[_C], _C]: r""" Decorator to replace ``**kwargs`` in function signatures with the parameter names from the delegated function. :param to: The function \*\*kwargs is passed on to. :param \*except\_: Parameter names not to delegate. :raises ValueError: if a non-default argument follows a default argument. 
""" # noqa: D400 # TODO: return annotation def _f(f: _C) -> _C: to_f, from_f = to, f to_sig = inspect.signature(to_f) from_sig = inspect.signature(from_f) to_annotations = get_type_hints(to_f) from_annotations = get_type_hints(from_f) to_params = {k: v for k, v in to_sig.parameters.items() if k not in except_} from_params = dict(from_sig.parameters) if from_params.pop("kwargs", False): if "kwargs" in from_annotations: del from_annotations["kwargs"] for param in from_params: if param in to_params: del to_params[param] f.__signature__ = from_sig.replace( # type: ignore parameters=[*from_params.values(), *to_params.values()] ) f.__annotations__ = {**to_annotations, **from_annotations} return f return _f def delegates(to: Callable) -> Callable[[_C], _C]: r""" Decorator to replace ``*args, **kwargs`` function signatures with the signature of the delegated function. :param to: The function the arguments are passed on to. """ # noqa: D400 def copy_annotations(f): if hasattr(to, "__annotations__"): if hasattr(f, "__annotations__"): return_annotation = f.__annotations__.get("return", inspect.Parameter.empty) f.__annotations__.update(to.__annotations__) if return_annotation is not inspect.Parameter.empty: f.__annotations__["return"] = return_annotation else: f.__annotations__ = to.__annotations__ def _f(f: _C) -> _C: to_sig = inspect.signature(to) from_sig = inspect.signature(f) from_params = dict(from_sig.parameters) if tuple(from_params.keys()) == ("args", "kwargs"): f.__signature__ = to_sig # type: ignore copy_annotations(f) elif tuple(from_params.keys()) == ("self", "args", "kwargs"): f.__signature__ = from_sig.replace( # type: ignore parameters=[from_params["self"], *to_sig.parameters.values()] ) copy_annotations(f) return f return _f domdf_python_tools-3.10.0/domdf_python_tools/doctools.py000066400000000000000000000300551475315453000236310ustar00rootroot00000000000000# !/usr/bin/env python # # doctools.py """ Utilities for documenting functions, classes and methods. .. autosummary-widths:: 5/16 .. automodulesumm:: domdf_python_tools.doctools :autosummary-sections: Data .. autosummary-widths:: 17/32 .. automodulesumm:: domdf_python_tools.doctools :autosummary-sections: Functions """ # # Copyright © 2020 Dominic Davis-Foster # Based on https://softwareengineering.stackexchange.com/a/386758 # Copyright © amon (https://softwareengineering.stackexchange.com/users/60357/amon) # Licensed under CC BY-SA 4.0 # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. # IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, # DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR # OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE # OR OTHER DEALINGS IN THE SOFTWARE. 
# # stdlib import builtins from contextlib import suppress from inspect import cleandoc from types import MethodType from typing import Any, Callable, Dict, Optional, Sequence, Type, TypeVar, Union # this package from domdf_python_tools.compat import PYPY, PYPY37 from domdf_python_tools.typing import MethodDescriptorType, MethodWrapperType, WrapperDescriptorType __all__ = [ "_F", "_T", "deindent_string", "document_object_from_another", "append_doctring_from_another", "make_sphinx_links", "is_documented_by", "append_docstring_from", "sphinxify_docstring", "prettify_docstrings", ] _F = TypeVar("_F", bound=Callable[..., Any]) _T = TypeVar("_T", bound=Type) def deindent_string(string: Optional[str]) -> str: """ Removes all indentation from the given string. :param string: The string to deindent :return: The string without indentation """ if not string: # Short circuit if empty string or None return '' split_string = string.split('\n') deindented_string = [line.lstrip("\t ") for line in split_string] return '\n'.join(deindented_string) # Functions that do the work def document_object_from_another(target: Union[Type, Callable], original: Union[Type, Callable]): """ Sets the docstring of the ``target`` function to that of the ``original`` function. This may be useful for subclasses or wrappers that use the same arguments. :param target: The object to set the docstring for :param original: The object to copy the docstring from """ target.__doc__ = original.__doc__ def append_doctring_from_another(target: Union[Type, Callable], original: Union[Type, Callable]): """ Sets the docstring of the ``target`` function to that of the ``original`` function. This may be useful for subclasses or wrappers that use the same arguments. Any indentation in either docstring is removed to ensure consistent indentation between the two docstrings. Bear this in mind if additional indentation is used in the docstring. :param target: The object to append the docstring to :param original: The object to copy the docstring from """ # this package from domdf_python_tools.stringlist import StringList target_doc = target.__doc__ original_doc = original.__doc__ if isinstance(original_doc, str) and isinstance(target_doc, str): docstring = StringList(cleandoc(target_doc)) docstring.blankline(ensure_single=True) docstring.append(cleandoc(original_doc)) docstring.blankline(ensure_single=True) target.__doc__ = str(docstring) elif not isinstance(target_doc, str) and isinstance(original_doc, str): docstring = StringList(cleandoc(original_doc)) docstring.blankline(ensure_single=True) target.__doc__ = str(docstring) def make_sphinx_links(input_string: str, builtins_list: Optional[Sequence[str]] = None) -> str: r""" Make proper sphinx links out of double-backticked strings in docstring. i.e. :inline-code:`\`\`str\`\`` becomes :inline-code:`:class:\`str\`` Make sure to include the following in your ``conf.py`` file for Sphinx: .. code-block:: python intersphinx_mapping = {"python": ("https://docs.python.org/3/", None)} :param input_string: The string to process. :param builtins_list: A list of builtins to make links for. :default builtins_list: dir(:py:obj:`builtins`) :return: Processed string with links. 
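For example (illustrative):

.. code-block:: python

	>>> make_sphinx_links("Returns a ``str``, or ``None`` on failure.")
	'Returns a :class:`str`, or :py:obj:`None` on failure.'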
""" if builtins_list is None: builtins_list = dir(builtins) working_string = f"{input_string}" for builtin in builtins_list: if builtin.startswith("__"): continue if builtin in {"None", "False", "None"}: working_string = working_string.replace(f"``{builtin}``", f":py:obj:`{builtin}`") else: working_string = working_string.replace(f"``{builtin}``", f":class:`{builtin}`") return working_string # Decorators that call the above functions def is_documented_by(original: Callable) -> Callable[[_F], _F]: """ Decorator to set the docstring of the ``target`` function to that of the ``original`` function. This may be useful for subclasses or wrappers that use the same arguments. :param original: """ def wrapper(target: _F) -> _F: document_object_from_another(target, original) return target return wrapper def append_docstring_from(original: Callable) -> Callable[[_F], _F]: """ Decorator to appends the docstring from the ``original`` function to the ``target`` function. This may be useful for subclasses or wrappers that use the same arguments. Any indentation in either docstring is removed to ensure consistent indentation between the two docstrings. Bear this in mind if additional indentation is used in the docstring. :param original: """ def wrapper(target: _F) -> _F: append_doctring_from_another(target, original) return target return wrapper def sphinxify_docstring() -> Callable[[_F], _F]: r""" Decorator to make proper sphinx links out of double-backticked strings in the docstring. i.e. :inline-code:`\`\`str\`\`` becomes :inline-code:`:class:\`str\`` Make sure to include the following in your ``conf.py`` file for Sphinx: .. code-block:: python intersphinx_mapping = { "python": ("https://docs.python.org/3/", None), } """ def wrapper(target: _F) -> _F: target_doc = target.__doc__ if target_doc: target.__doc__ = make_sphinx_links(target_doc) return target return wrapper # Check against object base_new_docstrings = { "__delattr__": "Implement :func:`delattr(self, name) `.", "__dir__": "Default :func:`dir` implementation.", "__eq__": "Return ``self == other``.", # __format__ "__getattribute__": "Return :func:`getattr(self, name) `.", "__ge__": "Return ``self >= other``.", "__gt__": "Return ``self > other``.", "__hash__": "Return :func:`hash(self) `.", # __init_subclass__ # __init__ # not usually shown in sphinx "__lt__": "Return ``self < other``.", "__le__": "Return ``self <= other``.", # __new__ "__ne__": "Return ``self != other``.", # __reduce_ex__ # __reduce__ # __repr__ is defined within the function "__setattr__": "Implement :func:`setattr(self, name) `.", "__sizeof__": "Returns the size of the object in memory, in bytes.", "__str__": "Return :class:`str(self) `.", # __subclasshook__ } # Check against dict container_docstrings = { "__contains__": "Return ``key in self``.", "__getitem__": "Return ``self[key]``.", "__setitem__": "Set ``self[key]`` to ``value``.", "__delitem__": "Delete ``self[key]``.", } # Check against int operator_docstrings = { "__and__": "Return ``self & value``.", "__add__": "Return ``self + value``.", "__abs__": "Return :func:`abs(self) `.", "__divmod__": "Return :func:`divmod(self, value) `.", "__floordiv__": "Return ``self // value``.", "__invert__": "Return ``~ self``.", "__lshift__": "Return ``self << value``.", "__mod__": "Return ``self % value``.", "__mul__": "Return ``self * value``.", "__neg__": "Return ``- self``.", "__or__": "Return ``self | value``.", "__pos__": "Return ``+ self``.", "__pow__": "Return :func:`pow(self, value, mod) `.", "__radd__": "Return ``value + 
self``.", "__rand__": "Return ``value & self``.", "__rdivmod__": "Return :func:`divmod(value, self) `.", "__rfloordiv__": "Return ``value // self``.", "__rlshift__": "Return ``value << self``.", "__rmod__": "Return ``value % self``.", "__rmul__": "Return ``value * self``.", "__ror__": "Return ``value | self``.", "__rpow__": "Return :func:`pow(value, self, mod) `.", "__rrshift__": "Return ``self >> value``.", "__rshift__": "Return ``self >> value``.", "__rsub__": "Return ``value - self``.", "__rtruediv__": "Return ``value / self``.", "__rxor__": "Return ``value ^ self``.", "__sub__": "Return ``value - self``.", "__truediv__": "Return ``self / value``.", "__xor__": "Return ``self ^ value``.", } # Check against int base_int_docstrings = { # "__bool__": "Return ``self != 0``.", # TODO # __ceil__ "__float__": "Return :class:`float(self) `.", # __floor__ "__int__": "Return :class:`int(self) `.", # __round__ } new_return_types = { "__eq__": bool, "__ge__": bool, "__gt__": bool, "__lt__": bool, "__le__": bool, "__ne__": bool, "__repr__": str, "__str__": str, "__int__": int, "__float__": float, "__bool__": bool, } def _do_prettify(obj: Type, base: Type, new_docstrings: Dict[str, str]): """ Perform the actual prettifying for :func`~.prettify_docstrings`. .. versionadded:: 0.8.0 (private) :param obj: :param base: :param new_docstrings: """ for attr_name in new_docstrings: if not hasattr(obj, attr_name): continue attribute = getattr(obj, attr_name) if not PYPY and isinstance( attribute, (WrapperDescriptorType, MethodDescriptorType, MethodWrapperType, MethodType), ): continue # pragma: no cover (!PyPy) elif PYPY and isinstance(attribute, MethodType): continue # pragma: no cover elif PYPY37: # pragma: no cover (not (PyPy and py37)) if attribute is getattr(object, attr_name, None): continue elif attribute is getattr(float, attr_name, None): continue elif attribute is getattr(str, attr_name, None): continue if attribute is None: continue base_docstring: Optional[str] = None if hasattr(base, attr_name): base_docstring = getattr(base, attr_name).__doc__ doc: Optional[str] = attribute.__doc__ if doc in {None, base_docstring}: with suppress(AttributeError, TypeError): attribute.__doc__ = new_docstrings[attr_name] def prettify_docstrings(obj: _T) -> _T: """ Decorator to prettify the default :class:`object` docstrings for use in Sphinx documentation. .. versionadded:: 0.8.0 :param obj: The object to prettify the method docstrings for. """ repr_docstring = f"Return a string representation of the :class:`~{obj.__module__}.{obj.__name__}`." 
new_docstrings = {**base_new_docstrings, "__repr__": repr_docstring} _do_prettify(obj, object, new_docstrings) _do_prettify(obj, dict, container_docstrings) _do_prettify(obj, int, operator_docstrings) _do_prettify(obj, int, base_int_docstrings) for attribute in new_return_types: if hasattr(obj, attribute): annotations: Dict = getattr(getattr(obj, attribute), "__annotations__", {}) if "return" not in annotations or annotations["return"] is Any: annotations["return"] = new_return_types[attribute] with suppress(AttributeError, TypeError): getattr(obj, attribute).__annotations__ = annotations if issubclass(obj, tuple) and obj.__repr__.__doc__ == "Return a nicely formatted representation string": obj.__repr__.__doc__ = repr_docstring return obj domdf_python_tools-3.10.0/domdf_python_tools/getters.py000066400000000000000000000166231475315453000234650ustar00rootroot00000000000000#!/usr/bin/env python # # getters.py """ Variants of :func:`operator.attrgetter`, :func:`operator.itemgetter` and :func:`operator.methodcaller` which operate on values within sequences. .. versionadded:: 3.2.0 """ # noqa: D400 # # Copyright © 2021 Dominic Davis-Foster # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. # IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, # DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR # OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE # OR OTHER DEALINGS IN THE SOFTWARE. # # Adapted from https://github.com/python/cpython/blob/master/Lib/operator.py # Licensed under the Python Software Foundation License Version 2. # Copyright © 2001-2020 Python Software Foundation. All rights reserved. # Copyright © 2000 BeOpen.com. All rights reserved. # Copyright © 1995-2000 Corporation for National Research Initiatives. All rights reserved. # Copyright © 1991-1995 Stichting Mathematisch Centrum. All rights reserved. # # stdlib from functools import partial from typing import TYPE_CHECKING, Any, Dict, Tuple __all__ = ["attrgetter", "itemgetter", "methodcaller"] class attrgetter: """ Returns a callable object that fetches ``attr`` from the item at index ``idx`` in its operand. The attribute name can contain dots. For example: * After ``f = attrgetter(0, 'name')``, the call call ``f(b)`` returns ``b[0].name``. * After ``f = attrgetter(3, 'name.first')``, the call ``f(b)`` returns ``b[3].name.first``. .. code-block:: python >>> from pathlib import Path >>> attrgetter(0, 'name')([Path("dir/code.py")]) 'code.py' >>> attrgetter(2, 'parent.name')([Path("dir/coincidence.py"), Path("dir/wheel.py"), Path("dir/operator.py")]) 'dir' .. seealso:: :func:`operator.attrgetter` and :func:`operator.itemgetter` :param idx: The index of the item to obtain the attribute from. :param attr: The name of the attribute. 
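The returned getter is a plain callable and can be reused (illustrative, continuing
the imports from the example above):

.. code-block:: python

	>>> get_stem = attrgetter(0, 'stem')
	>>> get_stem([Path("dir/code.py"), Path("dir/other.py")])
	'code'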
""" __slots__ = ("_attrs", "_call") def __init__(self, idx: int, attr: str): if not isinstance(idx, int): raise TypeError("'idx' must be an integer") if not isinstance(attr, str): raise TypeError("attribute name must be a string") self._attrs: Dict[str, Any] = {"idx": idx, "attr": attr} def __call__(self, obj: Any) -> Any: # noqa: D102 names = self._attrs["attr"].split('.') obj = obj[self._attrs["idx"]] for name in names: obj = getattr(obj, name) return obj def __repr__(self) -> str: data = {**self._attrs, "module": self.__class__.__module__, "qualname": self.__class__.__qualname__} return "{module}.{qualname}(idx={idx}, attr={attr!r})".format_map(data) def __reduce__(self): return self.__class__, tuple(self._attrs.values()) class itemgetter: """ Returns a callable object that fetches ``item`` from the item at index ``idx`` in its operand, using the ``__getitem__()`` method. For example: * After ``f = itemgetter(0, 2)``, the call call ``f(r)`` returns ``r[0][2]``. * After ``g = itemgetter(3, 5)``, the call ``g(r)`` returns ``r[3][5]``. The items can be any type accepted by the item's ``__getitem__()`` method. Dictionaries accept any hashable value. Lists, tuples, and strings accept an index or a slice: .. code-block:: python >>> itemgetter(0, 1)(['ABCDEFG']) 'B' >>> itemgetter(1, 2)(['ABC', 'DEF']) 'F' >>> itemgetter(0, slice(2, None))(['ABCDEFG']) 'CDEFG' >>> army = [dict(rank='captain', name='Blackadder'), dict(rank='Private', name='Baldrick')] >>> itemgetter(0, 'rank')(army) 'captain' .. seealso:: :func:`operator.itemgetter` :param idx: The index of the item to call ``__getitem__()`` on. :param item: The value to pass to ``__getitem__()``. """ # noqa: D400 __slots__ = ("_items", "_call") def __init__(self, idx: int, item: Any): if not isinstance(idx, int): raise TypeError("'idx' must be an integer") self._items: Dict[str, Any] = {"idx": idx, "item": item} def __call__(self, obj: Any) -> Any: # noqa: D102 return obj[self._items["idx"]][self._items["item"]] def __repr__(self) -> str: data = {**self._items, "module": self.__class__.__module__, "qualname": self.__class__.__qualname__} return "{module}.{qualname}(idx={idx}, item={item!r})".format_map(data) def __reduce__(self): return self.__class__, tuple(self._items.values()) class methodcaller: r""" Returns a callable object that calls the method name on the item at index ``idx`` in its operand. If additional arguments and/or keyword arguments are given, they will be passed to the method as well. For example: * After ``f = methodcaller(0, 'name')``, the call ``f(b)`` returns ``b[0].name()``. * After ``f = methodcaller(1, 'name', 'foo', bar=1)``, the call ``f(b)`` returns ``b[1].name('foo', bar=1)``. .. code-block:: python >>> from datetime import date >>> methodcaller(0, 'upper')(["hello", "world"]) 'HELLO' >>> methodcaller(1, 'center', 9, "=")(["hello", "world"]) '==world==' >>> methodcaller(0, 'replace', year=2019)([date(2021, 7, 6)]) datetime.date(2019, 7, 6) .. seealso:: :func:`operator.methodcaller` and :func:`operator.itemgetter` :param \_idx: The index of the item to call the method on. :param \_attr: The name of the method to call. :param \*args: Positional arguments to pass to the method. :param \*\*kwargs: Keyword arguments to pass to the method. """ __slots__ = ("_idx", "_name", "_args", "_kwargs") _idx: int _name: str _args: Tuple[Any, ...] 
_kwargs: Dict[str, Any] if TYPE_CHECKING: def __init__(__self, __idx: int, __name: str, *args, **kwargs): if not isinstance(__idx, int): raise TypeError("'idx' must be an integer") if not isinstance(__name, str): raise TypeError("method name must be a string") __self._idx = __idx __self._name = __name __self._args = args __self._kwargs = kwargs else: def __init__(_self, _idx: int, _name: str, *args, **kwargs): if not isinstance(_idx, int): raise TypeError("'_idx' must be an integer") if not isinstance(_name, str): raise TypeError("method name must be a string") _self._idx = _idx _self._name = _name _self._args = args _self._kwargs = kwargs def __call__(self, obj: Any) -> Any: # noqa: D102 return getattr(obj[self._idx], self._name)(*self._args, **self._kwargs) def __repr__(self) -> str: args = [repr(self._idx), repr(self._name)] args.extend(map(repr, self._args)) args.extend(f'{k}={v!r}' for k, v in self._kwargs.items()) return f'{self.__class__.__module__}.{self.__class__.__name__}({", ".join(args)})' def __reduce__(self): if not self._kwargs: return self.__class__, (self._idx, self._name) + self._args else: return partial(self.__class__, self._idx, self._name, **self._kwargs), self._args domdf_python_tools-3.10.0/domdf_python_tools/google-10000-english-no-swears.txt000066400000000000000000002226211475315453000274510ustar00rootroot00000000000000the of and to a in for is on that by this with i you it not or be are from at as your all have new more an was we will home can us about if page my has search free but our one other do no information time they site he up may what which their news out use any there see only so his when contact here business who web also now help get pm view online c e first am been would how were me s services some these click its like service x than find price date back top people had list name just over state year day into email two health n world re next used go b work last most products music buy data make them should product system post her city t add policy number such please available copyright support message after best software then jan good video well d where info rights public books high school through m each links she review years order very privacy book items company r read group need many user said de does set under general research university january mail full map reviews program life know games way days management p part could great united hotel real f item international center ebay must store travel comments made development report off member details line terms before hotels did send right type because local those using results office education national car design take posted internet address community within states area want phone dvd shipping reserved subject between forum family l long based w code show o even black check special prices website index being women much sign file link open today technology south case project same pages uk version section own found sports house related security both g county american photo game members power while care network down computer systems three total place end following download h him without per access think north resources current posts big media law control water history pictures size art personal since including guide shop directory board location change white text small rating rate government children during usa return students v shopping account times sites level digital profile previous form events love old john main call hours image department title description non k y insurance another why shall 
property class cd still money quality every listing content country private little visit save tools low reply customer december compare movies include college value article york man card jobs provide j food source author different press u learn sale around print course job canada process teen room stock training too credit point join science men categories advanced west sales look english left team estate box conditions select windows photos gay thread week category note live large gallery table register however june october november market library really action start series model features air industry plan human provided tv yes required second hot accessories cost movie forums march la september better say questions july yahoo going medical test friend come dec server pc study application cart staff articles san feedback again play looking issues april never users complete street topic comment financial things working against standard tax person below mobile less got blog party payment equipment login student let programs offers legal above recent park stores side act problem red give memory performance social q august quote language story sell options experience rates create key body young america important field few east paper single ii age activities club example girls additional password z latest something road gift question changes night ca hard texas oct pay four poker status browse issue range building seller court february always result audio light write war nov offer blue groups al easy given files event release analysis request fax china making picture needs possible might professional yet month major star areas future space committee hand sun cards problems london washington meeting rss become interest id child keep enter california share similar garden schools million added reference companies listed baby learning energy run delivery net popular term film stories put computers journal reports co try welcome central images president notice original head radio until cell color self council away includes track australia discussion archive once others entertainment agreement format least society months log safety friends sure faq trade edition cars messages marketing tell further updated association able having provides david fun already green studies close common drive specific several gold feb living sep collection called short arts lot ask display limited powered solutions means director daily beach past natural whether due et electronics five upon period planning database says official weather mar land average done technical window france pro region island record direct microsoft conference environment records st district calendar costs style url front statement update parts aug ever downloads early miles sound resource present applications either ago document word works material bill apr written talk federal hosting rules final adult tickets thing centre requirements via cheap kids finance true minutes else mark third rock gifts europe reading topics bad individual tips plus auto cover usually edit together videos percent fast function fact unit getting global tech meet far economic en player projects lyrics often subscribe submit germany amount watch included feel though bank risk thanks everything deals various words linux jul production commercial james weight town heart advertising received choose treatment newsletter archives points knowledge magazine error camera jun girl currently construction toys registered clear golf receive domain methods chapter makes protection 
policies loan wide beauty manager india position taken sort listings models michael known half cases step engineering florida simple quick none wireless license paul friday lake whole annual published later basic sony shows corporate google church method purchase customers active response practice hardware figure materials fire holiday chat enough designed along among death writing speed html countries loss face brand discount higher effects created remember standards oil bit yellow political increase advertise kingdom base near environmental thought stuff french storage oh japan doing loans shoes entry stay nature orders availability africa summary turn mean growth notes agency king monday european activity copy although drug pics western income force cash employment overall bay river commission ad package contents seen players engine port album regional stop supplies started administration bar institute views plans double dog build screen exchange types soon sponsored lines electronic continue across benefits needed season apply someone held ny anything printer condition effective believe organization effect asked eur mind sunday selection casino pdf lost tour menu volume cross anyone mortgage hope silver corporation wish inside solution mature role rather weeks addition came supply nothing certain usr executive running lower necessary union jewelry according dc clothing mon com particular fine names robert homepage hour gas skills six bush islands advice career military rental decision leave british teens pre huge sat woman facilities zip bid kind sellers middle move cable opportunities taking values division coming tuesday object lesbian appropriate machine logo length actually nice score statistics client ok returns capital follow sample investment sent shown saturday christmas england culture band flash ms lead george choice went starting registration fri thursday courses consumer hi airport foreign artist outside furniture levels channel letter mode phones ideas wednesday structure fund summer allow degree contract button releases wed homes super male matter custom virginia almost took located multiple asian distribution editor inn industrial cause potential song cnet ltd los hp focus late fall featured idea rooms female responsible inc communications win associated thomas primary cancer numbers reason tool browser spring foundation answer voice eg friendly schedule documents communication purpose feature bed comes police everyone independent ip approach cameras brown physical operating hill maps medicine deal hold ratings chicago forms glass happy tue smith wanted developed thank safe unique survey prior telephone sport ready feed animal sources mexico population pa regular secure navigation operations therefore simply evidence station christian round paypal favorite understand option master valley recently probably thu rentals sea built publications blood cut worldwide improve connection publisher hall larger anti networks earth parents nokia impact transfer introduction kitchen strong tel carolina wedding properties hospital ground overview ship accommodation owners disease tx excellent paid italy perfect hair opportunity kit classic basis command cities william express award distance tree peter assessment ensure thus wall ie involved el extra especially interface partners budget rated guides success maximum ma operation existing quite selected boy amazon patients restaurants beautiful warning wine locations horse vote forward flowers stars significant lists technologies owner 
retail animals useful directly manufacturer ways est son providing rule mac housing takes iii gmt bring catalog searches max trying mother authority considered told xml traffic programme joined input strategy feet agent valid bin modern senior ireland teaching door grand testing trial charge units instead canadian cool normal wrote enterprise ships entire educational md leading metal positive fl fitness chinese opinion mb asia football abstract uses output funds mr greater likely develop employees artists alternative processing responsibility resolution java guest seems publication pass relations trust van contains session multi photography republic fees components vacation century academic assistance completed skin graphics indian prev ads mary il expected ring grade dating pacific mountain organizations pop filter mailing vehicle longer consider int northern behind panel floor german buying match proposed default require iraq boys outdoor deep morning otherwise allows rest protein plant reported hit transportation mm pool mini politics partner disclaimer authors boards faculty parties fish membership mission eye string sense modified pack released stage internal goods recommended born unless richard detailed japanese race approved background target except character usb maintenance ability maybe functions ed moving brands places php pretty trademarks phentermine spain southern yourself etc winter battery youth pressure submitted boston debt keywords medium television interested core break purposes throughout sets dance wood msn itself defined papers playing awards fee studio reader virtual device established answers rent las remote dark programming external apple le regarding instructions min offered theory enjoy remove aid surface minimum visual host variety teachers isbn martin manual block subjects agents increased repair fair civil steel understanding songs fixed wrong beginning hands associates finally az updates desktop classes paris ohio gets sector capacity requires jersey un fat fully father electric saw instruments quotes officer driver businesses dead respect unknown specified restaurant mike trip pst worth mi procedures poor teacher eyes relationship workers farm georgia peace traditional campus tom showing creative coast benefit progress funding devices lord grant sub agree fiction hear sometimes watches careers beyond goes families led museum themselves fan transport interesting blogs wife evaluation accepted former implementation ten hits zone complex th cat galleries references die presented jack flat flow agencies literature respective parent spanish michigan columbia setting dr scale stand economy highest helpful monthly critical frame musical definition secretary angeles networking path australian employee chief gives kb bottom magazines packages detail francisco laws changed pet heard begin individuals colorado royal clean switch russian largest african guy titles relevant guidelines justice connect bible dev cup basket applied weekly vol installation described demand pp suite vegas na square chris attention advance skip diet army auction gear lee os difference allowed correct charles nation selling lots piece sheet firm seven older illinois regulations elements species jump cells module resort facility random pricing dvds certificate minister motion looks fashion directions visitors documentation monitor trading forest calls whose coverage couple giving chance vision ball ending clients actions listen discuss accept automotive naked goal successful sold wind 
communities clinical situation sciences markets lowest highly publishing appear emergency developing lives currency leather determine temperature palm announcements patient actual historical stone bob commerce ringtones perhaps persons difficult scientific satellite fit tests village accounts amateur ex met pain xbox particularly factors coffee www settings buyer cultural steve easily oral ford poster edge functional root au fi closed holidays ice pink zealand balance monitoring graduate replies shot nc architecture initial label thinking scott llc sec recommend canon league waste minute bus provider optional dictionary cold accounting manufacturing sections chair fishing effort phase fields bag fantasy po letters motor va professor context install shirt apparel generally continued foot mass crime count breast techniques ibm rd johnson sc quickly dollars websites religion claim driving permission surgery patch heat wild measures generation kansas miss chemical doctor task reduce brought himself nor component enable exercise bug santa mid guarantee leader diamond israel se processes soft servers alone meetings seconds jones arizona keyword interests flight congress fuel username walk produced italian paperback classifieds wait supported pocket saint rose freedom argument competition creating jim drugs joint premium providers fresh characters attorney upgrade di factor growing thousands km stream apartments pick hearing eastern auctions therapy entries dates generated signed upper administrative serious prime samsung limit began louis steps errors shops del efforts informed ga ac thoughts creek ft worked quantity urban practices sorted reporting essential myself tours platform load affiliate labor immediately admin nursing defense machines designated tags heavy covered recovery joe guys integrated configuration merchant comprehensive expert universal protect drop solid cds presentation languages became orange compliance vehicles prevent theme rich im campaign marine improvement vs guitar finding pennsylvania examples ipod saying spirit ar claims challenge motorola acceptance strategies mo seem affairs touch intended towards sa goals hire election suggest branch charges serve affiliates reasons magic mount smart talking gave ones latin multimedia xp avoid certified manage corner rank computing oregon element birth virus abuse interactive requests separate quarter procedure leadership tables define racing religious facts breakfast kong column plants faith chain developer identify avenue missing died approximately domestic sitemap recommendations moved houston reach comparison mental viewed moment extended sequence inch attack sorry centers opening damage lab reserve recipes cvs gamma plastic produce snow placed truth counter failure follows eu weekend dollar camp ontario automatically des minnesota films bridge native fill williams movement printing baseball owned approval draft chart played contacts cc jesus readers clubs lcd wa jackson equal adventure matching offering shirts profit leaders posters institutions assistant variable ave dj advertisement expect parking headlines yesterday compared determined wholesale workshop russia gone codes kinds extension seattle statements golden completely teams fort cm wi lighting senate forces funny brother gene turned portable tried electrical applicable disc returned pattern ct boat named theatre laser earlier manufacturers sponsor classical icon warranty dedicated indiana direction harry basketball objects ends delete evening assembly nuclear taxes 
mouse signal criminal issued brain sexual wisconsin powerful dream obtained false da cast flower felt personnel passed supplied identified falls pic soul aids opinions promote stated stats hawaii professionals appears carry flag decided nj covers hr em advantage hello designs maintain tourism priority newsletters adults clips savings iv graphic atom payments rw estimated binding brief ended winning eight anonymous iron straight script served wants miscellaneous prepared void dining alert integration atlanta dakota tag interview mix framework disk installed queen vhs credits clearly fix handle sweet desk criteria pubmed dave massachusetts diego hong vice associate ne truck behavior enlarge ray frequently revenue measure changing votes du duty looked discussions bear gain festival laboratory ocean flights experts signs lack depth iowa whatever logged laptop vintage train exactly dry explore maryland spa concept nearly eligible checkout reality forgot handling origin knew gaming feeds billion destination scotland faster intelligence dallas bought con ups nations route followed specifications broken tripadvisor frank alaska zoom blow battle residential anime speak decisions industries protocol query clip partnership editorial nt expression es equity provisions speech wire principles suggestions rural shared sounds replacement tape strategic judge spam economics acid bytes cent forced compatible fight apartment height null zero speaker filed gb netherlands obtain bc consulting recreation offices designer remain managed pr failed marriage roll korea banks fr participants secret bath aa kelly leads negative austin favorites toronto theater springs missouri andrew var perform healthy translation estimates font assets injury mt joseph ministry drivers lawyer figures married protected proposal sharing philadelphia portal waiting birthday beta fail gratis banking officials brian toward won slightly assist conduct contained lingerie legislation calling parameters jazz serving bags profiles miami comics matters houses doc postal relationships tennessee wear controls breaking combined ultimate wales representative frequency introduced minor finish departments residents noted displayed mom reduced physics rare spent performed extreme samples davis daniel bars reviewed row oz forecast removed helps singles administrator cycle amounts contain accuracy dual rise usd sleep mg bird pharmacy brazil creation static scene hunter addresses lady crystal famous writer chairman violence fans oklahoma speakers drink academy dynamic gender eat permanent agriculture dell cleaning constitutes portfolio practical delivered collectibles infrastructure exclusive seat concerns colour vendor originally intel utilities philosophy regulation officers reduction aim bids referred supports nutrition recording regions junior toll les cape ann rings meaning tip secondary wonderful mine ladies henry ticket announced guess agreed prevention whom ski soccer math import posting presence instant mentioned automatic healthcare viewing maintained ch increasing majority connected christ dan dogs sd directors aspects austria ahead moon participation scheme utility preview fly manner matrix containing combination devel amendment despite strength guaranteed turkey libraries proper distributed degrees singapore enterprises delta fear seeking inches phoenix rs convention shares principal daughter standing comfort colors wars cisco ordering kept alpha appeal cruise bonus certification previously hey bookmark buildings specials beat disney 
household batteries adobe smoking bbc becomes drives arms alabama tea improved trees avg achieve positions dress subscription dealer contemporary sky utah nearby rom carried happen exposure panasonic hide permalink signature gambling refer miller provision outdoors clothes caused luxury babes frames certainly indeed newspaper toy circuit layer printed slow removal easier src liability trademark hip printers faqs nine adding kentucky mostly eric spot taylor trackback prints spend factory interior revised grow americans optical promotion relative amazing clock dot hiv identity suites conversion feeling hidden reasonable victoria serial relief revision broadband influence ratio pda importance rain onto dsl planet webmaster copies recipe zum permit seeing proof dna diff tennis bass prescription bedroom empty instance hole pets ride licensed orlando specifically tim bureau maine sql represent conservation pair ideal specs recorded don pieces finished parks dinner lawyers sydney stress cream ss runs trends yeah discover ap patterns boxes louisiana hills javascript fourth nm advisor mn marketplace nd evil aware wilson shape evolution irish certificates objectives stations suggested gps op remains acc greatest firms concerned euro operator structures generic encyclopedia usage cap ink charts continuing mixed census interracial peak tn competitive exist wheel transit suppliers salt compact poetry lights tracking angel bell keeping preparation attempt receiving matches accordance width noise engines forget array discussed accurate stephen elizabeth climate reservations pin playstation alcohol greek instruction managing annotation sister raw differences walking explain smaller newest establish gnu happened expressed jeff extent sharp lesbians ben lane paragraph kill mathematics aol compensation ce export managers aircraft modules sweden conflict conducted versions employer occur percentage knows mississippi describe concern backup requested citizens connecticut heritage personals immediate holding trouble spread coach kevin agricultural expand supporting audience assigned jordan collections ages participate plug specialist cook affect virgin experienced investigation raised hat institution directed dealers searching sporting helping perl affected lib bike totally plate expenses indicate blonde ab proceedings favourite transmission anderson utc characteristics der lose organic seek experiences albums cheats extremely verzeichnis contracts guests hosted diseases concerning developers equivalent chemistry tony neighborhood nevada kits thailand variables agenda anyway continues tracks advisory cam curriculum logic template prince circle soil grants anywhere psychology responses atlantic wet circumstances edward investor identification ram leaving wildlife appliances matt elementary cooking speaking sponsors fox unlimited respond sizes plain exit entered iran arm keys launch wave checking costa belgium printable holy acts guidance mesh trail enforcement symbol crafts highway buddy hardcover observed dean setup poll booking glossary fiscal celebrity styles denver unix filled bond channels ericsson appendix notify blues chocolate pub portion scope hampshire supplier cables cotton bluetooth controlled requirement authorities biology dental killed border ancient debate representatives starts pregnancy causes arkansas biography leisure attractions learned transactions notebook explorer historic attached opened tm husband disabled authorized crazy upcoming britain concert retirement scores financing efficiency 
sp comedy adopted efficient weblog linear commitment specialty bears jean hop carrier edited constant visa mouth jewish meter linked portland interviews concepts nh gun reflect pure deliver wonder lessons fruit begins qualified reform lens alerts treated discovery draw mysql classified relating assume confidence alliance fm confirm warm neither lewis howard offline leaves engineer lifestyle consistent replace clearance connections inventory converter organisation babe checks reached becoming safari objective indicated sugar crew legs sam stick securities allen pdt relation enabled genre slide montana volunteer tested rear democratic enhance switzerland exact bound parameter adapter processor node formal dimensions contribute lock hockey storm micro colleges laptops mile showed challenges editors mens threads bowl supreme brothers recognition presents ref tank submission dolls estimate encourage navy kid regulatory inspection consumers cancel limits territory transaction manchester weapons paint delay pilot outlet contributions continuous db czech resulting cambridge initiative novel pan execution disability increases ultra winner idaho contractor ph episode examination potter dish plays bulletin ia pt indicates modify oxford adam truly epinions painting committed extensive affordable universe candidate databases patent slot psp outstanding ha eating perspective planned watching lodge messenger mirror tournament consideration ds discounts sterling sessions kernel stocks buyers journals gray catalogue ea jennifer antonio charged broad taiwan und chosen demo greece lg swiss sarah clark labour hate terminal publishers nights behalf caribbean liquid rice nebraska loop salary reservation foods gourmet guard properly orleans saving nfl remaining empire resume twenty newly raise prepare avatar gary depending illegal expansion vary hundreds rome arab lincoln helped premier tomorrow purchased milk decide consent drama visiting performing downtown keyboard contest collected nw bands boot suitable ff absolutely millions lunch audit push chamber guinea findings muscle featuring iso implement clicking scheduled polls typical tower yours sum misc calculator significantly chicken temporary attend shower alan sending jason tonight dear sufficient holdem shell province catholic oak vat awareness vancouver governor beer seemed contribution measurement swimming spyware formula constitution packaging solar jose catch jane pakistan ps reliable consultation northwest sir doubt earn finder unable periods classroom tasks democracy attacks kim wallpaper merchandise const resistance doors symptoms resorts biggest memorial visitor twin forth insert baltimore gateway ky dont alumni drawing candidates charlotte ordered biological fighting transition happens preferences spy romance instrument bruce split themes powers heaven br bits pregnant twice classification focused egypt physician hollywood bargain wikipedia cellular norway vermont asking blocks normally lo spiritual hunting diabetes suit ml shift chip res sit bodies photographs cutting wow simon writers marks flexible loved favourites mapping numerous relatively birds satisfaction represents char indexed pittsburgh superior preferred saved paying cartoon shots intellectual moore granted choices carbon spending comfortable magnetic interaction listening effectively registry crisis outlook massive denmark employed bright treat header cs poverty formed piano echo que grid sheets patrick experimental puerto revolution consolidation displays plasma allowing earnings 
voip mystery landscape dependent mechanical journey delaware bidding consultants risks banner applicant charter fig barbara cooperation counties acquisition ports implemented sf directories recognized dreams blogger notification kg licensing stands teach occurred textbooks rapid pull hairy diversity cleveland ut reverse deposit seminar investments latina nasa wheels specify accessibility dutch sensitive templates formats tab depends boots holds router concrete si editing poland folder womens css completion upload pulse universities technique contractors voting courts notices subscriptions calculate mc detroit alexander broadcast converted metro toshiba anniversary improvements strip specification pearl accident nick accessible accessory resident plot qty possibly airline typically representation regard pump exists arrangements smooth conferences uniprotkb strike consumption birmingham flashing lp narrow afternoon threat surveys sitting putting consultant controller ownership committees legislative researchers vietnam trailer anne castle gardens missed malaysia unsubscribe antique labels willing bio molecular acting heads stored exam logos residence attorneys antiques density hundred ryan operators strange sustainable philippines statistical beds mention innovation pcs employers grey parallel honda amended operate bills bold bathroom stable opera definitions von doctors lesson cinema asset ag scan elections drinking reaction blank enhanced entitled severe generate stainless newspapers hospitals vi deluxe humor aged monitors exception lived duration bulk successfully indonesia pursuant sci fabric edt visits primarily tight domains capabilities pmid contrast recommendation flying recruitment sin berlin cute organized ba para siemens adoption improving cr expensive meant capture pounds buffalo organisations plane pg explained seed programmes desire expertise mechanism camping ee jewellery meets welfare peer caught eventually marked driven measured medline bottle agreements considering innovative marshall massage rubber conclusion closing tampa thousand meat legend grace susan ing ks adams python monster alex bang villa bone columns disorders bugs collaboration hamilton detection ftp cookies inner formation tutorial med engineers entity cruises gate holder proposals moderator sw tutorials settlement portugal lawrence roman duties valuable tone collectables ethics forever dragon busy captain fantastic imagine brings heating leg neck hd wing governments purchasing scripts abc stereo appointed taste dealing commit tiny operational rail airlines liberal livecam jay trips gap sides tube turns corresponding descriptions cache belt jacket determination animation oracle er matthew lease productions aviation hobbies proud excess disaster console commands jr telecommunications instructor giant achieved injuries shipped seats approaches biz alarm voltage anthony nintendo usual loading stamps appeared franklin angle rob vinyl highlights mining designers melbourne ongoing worst imaging betting scientists liberty wyoming blackjack argentina era convert possibility analyst commissioner dangerous garage exciting reliability thongs gcc unfortunately respectively volunteers attachment ringtone finland morgan derived pleasure honor asp oriented eagle desktops pants columbus nurse prayer appointment workshops hurricane quiet luck postage producer represented mortgages dial responsibilities cheese comic carefully jet productivity investors crown par underground diagnosis maker crack principle picks vacations gang 
semester calculated fetish applies casinos appearance smoke apache filters incorporated nv craft cake notebooks apart fellow blind lounge mad algorithm semi coins andy gross strongly cafe valentine hilton ken proteins horror su exp familiar capable douglas debian till involving pen investing christopher admission epson shoe elected carrying victory sand madison terrorism joy editions cpu mainly ethnic ran parliament actor finds seal situations fifth allocated citizen vertical corrections structural municipal describes prize sr occurs jon absolute disabilities consists anytime substance prohibited addressed lies pipe soldiers nr guardian lecture simulation layout initiatives ill concentration classics lbs lay interpretation horses lol dirty deck wayne donate taught bankruptcy mp worker optimization alive temple substances prove discovered wings breaks genetic restrictions participating waters promise thin exhibition prefer ridge cabinet modem harris mph bringing sick dose evaluate tiffany tropical collect bet composition toyota streets nationwide vector definitely shaved turning buffer purple existence commentary larry limousines developments def immigration destinations lets mutual pipeline necessarily syntax li attribute prison skill chairs nl everyday apparently surrounding mountains moves popularity inquiry ethernet checked exhibit throw trend sierra visible cats desert postposted ya oldest rhode nba coordinator obviously mercury steven handbook greg navigate worse summit victims epa spaces fundamental burning escape coupons somewhat receiver substantial tr progressive cialis bb boats glance scottish championship arcade richmond sacramento impossible ron russell tells obvious fiber depression graph covering platinum judgment bedrooms talks filing foster modeling passing awarded testimonials trials tissue nz memorabilia clinton masters bonds cartridge alberta explanation folk org commons cincinnati subsection fraud electricity permitted spectrum arrival okay pottery emphasis roger aspect workplace awesome mexican confirmed counts priced wallpapers hist crash lift desired inter closer assumes heights shadow riding infection firefox lisa expense grove eligibility venture clinic korean healing princess mall entering packet spray studios involvement dad buttons placement observations vbulletin funded thompson winners extend roads subsequent pat dublin rolling fell motorcycle yard disclosure establishment memories nelson te arrived creates faces tourist av mayor murder sean adequate senator yield presentations grades cartoons pour digest reg lodging tion dust hence wiki entirely replaced radar rescue undergraduate losses combat reducing stopped occupation lakes donations associations citysearch closely radiation diary seriously kings shooting kent adds nsw ear flags pci baker launched elsewhere pollution conservative guestbook shock effectiveness walls abroad ebony tie ward drawn arthur ian visited roof walker demonstrate atmosphere suggests kiss beast ra operated experiment targets overseas purchases dodge counsel federation pizza invited yards assignment chemicals gordon mod farmers rc queries bmw rush ukraine absence nearest cluster vendors mpeg whereas yoga serves woods surprise lamp rico partial shoppers phil everybody couples nashville ranking jokes cst http ceo simpson twiki sublime counseling palace acceptable satisfied glad wins measurements verify globe trusted copper milwaukee rack medication warehouse shareware ec rep dicke kerry receipt supposed ordinary nobody ghost violation 
configure stability mit applying southwest boss pride institutional expectations independence knowing reporter metabolism keith champion cloudy linda ross personally chile anna plenty solo sentence throat ignore maria uniform excellence wealth tall rm somewhere vacuum dancing attributes recognize brass writes plaza pdas outcomes survival quest publish sri screening toe thumbnail trans jonathan whenever nova lifetime api pioneer booty forgotten acrobat plates acres venue athletic thermal essays behaviour vital telling fairly coastal config cf charity intelligent edinburgh vt excel modes obligation campbell wake stupid harbor hungary traveler urw segment realize regardless lan enemy puzzle rising aluminum wells wishlist opens insight sms restricted republican secrets lucky latter merchants thick trailers repeat syndrome philips attendance penalty drum glasses enables nec iraqi builder vista jessica chips terry flood foto ease arguments amsterdam arena adventures pupils stewart announcement tabs outcome appreciate expanded casual grown polish lovely extras gm centres jerry clause smile lands ri troops indoor bulgaria armed broker charger regularly believed pine cooling tend gulf rt rick trucks cp mechanisms divorce laura shopper tokyo partly nikon customize tradition candy pills tiger donald folks sensor exposed telecom hunt angels deputy indicators sealed thai emissions physicians loaded fred complaint scenes experiments afghanistan dd boost spanking scholarship governance mill founded supplements chronic icons moral den catering aud finger keeps pound locate camcorder pl trained burn implementing roses labs ourselves bread tobacco wooden motors tough roberts incident gonna dynamics lie crm rf conversation decrease chest pension billy revenues emerging worship capability ak fe craig herself producing churches precision damages reserves contributed solve shorts reproduction minority td diverse amp ingredients sb ah johnny sole franchise recorder complaints facing sm nancy promotions tones passion rehabilitation maintaining sight laid clay defence patches weak refund usc towns environments trembl divided blvd reception amd wise emails cyprus wv odds correctly insider seminars consequences makers hearts geography appearing integrity worry ns discrimination eve carter legacy marc pleased danger vitamin widely processed phrase genuine raising implications functionality paradise hybrid reads roles intermediate emotional sons leaf pad glory platforms ja bigger billing diesel versus combine overnight geographic exceed bs rod saudi fault cuba hrs preliminary districts introduce silk promotional kate chevrolet babies bi karen compiled romantic revealed specialists generator albert examine jimmy graham suspension bristol margaret compaq sad correction wolf slowly authentication communicate rugby supplement showtimes cal portions infant promoting sectors samuel fluid grounds fits kick regards meal ta hurt machinery bandwidth unlike equation baskets probability pot dimension wright img barry proven schedules admissions cached warren slip studied reviewer involves quarterly rpm profits devil grass comply marie florist illustrated cherry continental alternate deutsch achievement limitations kenya webcam cuts funeral nutten earrings enjoyed automated chapters pee charlie quebec passenger convenient dennis mars francis tvs sized manga noticed socket silent literary egg mhz signals caps orientation pill theft childhood swing symbols lat meta humans analog facial choosing talent dated flexibility seeker wisdom 
shoot boundary mint packard offset payday philip elite gi spin holders believes swedish poems deadline jurisdiction robot displaying witness collins equipped stages encouraged sur winds powder broadway acquired assess wash cartridges stones entrance gnome roots declaration losing attempts gadgets noble glasgow automation impacts rev gospel advantages shore loves induced ll knight preparing loose aims recipient linking extensions appeals cl earned illness islamic athletics southeast ieee ho alternatives pending parker determining lebanon corp personalized kennedy gt sh conditioning teenage soap ae triple cooper nyc vincent jam secured unusual answered partnerships destruction slots increasingly migration disorder routine toolbar basically rocks conventional titans applicants wearing axis sought genes mounted habitat firewall median guns scanner herein occupational animated judicial rio hs adjustment hero integer treatments bachelor attitude camcorders engaged falling basics montreal carpet rv struct lenses binary genetics attended difficulty punk collective coalition pi dropped enrollment duke walter ai pace besides wage producers ot collector arc hosts interfaces advertisers moments atlas strings dawn representing observation feels torture carl deleted coat mitchell mrs rica restoration convenience returning ralph opposition container yr defendant warner confirmation app embedded inkjet supervisor wizard corps actors liver peripherals liable brochure morris bestsellers petition eminem recall antenna picked assumed departure minneapolis belief killing bikini memphis shoulder decor lookup texts harvard brokers roy ion diameter ottawa doll ic podcast seasons peru interactions refine bidder singer evans herald literacy fails aging nike intervention fed plugin attraction diving invite modification alice latinas suppose customized reed involve moderate terror younger thirty mice opposite understood rapidly dealtime ban temp intro mercedes zus assurance clerk happening vast mills outline amendments tramadol holland receives jeans metropolitan compilation verification fonts ent odd wrap refers mood favor veterans quiz mx sigma gr attractive xhtml occasion recordings jefferson victim demands sleeping careful ext beam gardening obligations arrive orchestra sunset tracked moreover minimal polyphonic lottery tops framed aside outsourcing licence adjustable allocation michelle essay discipline amy ts demonstrated dialogue identifying alphabetical camps declared dispatched aaron handheld trace disposal shut florists packs ge installing switches romania voluntary ncaa thou consult phd greatly blogging mask cycling midnight ng commonly pe photographer inform turkish coal cry messaging pentium quantum murray intent tt zoo largely pleasant announce constructed additions requiring spoke aka arrow engagement sampling rough weird tee refinance lion inspired holes weddings blade suddenly oxygen cookie meals canyon goto meters merely calendars arrangement conclusions passes bibliography pointer compatibility stretch durham furthermore permits cooperative muslim xl neil sleeve netscape cleaner cricket beef feeding stroke township rankings measuring cad hats robin robinson jacksonville strap headquarters sharon crowd tcp transfers surf olympic transformation remained attachments dv dir entities customs administrators personality rainbow hook roulette decline gloves israeli medicare cord skiing cloud facilitate subscriber valve val hewlett explains proceed flickr feelings knife jamaica priorities shelf bookstore 
timing liked parenting adopt denied fotos incredible britney freeware donation outer crop deaths rivers commonwealth pharmaceutical manhattan tales katrina workforce islam nodes tu fy thumbs seeds cited lite ghz hub targeted organizational skype realized twelve founder decade gamecube rr dispute portuguese tired titten adverse everywhere excerpt eng steam discharge ef drinks ace voices acute halloween climbing stood sing tons perfume carol honest albany hazardous restore stack methodology somebody sue ep housewares reputation resistant democrats recycling hang gbp curve creator amber qualifications museums coding slideshow tracker variation passage transferred trunk hiking lb pierre jelsoft headset photograph oakland colombia waves camel distributor lamps underlying hood wrestling suicide archived photoshop jp chi bt arabia gathering projection juice chase mathematical logical sauce fame extract specialized diagnostic panama indianapolis af payable corporations courtesy criticism automobile confidential rfc statutory accommodations athens northeast downloaded judges sl seo retired isp remarks detected decades paintings walked arising nissan bracelet ins eggs juvenile injection yorkshire populations protective afraid acoustic railway cassette initially indicator pointed hb jpg causing mistake norton locked eliminate tc fusion mineral sunglasses ruby steering beads fortune preference canvas threshold parish claimed screens cemetery planner croatia flows stadium venezuela exploration mins fewer sequences coupon nurses ssl stem proxy astronomy lanka opt edwards drew contests flu translate announces mlb costume tagged berkeley voted killer bikes gates adjusted rap tune bishop pulled corn gp shaped compression seasonal establishing farmer counters puts constitutional grew perfectly tin slave instantly cultures norfolk coaching examined trek encoding litigation submissions oem heroes painted lycos ir zdnet broadcasting horizontal artwork cosmetic resulted portrait terrorist informational ethical carriers ecommerce mobility floral builders ties struggle schemes suffering neutral fisher rat spears prospective bedding ultimately joining heading equally artificial bearing spectacular coordination connector brad combo seniors worlds guilty affiliated activation naturally haven tablet jury dos tail subscribers charm lawn violent mitsubishi underwear basin soup potentially ranch constraints crossing inclusive dimensional cottage drunk considerable crimes resolved mozilla byte toner nose latex branches anymore oclc delhi holdings alien locator selecting processors pantyhose plc broke nepal zimbabwe difficulties juan complexity msg constantly browsing resolve barcelona presidential documentary cod territories melissa moscow thesis thru jews nylon palestinian discs rocky bargains frequent trim nigeria ceiling pixels ensuring hispanic cv cb legislature hospitality gen anybody procurement diamonds espn fleet untitled bunch totals marriott singing theoretical afford exercises starring referral nhl surveillance optimal quit distinct protocols lung highlight substitute inclusion hopefully brilliant turner sucking cents reuters ti fc gel todd spoken omega evaluated stayed civic assignments fw manuals doug sees termination watched saver thereof grill households gs redeem rogers grain aaa authentic regime wanna wishes bull montgomery architectural louisville depend differ macintosh movements ranging monica repairs breath amenities virtually cole mart candle hanging colored authorization tale verified lynn formerly 
projector bp situated comparative std seeks herbal loving strictly routing docs stanley psychological surprised retailer vitamins elegant gains renewal vid genealogy opposed deemed scoring expenditure brooklyn liverpool sisters critics connectivity spots oo algorithms hacker madrid similarly margin coin solely fake salon collaborative norman fda excluding turbo headed voters cure madonna commander arch ni murphy thinks thats suggestion hdtv soldier phillips asin aimed justin bomb harm interval mirrors spotlight tricks reset brush investigate thy expansys panels repeated assault connecting spare logistics deer kodak tongue bowling tri danish pal monkey proportion filename skirt florence invest honey um analyses drawings significance scenario ye fs lovers atomic approx symposium arabic gauge essentials junction protecting nn faced mat rachel solving transmitted weekends screenshots produces oven ted intensive chains kingston sixth engage deviant noon switching quoted adapters correspondence farms imports supervision cheat bronze expenditures sandy separation testimony suspect celebrities macro sender mandatory boundaries crucial syndication gym celebration kde adjacent filtering tuition spouse exotic viewer signup threats luxembourg puzzles reaching vb damaged cams receptor laugh joel surgical destroy citation pitch autos yo premises perry proved offensive imperial dozen benjamin deployment teeth cloth studying colleagues stamp lotus salmon olympus separated proc cargo tan directive fx salem mate dl starter upgrades likes butter pepper weapon luggage burden chef tapes zones races isle stylish slim maple luke grocery offshore governing retailers depot kenneth comp alt pie blend harrison ls julie occasionally cbs attending emission pete spec finest realty janet bow penn recruiting apparent instructional phpbb autumn traveling probe midi permissions biotechnology toilet ranked jackets routes packed excited outreach helen mounting recover tied lopez balanced prescribed catherine timely talked debug delayed chuck reproduced hon dale explicit calculation villas ebook consolidated exclude peeing occasions brooks equations newton oils sept exceptional anxiety bingo whilst spatial respondents unto lt ceramic prompt precious minds annually considerations scanners atm xanax eq pays fingers sunny ebooks delivers je queensland necklace musicians leeds composite unavailable cedar arranged lang theaters advocacy raleigh stud fold essentially designing threaded uv qualify blair hopes assessments cms mason diagram burns pumps footwear sg vic beijing peoples victor mario pos attach licenses utils removing advised brunswick spider phys ranges pairs sensitivity trails preservation hudson isolated calgary interim assisted divine streaming approve chose compound intensity technological syndicate abortion dialog venues blast wellness calcium newport antivirus addressing pole discounted indians shield harvest membrane prague previews bangladesh constitute locally concluded pickup desperate mothers nascar iceland demonstration governmental manufactured candles graduation mega bend sailing variations moms sacred addiction morocco chrome tommy springfield refused brake exterior greeting ecology oliver congo glen botswana nav delays synthesis olive undefined unemployment cyber verizon scored enhancement newcastle clone velocity lambda relay composed tears performances oasis baseline cab angry fa societies silicon brazilian identical petroleum compete ist norwegian lover belong honolulu beatles lips retention exchanges 
pond rolls thomson barnes soundtrack wondering malta daddy lc ferry rabbit profession seating dam cnn separately physiology lil collecting das exports omaha tire participant scholarships recreational dominican chad electron loads friendship heather passport motel unions treasury warrant sys solaris frozen occupied josh royalty scales rally observer sunshine strain drag ceremony somehow arrested expanding provincial investigations icq ripe yamaha rely medications hebrew gained rochester dying laundry stuck solomon placing stops homework adjust assessed advertiser enabling encryption filling downloadable sophisticated imposed silence scsi focuses soviet possession cu laboratories treaty vocal trainer organ stronger volumes advances vegetables lemon toxic dns thumbnails darkness pty ws nuts nail bizrate vienna implied span stanford sox stockings joke respondent packing statute rejected satisfy destroyed shelter chapel gamespot manufacture layers wordpress guided vulnerability accountability celebrate accredited appliance compressed bahamas powell mixture bench univ tub rider scheduling radius perspectives mortality logging hampton christians borders therapeutic pads butts inns bobby impressive sheep accordingly architect railroad lectures challenging wines nursery harder cups ash microwave cheapest accidents travesti relocation stuart contributors salvador ali salad np monroe tender violations foam temperatures paste clouds competitions discretion tft tanzania preserve jvc poem unsigned staying cosmetics easter theories repository praise jeremy venice concentrations estonia christianity veteran streams landing signing executed katie negotiations realistic dt cgi showcase integral asks relax namibia generating christina congressional synopsis hardly prairie reunion composer bean sword absent photographic sells ecuador hoping accessed spirits modifications coral pixel float colin bias imported paths bubble por acquire contrary millennium tribune vessel acids focusing viruses cheaper admitted dairy admit mem fancy equality samoa gc achieving tap stickers fisheries exceptions reactions leasing lauren beliefs ci macromedia companion squad analyze ashley scroll relate divisions swim wages additionally suffer forests fellowship nano invalid concerts martial males victorian retain colours execute tunnel genres cambodia patents copyrights yn chaos lithuania mastercard wheat chronicles obtaining beaver updating distribute readings decorative kijiji confused compiler enlargement eagles bases vii accused bee campaigns unity loud conjunction bride rats defines airports instances indigenous begun cfr brunette packets anchor socks validation parade corruption stat trigger incentives cholesterol gathered essex slovenia notified differential beaches folders dramatic surfaces terrible routers cruz pendant dresses baptist scientist starsmerchant hiring clocks arthritis bios females wallace nevertheless reflects taxation fever pmc cuisine surely practitioners transcript myspace theorem inflation thee nb ruth pray stylus compounds pope drums contracting arnold structured reasonably jeep chicks bare hung cattle mba radical graduates rover recommends controlling treasure reload distributors flame levitra tanks assuming monetary elderly pit arlington mono particles floating extraordinary tile indicating bolivia spell hottest stevens coordinate kuwait exclusively emily alleged limitation widescreen compile webster struck rx illustration plymouth warnings construct apps inquiries bridal annex mag gsm inspiration 
tribal curious affecting freight rebate meetup eclipse sudan ddr downloading rec shuttle aggregate stunning cycles affects forecasts detect actively ciao ampland knee prep pb complicated chem fastest butler shopzilla injured decorating payroll cookbook expressions ton courier uploaded shakespeare hints collapse americas connectors unlikely oe gif pros conflicts techno beverage tribute wired elvis immune latvia travelers forestry barriers cant jd rarely gpl infected offerings martha genesis barrier argue incorrect trains metals bicycle furnishings letting arise guatemala celtic thereby irc jamie particle perception minerals advise humidity bottles boxing wy dm bangkok renaissance pathology sara bra ordinance hughes photographers infections jeffrey chess operates brisbane configured survive oscar festivals menus joan possibilities duck reveal canal amino phi contributing herbs clinics mls cow manitoba analytical missions watson lying costumes strict dive saddam circulation drill offense bryan cet protest assumption jerusalem hobby tries transexuales invention nickname fiji technician inline executives enquiries washing audi staffing cognitive exploring trick enquiry closure raid ppc timber volt intense div playlist registrar showers supporters ruling steady dirt statutes withdrawal myers drops predicted wider saskatchewan jc cancellation plugins enrolled sensors screw ministers publicly hourly blame geneva freebsd veterinary acer prostores reseller dist handed suffered intake informal relevance incentive butterfly tucson mechanics heavily swingers fifty headers mistakes numerical ons geek uncle defining counting reflection sink accompanied assure invitation devoted princeton jacob sodium randy spirituality hormone meanwhile proprietary timothy childrens brick grip naval thumbzilla medieval porcelain avi bridges pichunter captured watt thehun decent casting dayton translated shortly cameron columnists pins carlos reno donna andreas warrior diploma cabin innocent scanning ide consensus polo valium copying rpg delivering cordless patricia horn eddie uganda fired journalism pd prot trivia adidas perth frog grammar intention syria disagree klein harvey tires logs undertaken tgp hazard retro leo statewide semiconductor gregory episodes boolean circular anger diy mainland illustrations suits chances interact snap happiness arg substantially bizarre glenn ur auckland olympics fruits identifier geo ribbon calculations doe jpeg conducting startup suzuki trinidad ati kissing wal handy swap exempt crops reduces accomplished calculators geometry impression abs slovakia flip guild correlation gorgeous capitol sim dishes rna barbados chrysler nervous refuse extends fragrance mcdonald replica plumbing brussels tribe neighbors trades superb buzz transparent nuke rid trinity charleston handled legends boom calm champions floors selections projectors inappropriate exhaust comparing shanghai speaks burton vocational davidson copied scotia farming gibson pharmacies fork troy ln roller introducing batch organize appreciated alter nicole latino ghana edges uc mixing handles skilled fitted albuquerque harmony distinguished asthma projected assumptions shareholders twins developmental rip zope regulated triangle amend anticipated oriental reward windsor zambia completing gmbh buf ld hydrogen webshots sprint comparable chick advocate sims confusion copyrighted tray inputs warranties genome escorts documented thong medal paperbacks coaches vessels harbour walks sol keyboards sage knives eco vulnerable arrange artistic 
bat honors booth indie reflected unified bones breed detector ignored polar fallen precise sussex respiratory notifications msgid transexual mainstream invoice evaluating lip subcommittee sap gather suse maternity backed alfred colonial mf carey motels forming embassy cave journalists danny rebecca slight proceeds indirect amongst wool foundations msgstr arrest volleyball mw adipex horizon nu deeply toolbox ict marina liabilities prizes bosnia browsers decreased patio dp tolerance surfing creativity lloyd describing optics pursue lightning overcome eyed ou quotations grab inspector attract brighton beans bookmarks ellis disable snake succeed leonard lending oops reminder xi searched behavioral riverside bathrooms plains sku ht raymond insights abilities initiated sullivan za midwest karaoke trap lonely fool ve nonprofit lancaster suspended hereby observe julia containers attitudes karl berry collar simultaneously racial integrate bermuda amanda sociology mobiles screenshot exhibitions kelkoo confident retrieved exhibits officially consortium dies terrace bacteria pts replied seafood novels rh rrp recipients ought delicious traditions fg jail safely finite kidney periodically fixes sends durable mazda allied throws moisture hungarian roster referring symantec spencer wichita nasdaq uruguay ooo hz transform timer tablets tuning gotten educators tyler futures vegetable verse highs humanities independently wanting custody scratch launches ipaq alignment henderson bk britannica comm ellen competitors nhs rocket aye bullet towers racks lace nasty visibility latitude consciousness ste tumor ugly deposits beverly mistress encounter trustees watts duncan reprints hart bernard resolutions ment accessing forty tubes attempted col midlands priest floyd ronald analysts queue dx sk trance locale nicholas biol yu bundle hammer invasion witnesses runner rows administered notion sq skins mailed oc fujitsu spelling arctic exams rewards beneath strengthen defend aj frederick medicaid treo infrared seventh gods une welsh belly aggressive tex advertisements quarters stolen cia soonest haiti disturbed determines sculpture poly ears dod wp fist naturals neo motivation lenders pharmacology fitting fixtures bloggers mere agrees passengers quantities petersburg consistently powerpoint cons surplus elder sonic obituaries cheers dig taxi punishment appreciation subsequently om belarus nat zoning gravity providence thumb restriction incorporate backgrounds treasurer guitars essence flooring lightweight ethiopia tp mighty athletes humanity transcription jm holmes complications scholars dpi scripting gis remembered galaxy chester snapshot caring loc worn synthetic shaw vp segments testament expo dominant twist specifics itunes stomach partially buried cn newbie minimize darwin ranks wilderness debut generations tournaments bradley deny anatomy bali judy sponsorship headphones fraction trio proceeding cube defects volkswagen uncertainty breakdown milton marker reconstruction subsidiary strengths clarity rugs sandra adelaide encouraging furnished monaco settled folding emirates terrorists airfare comparisons beneficial distributions vaccine belize fate viewpicture promised volvo penny robust bookings threatened minolta republicans discusses gui porter gras jungle ver rn responded rim abstracts zen ivory alpine dis prediction pharmaceuticals andale fabulous remix alias thesaurus individually battlefield literally newer kay ecological spice oval implies cg soma ser cooler appraisal consisting maritime periodic submitting 
overhead ascii prospect shipment breeding citations geographical donor mozambique tension href benz trash shapes wifi tier fwd earl manor envelope diane homeland disclaimers championships excluded andrea breeds rapids disco sheffield bailey aus endif finishing emotions wellington incoming prospects lexmark cleaners bulgarian hwy eternal cashiers guam cite aboriginal remarkable rotation nam preventing productive boulevard eugene ix gdp pig metric compliant minus penalties bennett imagination hotmail refurbished joshua armenia varied grande closest activated actress mess conferencing assign armstrong politicians trackbacks lit accommodate tigers aurora una slides milan premiere lender villages shade chorus christine rhythm digit argued dietary symphony clarke sudden accepting precipitation marilyn lions findlaw ada pools tb lyric claire isolation speeds sustained matched approximate rope carroll rational programmer fighters chambers dump greetings inherited warming incomplete vocals chronicle fountain chubby grave legitimate biographies burner yrs foo investigator gba plaintiff finnish gentle bm prisoners deeper muslims hose mediterranean nightlife footage howto worthy reveals architects saints entrepreneur carries sig freelance duo excessive devon screensaver helena saves regarded valuation unexpected cigarette fog characteristic marion lobby egyptian tunisia metallica outlined consequently headline treating punch appointments str gotta cowboy narrative bahrain enormous karma consist betty queens academics pubs quantitative lucas screensavers subdivision tribes vip defeat clicks distinction honduras naughty hazards insured harper livestock mardi exemption tenant sustainability cabinets tattoo shake algebra shadows holly formatting silly nutritional yea mercy hartford freely marcus sunrise wrapping mild fur nicaragua weblogs timeline tar belongs rj readily affiliation soc fence nudist infinite diana ensures relatives lindsay clan legally shame satisfactory revolutionary bracelets sync civilian telephony mesa fatal remedy realtors breathing briefly thickness adjustments graphical genius discussing aerospace fighter meaningful flesh retreat adapted barely wherever estates rug democrat borough maintains failing shortcuts ka retained voyeurweb pamela andrews marble extending jesse specifies hull logitech surrey briefing belkin dem accreditation wav blackberry highland meditation modular microphone macedonia combining brandon instrumental giants organizing shed balloon moderators winston memo ham solved tide kazakhstan hawaiian standings partition invisible gratuit consoles funk fbi qatar magnet translations porsche cayman jaguar reel sheer commodity posing kilometers rp bind thanksgiving rand hopkins urgent guarantees infants gothic cylinder witch buck indication eh congratulations tba cohen sie usgs puppy kathy acre graphs surround cigarettes revenge expires enemies lows controllers aqua chen emma consultancy finances accepts enjoying conventions eva patrol smell pest hc italiano coordinates rca fp carnival roughly sticker promises responding reef physically divide stakeholders hydrocodone gst consecutive cornell satin bon deserve attempting mailto promo jj representations chan worried tunes garbage competing combines mas beth bradford len phrases kai peninsula chelsea boring reynolds dom jill accurately speeches reaches schema considers sofa catalogs ministries vacancies quizzes parliamentary obj prefix lucia savannah barrel typing nerve dans planets deficit boulder pointing renew coupled viii 
myanmar metadata harold circuits floppy texture handbags jar ev somerset incurred acknowledge thoroughly antigua nottingham thunder tent caution identifies questionnaire qualification locks modelling namely miniature dept hack dare euros interstate pirates aerial hawk consequence rebel systematic perceived origins hired makeup textile lamb madagascar nathan tobago presenting cos troubleshooting uzbekistan indexes pac rl erp centuries gl magnitude ui richardson hindu dh fragrances vocabulary licking earthquake vpn fundraising fcc markers weights albania geological assessing lasting wicked eds introduces kills roommate webcams pushed webmasters ro df computational acdbentity participated junk handhelds wax lucy answering hans impressed slope reggae failures poet conspiracy surname theology nails evident whats rides rehab epic saturn organizer nut allergy sake twisted combinations preceding merit enzyme cumulative zshops planes edmonton tackle disks condo pokemon amplifier ambien arbitrary prominent retrieve lexington vernon sans worldcat titanium irs fairy builds contacted shaft lean bye cdt recorders occasional leslie casio deutsche ana postings innovations kitty postcards dude drain monte fires algeria blessed luis reviewing cardiff cornwall favors potato panic explicitly sticks leone transsexual ez citizenship excuse reforms basement onion strand pf sandwich uw lawsuit alto informative girlfriend bloomberg cheque hierarchy influenced banners reject eau abandoned bd circles italic beats merry mil scuba gore complement cult dash passive mauritius valued cage checklist requesting courage verde lauderdale scenarios gazette hitachi divx extraction batman elevation hearings coleman hugh lap utilization beverages calibration jake eval efficiently anaheim ping textbook dried entertaining prerequisite luther frontier settle stopping refugees knights hypothesis palmer medicines flux derby sao peaceful altered pontiac regression doctrine scenic trainers muze enhancements renewable intersection passwords sewing consistency collectors conclude recognised munich oman celebs gmc propose hh azerbaijan lighter rage adsl uh prix astrology advisors pavilion tactics trusts occurring supplemental travelling talented annie pillow induction derek precisely shorter harley spreading provinces relying finals paraguay steal parcel refined fd bo fifteen widespread incidence fears predict boutique acrylic rolled tuner avon incidents peterson rays asn shannon toddler enhancing flavor alike walt homeless horrible hungry metallic acne blocked interference warriors palestine listprice libs undo cadillac atmospheric malawi wm pk sagem knowledgestorm dana halo ppm curtis parental referenced strikes lesser publicity marathon ant proposition gays pressing gasoline apt dressed scout belfast exec dealt niagara inf eos warcraft charms catalyst trader bucks allowance vcr denial uri designation thrown prepaid raises gem duplicate electro criterion badge wrist civilization analyzed vietnamese heath tremendous ballot lexus varying remedies validity trustee maui weighted angola performs plastics realm corrected jenny helmet salaries postcard elephant yemen encountered tsunami scholar nickel internationally surrounded psi buses expedia geology pct wb creatures coating commented wallet cleared smilies vids accomplish boating drainage shakira corners broader vegetarian rouge yeast yale newfoundland sn qld pas clearing investigated dk ambassador coated intend stephanie contacting vegetation doom findarticles louise kenny specially owen 
domdf_python_tools-3.10.0/domdf_python_tools/import_tools.py000066400000000000000000000171521475315453000245400ustar00rootroot00000000000000#!/usr/bin/env python # # import_tools.py """ Functions for importing classes. .. versionadded:: 0.5.0 """ # # Copyright © 2020 Dominic Davis-Foster # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. # IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, # DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR # OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE # OR OTHER DEALINGS IN THE SOFTWARE.
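# A minimal usage sketch for the discovery helpers defined in this module
# ("mypackage" is a placeholder for any importable package, not a real name):
#
#     import inspect
#     import mypackage
#     from domdf_python_tools.import_tools import discover, iter_submodules
#
#     classes = discover(mypackage, match_func=inspect.isclass)
#     submodules = list(iter_submodules("mypackage"))
#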
# # Based on https://github.com/asottile/git-code-debt/blob/master/git_code_debt/util/discovery.py # Copyright (c) 2014 Anthony Sottile # Licensed under the MIT License # # stdlib import importlib.machinery import importlib.util import inspect import itertools import pkgutil from types import ModuleType from typing import Any, Callable, Dict, Iterator, List, Optional, Type, overload # 3rd party from typing_extensions import Literal, TypedDict # this package from domdf_python_tools.compat import importlib_metadata from domdf_python_tools.paths import PathPlus, sort_paths __all__ = [ "discover", "discover_in_module", "discover_entry_points", "discover_entry_points_by_name", "iter_submodules" ] class _DiscoverKwargsType(TypedDict): match_func: Optional[Callable[[Any], bool]] exclude_side_effects: bool @overload def discover( package: ModuleType, match_func: Optional[Callable[[Any], bool]] = ..., exclude_side_effects: Literal[True] = ..., ) -> List[Type[Any]]: ... @overload def discover( package: ModuleType, match_func: Optional[Callable[[Any], bool]] = ..., exclude_side_effects: Literal[False] = ..., ) -> List[Any]: ... def discover( package: ModuleType, match_func: Optional[Callable[[Any], bool]] = None, exclude_side_effects: bool = True, ) -> List[Any]: """ Returns a list of objects in the given package, optionally filtered by ``match_func``. :param package: A Python package :param match_func: Function taking an object and returning :py:obj:`True` if the object is to be included in the output. :default match_func: :py:obj:`None`, which includes all objects. :param exclude_side_effects: Don't include objects that are only there because of an import side effect. :rtype: .. versionchanged:: 1.0.0 Added the ``exclude_side_effects`` parameter. .. latex:clearpage:: """ kwargs: _DiscoverKwargsType = dict(exclude_side_effects=exclude_side_effects, match_func=match_func) matching_objects = discover_in_module(package, **kwargs) if hasattr(package, "__path__"): package_path = package.__path__ for _, module_name, _ in pkgutil.walk_packages(package_path, prefix=f'{package.__name__}.'): module = __import__(module_name, fromlist=["__trash"], level=0) matching_objects.extend(discover_in_module(module, **kwargs)) return matching_objects def discover_in_module( module: ModuleType, match_func: Optional[Callable[[Any], bool]] = None, exclude_side_effects: bool = True, ) -> List[Any]: """ Returns a list of objects in the given module, optionally filtered by ``match_func``. .. versionadded:: 2.6.0 :param module: A Python module. :param match_func: Function taking an object and returning :py:obj:`True` if the object is to be included in the output. :default match_func: :py:obj:`None`, which includes all objects. :param exclude_side_effects: Don't include objects that are only there because of an import side effect. """ matching_objects = [] # Check all the functions in that module for _, imported_object in inspect.getmembers(module, match_func): if exclude_side_effects: if not hasattr(imported_object, "__module__"): continue if imported_object.__module__ != module.__name__: continue matching_objects.append(imported_object) return matching_objects # # def import_module(filename: str): # """ # Import the module with the given filename. 
# :param filename: # :return: # """ # # spec = importlib.util.spec_from_file_location("typing", filename) # mod = importlib.util.module_from_spec(spec) # spec.loader.exec_module(mod) # sys.modules[mod.__name__] = mod # return mod def discover_entry_points( group_name: str, match_func: Optional[Callable[[Any], bool]] = None, ) -> List[Any]: """ Returns a list of entry points in the given category, optionally filtered by ``match_func``. .. versionadded:: 1.1.0 :param group_name: The entry point group name, e.g. ``'entry_points'``. :param match_func: Function taking an object and returning :py:obj:`True` if the object is to be included in the output. :default match_func: :py:obj:`None`, which includes all objects. :return: List of matching objects. """ return list(discover_entry_points_by_name(group_name, object_match_func=match_func).values()) def discover_entry_points_by_name( group_name: str, name_match_func: Optional[Callable[[Any], bool]] = None, object_match_func: Optional[Callable[[Any], bool]] = None, ) -> Dict[str, Any]: """ Returns a mapping of entry point names to the entry points in the given category, optionally filtered by ``name_match_func`` and ``object_match_func``. .. versionadded:: 2.5.0 :param group_name: The entry point group name, e.g. ``'entry_points'``. :param name_match_func: Function taking the entry point name and returning :py:obj:`True` if the entry point is to be included in the output. :default name_match_func: :py:obj:`None`, which includes all entry points. :param object_match_func: Function taking an object and returning :py:obj:`True` if the object is to be included in the output. :default object_match_func: :py:obj:`None`, which includes all objects. """ # noqa: D400 matching_objects = {} eps = itertools.chain.from_iterable(dist.entry_points for dist in importlib_metadata.distributions()) for entry_point in eps: if entry_point.group != group_name: continue if name_match_func is not None and not name_match_func(entry_point.name): continue entry_point_obj = entry_point.load() if object_match_func is not None and not object_match_func(entry_point_obj): continue matching_objects[entry_point.name] = entry_point_obj return matching_objects def iter_submodules(module: str) -> Iterator[str]: """ Returns an iterator over the names of the submodules and subpackages of the given module. .. versionadded:: 2.6.0 :param module: """ spec: Optional[importlib.machinery.ModuleSpec] = importlib.util.find_spec(module) if spec is None or spec.origin is None: return yield module if spec.submodule_search_locations is None or PathPlus(spec.origin).name != "__init__.py": return for submodule_search_path in spec.submodule_search_locations: for item in sort_paths(*PathPlus(submodule_search_path).iterdir()): if item.name == "__init__.py": continue elif item.suffix == ".py": yield f"{module}.{item.stem}" elif item.name == "__pycache__": continue elif item.is_dir(): yield from sorted(iter_submodules(f"{module}.{item.name}")) domdf_python_tools-3.10.0/domdf_python_tools/iterative.py000066400000000000000000000272761475315453000240120ustar00rootroot00000000000000#!/usr/bin/env python # # iterative.py """ Functions for iteration, looping etc. .. 
versionadded:: 1.4.0 """ # # Copyright © 2018-2020 Dominic Davis-Foster # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. # IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, # DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR # OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE # OR OTHER DEALINGS IN THE SOFTWARE. # # chunks from https://stackoverflow.com/a/312464/3092681 # Copyright © 2008 Ned Batchelder # Licensed under CC-BY-SA # # stdlib import itertools import textwrap from operator import itemgetter from typing import ( Any, Callable, Iterable, Iterator, List, Optional, Sequence, Sized, Tuple, Type, TypeVar, Union, cast ) # 3rd party from natsort import natsorted, ns from typing_extensions import final # this package from domdf_python_tools.utils import magnitude __all__ = [ "chunks", "permutations", "split_len", "Len", "double_chain", "flatten", "make_tree", "natmin", "natmax", "groupfloats", "ranges_from_iterable", "extend", "extend_with", "extend_with_none", "count", "AnyNum", ] _T = TypeVar("_T") AnyNum = TypeVar("AnyNum", float, complex) def chunks(l: Sequence[_T], n: int) -> Iterator[Sequence[_T]]: """ Yield successive ``n``-sized chunks from ``l``. :param l: The objects to yield chunks from. :param n: The size of the chunks. :rtype: .. versionchanged:: 1.4.0 Moved from :mod:`domdf_python_tools.utils` """ for i in range(0, len(l), n): yield l[i:i + n] def permutations(data: Iterable[_T], n: int = 2) -> List[Tuple[_T, ...]]: """ Return permutations containing ``n`` items from ``data`` without any reverse duplicates. If ``n`` is equal to or greater than the length of the data an empty list of returned. :param data: :param n: :rtype: .. versionchanged:: 1.4.0 Moved from :mod:`domdf_python_tools.utils` .. seealso:: :func:`itertools.permutations` and :func:`itertools.combinations` .. latex:clearpage:: """ if n == 0: raise ValueError("'n' cannot be 0") perms = [] for i in itertools.permutations(data, n): if i[::-1] not in perms: perms.append(i) return perms def split_len(string: str, n: int) -> List[str]: """ Split ``string`` every ``n`` characters. :param string: :param n: The number of characters to split after :return: The split string .. versionchanged:: 1.4.0 Moved from :mod:`domdf_python_tools.utils` """ return [string[i:i + n] for i in range(0, len(string), n)] def Len(obj: Sized, start: int = 0, step: int = 1) -> range: """ Shorthand for ``range(len(obj))``. Returns an object that produces a sequence of integers from ``start`` (inclusive) to :func:`len(obj) ` (exclusive) by ``step``. .. versionadded:: 0.4.7 :param obj: The object to iterate over the length of. :param start: The start value of the range. :param step: The step of the range. :rtype: .. 
versionchanged:: 1.4.0 Moved from :mod:`domdf_python_tools.utils` """ return range(start, len(obj), step) def double_chain(iterable: Iterable[Iterable[Iterable[_T]]]) -> Iterator[_T]: """ Flatten a list of lists of lists into a single list. Literally just: .. code-block:: python chain.from_iterable(chain.from_iterable(iterable)) .. compound:: Will convert .. code-block:: python [[(1, 2), (3, 4)], [(5, 6), (7, 8)]] to .. code-block:: python [1, 2, 3, 4, 5, 6, 7, 8] .. versionadded:: 0.4.7 :param iterable: The iterable to chain. :rtype: .. versionchanged:: 1.4.0 Moved from :mod:`domdf_python_tools.utils` """ return itertools.chain.from_iterable(itertools.chain.from_iterable(iterable)) def flatten(iterable: Iterable[_T], primitives: Tuple[Type, ...] = (str, int, float)) -> Iterator[_T]: """ Flattens a mixed list of primitive types and iterables of those types into a single list, regardless of nesting. .. versionadded:: 1.4.0 :param iterable: :param primitives: The primitive types to allow. """ # noqa: D400 for item in iterable: if isinstance(item, primitives): yield item elif isinstance(item, Iterable): yield from flatten(item) else: raise NotImplementedError Branch = Union[Sequence[str], Sequence[Union[Sequence[str], Sequence]]] def make_tree(tree: Branch) -> Iterator[str]: """ Returns the string representation of a mixed list of strings and lists of strings, similar to :manpage:`tree(1)`. .. versionadded:: 1.4.0 :param tree: """ # noqa: D400 last_string = 0 for idx, entry in enumerate(tree): if isinstance(entry, str): last_string = idx for idx, entry in enumerate(tree[:-1]): if isinstance(entry, str): if idx > last_string: yield f"│ {entry}" elif idx == last_string: yield f"└── {entry}" else: yield f"├── {entry}" elif isinstance(entry, Iterable): for line in make_tree(entry): if idx - 1 == last_string: yield textwrap.indent(line, "└── ") else: yield textwrap.indent(line, "│ ") if tree: if isinstance(tree[-1], str): yield f"└── {tree[-1]}" elif isinstance(tree[-1], Iterable): for line in make_tree(tree[-1]): yield textwrap.indent(line, " ") def natmin(seq: Iterable[_T], key: Optional[Callable[[Any], Any]] = None, alg: int = ns.DEFAULT) -> _T: """ Returns the minimum value from ``seq`` when sorted naturally. .. versionadded:: 1.8.0 :param seq: :param key: A key used to determine how to sort each element of the iterable. It is **not** applied recursively. The callable should accept a single argument and return a single value. :param alg: This option is used to control which algorithm :mod:`natsort` uses when sorting. """ return natsorted(seq, key=key, alg=cast(ns, alg))[0] def natmax(seq: Iterable[_T], key: Optional[Callable[[Any], Any]] = None, alg: int = ns.DEFAULT) -> _T: """ Returns the maximum value from ``seq`` when sorted naturally. .. versionadded:: 1.8.0 :param seq: :param key: A key used to determine how to sort each element of the iterable. It is **not** applied recursively. The callable should accept a single argument and return a single value. :param alg: This option is used to control which algorithm :mod:`natsort` uses when sorting. """ return natsorted(seq, key=key, alg=cast(ns, alg))[-1] _group = Tuple[float, ...] def groupfloats( iterable: Iterable[float], step: float = 1, ) -> Iterable[_group]: """ Returns an iterator over the discrete ranges of values in ``iterable``. For example: .. code-block:: python >>> list(groupfloats( ... [170.0, 170.05, 170.1, 170.15, 171.05, 171.1, 171.15, 171.2], ... step=0.05, ... 
)) [(170.0, 170.05, 170.1, 170.15), (171.05, 171.1, 171.15, 171.2)] >>> list(groupfloats([1, 2, 3, 4, 5, 7, 8, 9, 10])) [(1, 2, 3, 4, 5), (7, 8, 9, 10)] .. versionadded:: 2.0.0 :param iterable: :param step: The step between values in ``iterable``. :rtype: .. seealso:: :func:`~.ranges_from_iterable`, which returns an iterator over the min and max values for each range. """ # Based on https://stackoverflow.com/a/4629241 # By user97370 # CC BY-SA 4.0 modifier = 1 / 10**magnitude(step) a: float b: Iterable[_group] def key(pair): return (pair[1] * modifier) - ((pair[0] * modifier) * step) for a, b in itertools.groupby(enumerate(iterable), key=key): yield tuple(map(itemgetter(1), list(b))) def ranges_from_iterable(iterable: Iterable[float], step: float = 1) -> Iterable[Tuple[float, float]]: """ Returns an iterator over the minimum and maximum values for each discrete ranges of values in ``iterable``. For example: .. code-block:: python >>> list(ranges_from_iterable([170.0, 170.05, 170.1, 170.15, 171.05, 171.1, 171.15, 171.2], step=0.05)) [(170.0, 170.15), (171.05, 171.2)] >>> list(ranges_from_iterable([1, 2, 3, 4, 5, 7, 8, 9, 10])) [(1, 5), (7, 10)] :param iterable: :param step: The step between values in ``iterable``. """ for group in groupfloats(iterable, step): yield group[0], group[-1] def extend(sequence: Iterable[_T], minsize: int) -> List[_T]: """ Extend ``sequence`` by repetition until it is at least as long as ``minsize``. .. versionadded:: 2.3.0 :param sequence: :param minsize: :rtype: .. seealso:: :func:`~.extend_with` and :func:`~.extend_with_none` """ output = list(sequence) cycle = itertools.cycle(output) while len(output) < minsize: output.append(next(cycle)) return output def extend_with(sequence: Iterable[_T], minsize: int, with_: _T) -> List[_T]: r""" Extend ``sequence`` by adding ``with\_`` to the right hand end until it is at least as long as ``minsize``. .. versionadded:: 2.3.0 :param sequence: :param minsize: :param with\_: :rtype: .. seealso:: :func:`~.extend` and :func:`~.extend_with_none` .. latex:clearpage:: """ output = list(sequence) while len(output) < minsize: output.append(with_) return output def extend_with_none(sequence: Iterable[_T], minsize: int) -> Sequence[Optional[_T]]: r""" Extend ``sequence`` by adding :py:obj:`None` to the right hand end until it is at least as long as ``minsize``. .. versionadded:: 2.3.0 :param sequence: :param minsize: :rtype: .. seealso:: :func:`~.extend` and :func:`~.extend_with` """ output: Sequence[Optional[_T]] = list(sequence) filler: Sequence[Optional[_T]] = [None] * max(0, minsize - len(output)) return tuple((*output, *filler)) def count(start: AnyNum = 0, step: AnyNum = 1) -> Iterator[AnyNum]: """ Make an iterator which returns evenly spaced values starting with number ``start``. Often used as an argument to :func:`map` to generate consecutive data points. Can also be used with :func:`zip` to add sequence numbers. .. versionadded:: 2.7.0 :param start: :param step: The step between values. :rtype: .. seealso:: :func:`itertools.count`. The difference is that this returns more exact floats, whereas the values from :func:`itertools.count` drift. .. only:: html A demonstration of the drift can be seen in this file: :download:`count_demo.py`. .. latex:clearpage:: """ if not isinstance(start, (int, float, complex)): raise TypeError("a number is required") if not isinstance(step, (int, float, complex)): raise TypeError("a number is required") # count(10) --> 10 11 12 13 14 ... # count(2.5, 0.5) -> 2.5 3.0 3.5 ... 
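# Note: unlike itertools.count, which repeatedly adds ``step`` to a running
# total (letting floating point error accumulate), the closure below recomputes
# ``start + (step * pos)`` from the original operands on every call, so each
# value is only one multiplication and one addition away from exact.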
pos: int = 0 def get_next(): if pos: return start + (step * pos) else: return start @final class count(Iterator[AnyNum]): def __next__(self): nonlocal pos val = get_next() pos += 1 return val def __iter__(self): return self if isinstance(step, int) and step == 1: def __repr__(self) -> str: return f"{self.__class__.__name__}({get_next()})" else: def __repr__(self) -> str: return f"{self.__class__.__name__}{get_next(), step}" def __init_subclass__(cls, **kwargs): raise TypeError("type 'domdf_python_tools.iterative.count' is not an acceptable base type") count.__qualname__ = count.__name__ = "count" return count() # type: ignore domdf_python_tools-3.10.0/domdf_python_tools/pagesizes/000077500000000000000000000000001475315453000234205ustar00rootroot00000000000000domdf_python_tools-3.10.0/domdf_python_tools/pagesizes/__init__.py000066400000000000000000000034541475315453000255370ustar00rootroot00000000000000#!/usr/bin/env python # # __init__.py """ List of common pagesizes and some tools for working with them. This module defines a few common page sizes in points (1/72 inch). """ # # Copyright © 2020 Dominic Davis-Foster # # Based on reportlab.lib.pagesizes and reportlab.lib.units # www.reportlab.co.uk # Copyright ReportLab Europe Ltd. 2000-2017 # Copyright (c) 2000-2018, ReportLab Inc. # All rights reserved. # Licensed under the BSD License # # Includes data from en.wikipedia.org. # Licensed under the Creative Commons Attribution-ShareAlike License # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. # IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, # DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR # OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE # OR OTHER DEALINGS IN THE SOFTWARE. # # this package from .classes import * # noqa: F401 from .sizes import * # noqa: F401 from .units import * # noqa: F401 from .utils import * # noqa: F401 domdf_python_tools-3.10.0/domdf_python_tools/pagesizes/classes.py000066400000000000000000000137701475315453000254370ustar00rootroot00000000000000# !/usr/bin/env python # # classes.py """ Classes representing pagesizes. """ # # Copyright © 2020 Dominic Davis-Foster # # Based on reportlab.lib.pagesizes and reportlab.lib.units # www.reportlab.co.uk # Copyright ReportLab Europe Ltd. 2000-2017 # Copyright (c) 2000-2018, ReportLab Inc. # All rights reserved. # Licensed under the BSD License # # Includes data from en.wikipedia.org. 
# Licensed under the Creative Commons Attribution-ShareAlike License # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. # IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, # DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR # OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE # OR OTHER DEALINGS IN THE SOFTWARE. # # stdlib from collections import namedtuple from typing import List, Tuple # this package from domdf_python_tools.doctools import prettify_docstrings from domdf_python_tools.typing import AnyNumber # this package from .units import Unit, _rounders, cm, inch, mm, pica, pt, um from .utils import convert_from __all__ = [ "BaseSize", "Size_mm", "Size_inch", "Size_cm", "Size_um", "Size_pica", "PageSize", ] @prettify_docstrings class BaseSize(namedtuple("__BaseSize", "width, height")): """ Base class namedtuple representing a page size, in point. """ __slots__: List[str] = [] _unit: Unit = pt #: The page width. width: Unit #: The page height. height: Unit def __new__(cls, width: AnyNumber, height: AnyNumber): """ Create a new :class:`~.BaseSize` object. :param width: The page width. :param height: The page height. """ return super().__new__( cls, cls._unit(width), cls._unit(height), ) def __str__(self) -> str: return f"{self.__class__.__name__}(width={_rounders(self.width, '0')}, height={_rounders(self.height, '0')})" @classmethod def from_pt(cls, size: Tuple[float, float]): """ Create a :class:`~domdf_python_tools.pagesizes.classes.BaseSize` object from a page size in point. :param size: The size, in point, to convert from. :rtype: A subclass of :class:`~domdf_python_tools.pagesizes.classes.BaseSize` """ # noqa: D400 assert isinstance(size, PageSize) return cls(cls._unit.from_pt(size[0]), cls._unit.from_pt(size[1])) @classmethod def from_size(cls, size: Tuple[AnyNumber, AnyNumber]) -> "BaseSize": """ Create a :class:`~domdf_python_tools.pagesizes.classes.BaseSize` object from a tuple. """ return cls(*size) def is_landscape(self) -> bool: """ Returns whether the page is in the landscape orientation. """ return self.width >= self.height def is_portrait(self) -> bool: """ Returns whether the page is in the portrait orientation. """ return self.width < self.height def is_square(self) -> bool: """ Returns whether the given pagesize is square. """ return self.width == self.height def landscape(self) -> "BaseSize": """ Returns the pagesize in landscape orientation. """ if self.is_portrait(): return self.__class__(self.height, self.width) else: return self def portrait(self) -> "BaseSize": """ Returns the pagesize in portrait orientation. """ if self.is_landscape(): return self.__class__(self.height, self.width) else: return self def to_pt(self) -> "PageSize": """ Returns the page size in point. 
""" return PageSize(self.width.as_pt(), self.height.as_pt()) # TODO: conversion to Point for the __eq__ function in the below class Size_mm(BaseSize): """ Represents a pagesize in millimeters. """ _unit = mm class Size_inch(BaseSize): """ Represents a pagesize in inches. """ _unit = inch class Size_cm(BaseSize): """ Represents a pagesize in centimeters. """ _unit = cm class Size_um(BaseSize): """ Represents a pagesize in micrometers. """ _unit = um class Size_pica(BaseSize): """ Represents a pagesize in pica. """ _unit = pica class PageSize(BaseSize): """ Represents a pagesize in point. :param width: The page width :param height: The page height The pagesize can be converted to other units using the properties below. """ __slots__: List[str] = [] def __new__( cls, width: AnyNumber, height: AnyNumber, unit: AnyNumber = pt, # pylint: disable=used-before-assignment ): """ Create a new :class:`~domdf_python_tools.pagesizes.classes.PageSize` object. :param width: The page width. :param height: The page height. :param unit: """ width, height = convert_from((width, height), unit) return super().__new__(cls, width, height) @property def pt(self) -> "PageSize": """ Returns the pagesize in pt. """ return self @property def inch(self) -> Size_inch: """ Returns the pagesize in inches. """ return Size_inch.from_pt(self) @property def cm(self) -> Size_cm: """ Returns the pagesize in centimeters. """ return Size_cm.from_pt(self) @property def mm(self) -> Size_mm: """ Returns the pagesize in millimeters. """ return Size_mm.from_pt(self) @property def um(self) -> Size_um: """ Returns the pagesize in micrometers. """ return Size_um.from_pt(self) µm = um @property def pc(self) -> Size_pica: """ Returns the pagesize in pica. """ return Size_pica.from_pt(self) pica = pc domdf_python_tools-3.10.0/domdf_python_tools/pagesizes/sizes.py000066400000000000000000000211431475315453000251300ustar00rootroot00000000000000# !/usr/bin/env python # # sizes.py """ Common pagesizes in point/pt. .. TODO:: finish the list of the page sizes .. |iso216| replace:: `ISO 216 `__ Each pagesize is an instance of :class:`domdf_python_tools.pagesizes.PageSize`. The following sizes are available: """ # # Copyright © 2020 Dominic Davis-Foster # # Based on reportlab.lib.pagesizes and reportlab.lib.units # www.reportlab.co.uk # Copyright ReportLab Europe Ltd. 2000-2017 # Copyright (c) 2000-2018, ReportLab Inc. # All rights reserved. # Licensed under the BSD License # # Includes data from en.wikipedia.org. # Licensed under the Creative Commons Attribution-ShareAlike License # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
# OR OTHER DEALINGS IN THE SOFTWARE.
#

__all__ = [
	"_4A0", "_2A0", "A0", "A1", "A2", "A3", "A4", "A5", "A6", "A7", "A8", "A9", "A10",
	"B0", "B1", "B2", "B3", "B4", "B5", "B6", "B7", "B8", "B9", "B10",
	"C0", "C1", "C2", "C3", "C4", "C5", "C6", "C7", "C8", "C9", "C10",
	"A2EXTRA", "A3EXTRA", "A3SUPER", "SUPERA3", "A4EXTRA", "A4SUPER", "SUPERA4", "A4LONG", "A5EXTRA", "SOB5EXTRA",
	"LETTER", "LEGAL", "TABLOID", "ELEVENSEVENTEEN",
	"JUNIOR_LEGAL", "HALF_LETTER", "GOV_LETTER", "GOV_LEGAL", "LEDGER", "EMPEROR",
	"QUAD_ROYAL", "QUAD_DEMY", "ANTIQUARIAN", "GRAND_EAGLE", "DOUBLE_ELEPHANT", "ATLAS", "DOUBLE_ROYAL",
	"COLOMBIER", "DOUBLE_DEMY_US", "DOUBLE_DEMY", "DOUBLE_DEMY_UK", "IMPERIAL", "DOUBLE_LARGE_POST",
	"ELEPHANT", "PRINCESS", "CARTRIDGE", "ROYAL", "SHEET", "HALF_POST", "DOUBLE_POST", "SUPER_ROYAL",
	"BROADSHEET", "MEDIUM_US", "MEDIUM_UK", "DEMY", "COPY_DRAUGHT", "LARGE_POST_US", "LARGE_POST_UK",
	"POST_US", "POST_UK", "CROWN", "PINCHED_POST", "FOOLSCAP_US", "FOOLSCAP_UK", "SMALL_FOOLSCAP",
	"BRIEF", "POTT", "QUARTO_US", "EXECUTIVE", "MONARCH", "FOLIO", "FOOLSCAP_FOLIO", "QUARTO", "QUARTO_UK",
	"KINGS", "DUKES", "ID_1", "ID_2", "ID_3", "ID_000",
	]

# this package
from .classes import Size_inch, Size_mm

# ISO 216 standard paper sizes; see eg https://en.wikipedia.org/wiki/ISO_216
# also http://www.printernational.org/iso-paper-sizes.php
_4A0 = Size_mm(1682, 2378).to_pt() #: |iso216| 4A0 Paper
_2A0 = Size_mm(1189, 1682).to_pt() #: |iso216| 2A0 Paper
A0 = Size_mm(841, 1189).to_pt() #: |iso216| A0 Paper
A1 = Size_mm(594, 841).to_pt() #: |iso216| A1 Paper
A2 = Size_mm(420, 594).to_pt() #: |iso216| A2 Paper
A3 = Size_mm(297, 420).to_pt() #: |iso216| A3 Paper
A4 = Size_mm(210, 297).to_pt() #: |iso216| A4 Paper
A5 = Size_mm(148, 210).to_pt() #: |iso216| A5 Paper
A6 = Size_mm(105, 148).to_pt() #: |iso216| A6 Paper
A7 = Size_mm(74, 105).to_pt() #: |iso216| A7 Paper
A8 = Size_mm(52, 74).to_pt() #: |iso216| A8 Paper
A9 = Size_mm(37, 52).to_pt() #: |iso216| A9 Paper
A10 = Size_mm(26, 37).to_pt() #: |iso216| A10 Paper

# _W, _H = (21 * cm, 29.7 * cm)
# A6 = (_W * .5, _H * .5)
# A5 = (_H * .5, _W)
# A4 = (_W, _H)
# A3 = (_H, _W * 2)
# A2 = (_W * 2, _H * 2)
# A1 = (_H * 2, _W * 4)
# A0 = (_W * 4, _H * 4)

B0 = Size_mm(1000, 1414) #: |iso216| B0 Paper
B1 = Size_mm(707, 1000) #: |iso216| B1 Paper
B2 = Size_mm(500, 707) #: |iso216| B2 Paper
B3 = Size_mm(353, 500) #: |iso216| B3 Paper
B4 = Size_mm(250, 353) #: |iso216| B4 Paper
B5 = Size_mm(176, 250) #: |iso216| B5 Paper
B6 = Size_mm(125, 176) #: |iso216| B6 Paper
B7 = Size_mm(88, 125) #: |iso216| B7 Paper
B8 = Size_mm(62, 88) #: |iso216| B8 Paper
B9 = Size_mm(44, 62) #: |iso216| B9 Paper
B10 = Size_mm(31, 44) #: |iso216| B10 Paper

# _BW, _BH = (25 * cm, 35.3 * cm)
# B6 = (_BW * .5, _BH * .5)
# B5 = (_BH * .5, _BW)
# B4 = (_BW, _BH)
# B3 = (_BH * 2, _BW)
# B2 = (_BW * 2, _BH * 2)
# B1 = (_BH * 4, _BW * 2)
# B0 = (_BW * 4, _BH * 4)

C0 = Size_mm(917, 1297) #: |iso216| C0 Paper
C1 = Size_mm(648, 917) #: |iso216| C1 Paper
C2 = Size_mm(458, 648) #: |iso216| C2 Paper
C3 = Size_mm(324, 458) #: |iso216| C3 Paper
C4 = Size_mm(229, 324) #: |iso216| C4 Paper
C5 = Size_mm(162, 229) #: |iso216| C5 Paper
C6 = Size_mm(114, 162) #: |iso216| C6 Paper
C7 = Size_mm(81, 114) #: |iso216| C7 Paper
C8 = Size_mm(57, 81) #: |iso216| C8 Paper
C9 = Size_mm(40, 57) #: |iso216| C9 Paper
C10 = Size_mm(28, 40) #: |iso216| C10 Paper

A2EXTRA = Size_mm(445, 619) #: A2 Extra Paper
A3EXTRA = Size_mm(322, 445) #: A3 Extra Paper
A3SUPER = Size_mm(305, 508) #: A3 Super Paper (different to Super A3)
SUPERA3 = Size_mm(305, 487) #: Super A3 Paper (different to A3 Super)
A4EXTRA = Size_mm(235, 322) #: A4 Extra Paper
A4SUPER = Size_mm(229, 322) #: A4 Super Paper (different to Super A4)
SUPERA4 = Size_mm(227, 356) #: Super A4 Paper (different to A4 Super)
A4LONG = Size_mm(210, 348) #: A4 Long Paper
A5EXTRA = Size_mm(173, 235) #: A5 Extra Paper
SOB5EXTRA = Size_mm(202, 276) #: SO B5 Extra Paper

# American paper sizes
LETTER = Size_inch(8.5, 11).to_pt() #: North American "Letter" Paper
LEGAL = Size_inch(8.5, 14).to_pt() #: North American "Legal" Paper
TABLOID = ELEVENSEVENTEEN = Size_inch(11, 17).to_pt()

# From https://en.wikipedia.org/wiki/Paper_size
JUNIOR_LEGAL = Size_inch(5, 8).to_pt() #: Junior Legal
HALF_LETTER = Size_inch(5.5, 8).to_pt() #: Half Letter
GOV_LETTER = Size_inch(8, 10.5).to_pt() #: Government Letter
GOV_LEGAL = Size_inch(8.5, 13).to_pt() #: Government Legal
LEDGER = Size_inch(17, 11).to_pt() #: Ledger
EMPEROR = Size_inch(48, 72).to_pt() #: Emperor
QUAD_ROYAL = Size_inch(40, 50).to_pt() #: Quad Royal
QUAD_DEMY = Size_inch(35, 40).to_pt() #:
ANTIQUARIAN = Size_inch(31, 53).to_pt() #:
GRAND_EAGLE = Size_inch(28.75, 42).to_pt() #:
DOUBLE_ELEPHANT = Size_inch(26.75, 40).to_pt() #:
ATLAS = Size_inch(26, 34).to_pt() #:
DOUBLE_ROYAL = Size_inch(25, 40).to_pt() #:
COLOMBIER = Size_inch(23.5, 34.5).to_pt() #:
DOUBLE_DEMY_US = Size_inch(22.5, 35.5).to_pt() #:
DOUBLE_DEMY = DOUBLE_DEMY_UK = Size_inch(22.5, 35).to_pt() #:
IMPERIAL = Size_inch(22, 30).to_pt() #:
DOUBLE_LARGE_POST = Size_inch(21, 33).to_pt() #:
ELEPHANT = Size_inch(23, 28).to_pt() #:
PRINCESS = Size_inch(22.5, 28).to_pt() #:
CARTRIDGE = Size_inch(21, 26).to_pt() #:
ROYAL = Size_inch(20, 25).to_pt() #:
SHEET = HALF_POST = Size_inch(19.5, 23.5).to_pt() #:
DOUBLE_POST = Size_inch(19, 30.5).to_pt() #:
SUPER_ROYAL = Size_inch(19, 27).to_pt() #:
BROADSHEET = Size_inch(18, 24).to_pt() #:
MEDIUM_US = Size_inch(17.5, 23).to_pt() #:
MEDIUM_UK = Size_inch(18, 23).to_pt() #:
DEMY = Size_inch(17.5, 22.5).to_pt() #:
COPY_DRAUGHT = Size_inch(16, 20).to_pt() #:
LARGE_POST_US = Size_inch(15.5, 20).to_pt() #:
LARGE_POST_UK = Size_inch(16.5, 21).to_pt() #:
POST_US = Size_inch(15.5, 19.35).to_pt() #:
POST_UK = Size_inch(15.5, 19.5).to_pt() #:
CROWN = Size_inch(15, 20).to_pt() #:
PINCHED_POST = Size_inch(14.75, 18.5).to_pt() #:
FOOLSCAP_US = Size_inch(13.5, 17).to_pt() #:
FOOLSCAP_UK = Size_inch(13, 18).to_pt() #:
SMALL_FOOLSCAP = Size_inch(13.35, 16.5).to_pt() #:
BRIEF = Size_inch(13.5, 16).to_pt() #:
POTT = Size_inch(12.5, 15).to_pt() #:
QUARTO_US = Size_inch(9, 11).to_pt() #:
EXECUTIVE = MONARCH = Size_inch(7.35, 10.5).to_pt() #:
FOLIO = FOOLSCAP_FOLIO = Size_inch(8, 13).to_pt() #:
QUARTO = QUARTO_UK = Size_inch(8, 10).to_pt() #:
# IMPERIAL = Size_inch(7*inch, 9*inch).to_pt() there are two of these?
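# A small usage sketch (illustrative only). Constants defined with ``.to_pt()``,
# such as ``A4`` and ``LETTER`` above, are PageSize objects measured in points,
# so they can be converted or re-oriented via the PageSize API:
#
#     from domdf_python_tools.pagesizes import A4, LETTER
#
#     assert A4.is_portrait()           # 210 mm x 297 mm, taller than wide
#     a4_in_mm = A4.mm                  # approximately Size_mm(210, 297)
#     letter_landscape = LETTER.landscape()
#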
KINGS = Size_inch(6.5, 8).to_pt() #: DUKES = Size_inch(5.5, 7).to_pt() #: # https://en.wikipedia.org/wiki/ISO/IEC_7810 ID_1 = Size_mm(85.60, 53.98).to_pt() #: Most banking cards and ID cards ID_2 = Size_mm(105, 74).to_pt() #: French and other ID cards; Visas ID_3 = Size_mm(125, 88).to_pt() #: US government ID cards ID_000 = Size_mm(25, 15).to_pt() #: SIM cards domdf_python_tools-3.10.0/domdf_python_tools/pagesizes/units.py000066400000000000000000000213221475315453000251340ustar00rootroot00000000000000#!/usr/bin/env python # # units.py """ Provides a variety of units for use with pagesizes. """ # # Copyright © 2020 Dominic Davis-Foster # # Based on reportlab.lib.pagesizes and reportlab.lib.units # www.reportlab.co.uk # Copyright ReportLab Europe Ltd. 2000-2017 # Copyright (c) 2000-2018, ReportLab Inc. # All rights reserved. # Licensed under the BSD License # # Includes data from en.wikipedia.org. # Licensed under the Creative Commons Attribution-ShareAlike License # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. # IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, # DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR # OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE # OR OTHER DEALINGS IN THE SOFTWARE. # # stdlib import math from decimal import ROUND_HALF_UP, Decimal from typing import SupportsFloat, Union # this package from domdf_python_tools.doctools import prettify_docstrings __all__ = [ "pt", "inch", "cm", "mm", "um", "pc", "pica", "Unit", "Unitpt", "UnitInch", "Unitcm", "Unitmm", "Unitum", "Unitpc", ] def _rounders(val_to_round: Union[str, int, float, Decimal], round_format: str) -> Decimal: return Decimal(Decimal(val_to_round).quantize(Decimal(str(round_format)), rounding=ROUND_HALF_UP)) @prettify_docstrings class Unit(float): r""" Represents a unit, such as a point. Behaves much like a float (which it inherits from). :bold-title:`Addition` Units can be added to each other: .. code-block:: python >>> (3*mm) + (7*mm) When adding different :class:`~domdf_python_tools.pagesizes.units.Unit` objects, the result has the type of the former unit: .. code-block:: python >>> (2.54*cm) + inch >>> inch + (2.54*cm) :class:`~domdf_python_tools.pagesizes.units.Unit` objects can also be added to :class:`float` and :class:`int` objects: .. code-block:: python >>> (3*cm) + 7 >>> 7 + (3*cm) :bold-title:`Subtraction` Subtraction works the same as addition: .. code-block:: python >>> (17*mm) - (7*mm) >>> (2.54*cm) - inch >>> inch - (2.54*cm) >>> (17*cm) - 7 >>> 17 - (7*cm) :bold-title:`Multiplication` :class:`~domdf_python_tools.pagesizes.units.Unit` objects can only be multipled by :class:`float` and :class:`int` objects: .. 
code-block:: python >>> (3*mm) * 3 >>> 3 * (3*mm) >>> 3.5 * (3*mm) Multiplication works either way round. Multiplying by another :class:`~domdf_python_tools.pagesizes.units.Unit` results in a :exc:`NotImplementedError`: .. code-block:: python >>> inch * (7*cm) Traceback (most recent call last): NotImplementedError: Multiplying a unit by another unit is not allowed. :bold-title:`Division` :class:`~domdf_python_tools.pagesizes.units.Unit`\s can only be divided by :class:`float` and :class:`int` objects: .. code-block:: python >>> (3*mm) / 3 >>> (10*mm) / 2.5 Dividing by another unit results in a :exc:`NotImplementedError`: .. code-block:: python >>> inch / (7*cm) Traceback (most recent call last): NotImplementedError: Dividing a unit by another unit is not allowed. Likewise, trying to divide a:class:`float` and :class:`int` object by a unit results in a :exc:`NotImplementedError`: .. code-block:: python >>> 3 / (3*mm) Traceback (most recent call last): NotImplementedError: Dividing by a unit is not allowed. :bold-title:`Powers` Powers (using ``**``) are not officially supported. :bold-title:`Modulo Division` Modulo division of a :class:`~domdf_python_tools.pagesizes.units.Unit` by a :class:`float` or :class:`int` object is allowed: .. code-block:: python >>> (3*mm) % 2.5 Dividing by a unit, or modulo division of two units, is not officially supported. .. latex:clearpage:: """ name: str = "pt" _in_pt: float = 1 def __repr__(self): value = _rounders(float(self), "0.000") as_pt = _rounders(self.as_pt(), "0.000") return f"" def __str__(self): value = _rounders(float(self), "0.000") as_pt = _rounders(self.as_pt(), "0.000") return f"" def __mul__(self, other: Union[float, "Unit"]) -> "Unit": if isinstance(other, Unit): raise NotImplementedError("Multiplying a unit by another unit is not allowed.") return self.__class__(super().__mul__(other)) __rmul__ = __mul__ def __truediv__(self, other: Union[float, "Unit"]) -> "Unit": if isinstance(other, Unit): raise NotImplementedError("Dividing a unit by another unit is not allowed.") return self.__class__(super().__truediv__(other)) def __floordiv__(self, other: Union[float, "Unit"]) -> "Unit": if isinstance(other, Unit): raise NotImplementedError("Dividing a unit by another unit is not allowed.") return self.__class__(super().__floordiv__(other)) def __eq__(self, other: object) -> bool: if isinstance(other, Unit): if self._in_pt != 1: self_value = self.as_pt() else: self_value = self if other._in_pt != 1: other_value = other.as_pt() else: other_value = other return math.isclose(float(self_value), float(other_value), abs_tol=1e-8) else: return super().__eq__(other) def __mod__(self, other: Union[float, "Unit"]) -> "Unit": if isinstance(other, Unit): raise NotImplementedError("Modulo division of a unit by another unit is not allowed.") return self.__class__(super().__mod__(other)) def __pow__(self, power, modulo=None): raise NotImplementedError("Powers are not supported for units.") def __rtruediv__(self, other): raise NotImplementedError("Dividing by a unit is not allowed.") __rdiv__ = __rtruediv__ def __add__(self, other: Union[float, "Unit"]) -> "Unit": if isinstance(other, Unit): return self.__class__.from_pt(float(self.as_pt()) + float(other.as_pt())) else: return self.__class__(super().__add__(other)) __radd__ = __add__ def __sub__(self, other: Union[float, "Unit"]) -> "Unit": if isinstance(other, Unit): return self.__class__.from_pt(float(self.as_pt()) - float(other.as_pt())) else: return self.__class__(super().__sub__(other)) def __rsub__(self, 
other: Union[float, "Unit"]) -> "Unit": if isinstance(other, Unit): # pragma: no cover (sub should be called instead) return self.__class__.from_pt(float(other.as_pt()) - float(self.as_pt())) else: return self.__class__(super().__rsub__(other)) def as_pt(self) -> "Unit": """ Returns the unit in point. """ return Unit(float(_rounders(float(self) * self._in_pt, "0.000000"))) @classmethod def from_pt(cls, value: float) -> "Unit": """ Construct a :class:`~.Unit` object from a value in point. :param value: """ return cls(value / cls._in_pt) def __call__(self, value: Union[SupportsFloat, str, bytes, bytearray] = 0.0) -> "Unit": """ Returns an instance of the :class:`Unit` with the given value. :param value: """ return self.__class__(value) class Unitpt(Unit): """ Point. """ name = "pt" _in_pt = 1 class UnitInch(Unit): """ Inch. """ name = "inch" _in_pt = 72.0 class Unitcm(Unit): """ Centimetres. """ name = "cm" _in_pt = 28.3464566929 class Unitmm(Unit): """ Millimetres. """ name = "mm" _in_pt = 2.83464566929 class Unitum(Unit): """ Micrometres. """ name = "µm" _in_pt = 0.00283464566929 class Unitpc(Unit): """ Pica. """ name = "pc" _in_pt = 12.0 # Units pt = Unitpt(1) #: Point inch = UnitInch(1) #: Inch cm = Unitcm(1) #: Centimetre mm = Unitmm(1) #: Millimetre um = Unitum(1) #: Micrometre pc = pica = Unitpc(1) #: Pica domdf_python_tools-3.10.0/domdf_python_tools/pagesizes/utils.py000066400000000000000000000070011475315453000251300ustar00rootroot00000000000000# !/usr/bin/env python # # utils.py """ Tools for working with pagesizes. """ # # Copyright © 2020 Dominic Davis-Foster # # Based on reportlab.lib.pagesizes and reportlab.lib.units # www.reportlab.co.uk # Copyright ReportLab Europe Ltd. 2000-2017 # Copyright (c) 2000-2018, ReportLab Inc. # All rights reserved. # Licensed under the BSD License # # Includes data from en.wikipedia.org. # Licensed under the Creative Commons Attribution-ShareAlike License # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. # IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, # DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR # OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE # OR OTHER DEALINGS IN THE SOFTWARE. # # stdlib import re from typing import Sequence, Tuple, Union, overload # this package from domdf_python_tools.typing import AnyNumber # this package from .units import Unit, cm, inch, mm, pc, pt, um # from .units import Unit __all__ = ["convert_from", "parse_measurement"] @overload def convert_from(value: Sequence[AnyNumber], from_: AnyNumber) -> Tuple[float, ...]: ... @overload def convert_from(value: AnyNumber, from_: AnyNumber) -> float: ... 
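# A brief usage sketch for the helpers in this module (illustrative only;
# the numeric result quoted is approximate):
#
#     from domdf_python_tools.pagesizes.units import mm
#     from domdf_python_tools.pagesizes.utils import convert_from, parse_measurement
#
#     convert_from(210, mm)           # 210 mm expressed in points (about 595.3)
#     convert_from((210, 297), mm)    # an A4-sized tuple converted to points
#     parse_measurement("210 mm")     # equivalent to 210 * mm
#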
def convert_from( value: Union[Sequence[AnyNumber], AnyNumber], from_: AnyNumber, ) -> Union[float, Tuple[float, ...]]: r""" Convert ``value`` to point from the unit specified in ``from_``. :param value: :param from\_: The unit to convert from, specified as a number of points. """ if isinstance(value, Sequence): return _sequence_convert_from(value, from_) else: return _sequence_convert_from((value, ), from_)[0] def _sequence_convert_from(seq: Sequence[AnyNumber], from_: AnyNumber) -> Tuple[float, ...]: if isinstance(from_, Unit): from_ = from_._in_pt else: from_ = float(from_) return tuple(float(x) * from_ for x in seq) _measurement_re = re.compile(r"(\d*\.?\d+) *([A-Za-zμµ\"']*)") def parse_measurement(measurement: str) -> Union[float, Tuple[float, ...]]: """ Parse the given measurement. :param measurement: """ # TODO: docstring all_matches = _measurement_re.findall(measurement) if len(all_matches) > 1: raise ValueError("Too many measurements") elif len(all_matches) == 0: raise ValueError("Unable to parse measurement") val, unit = all_matches[0] if '' in {val, unit}: raise ValueError("Unable to parse measurement") val = float(val) if unit == "mm": return val * mm elif unit == "cm": return val * cm elif unit in {"um", "μm", "µm"}: # second is mu, third is micro return val * um elif unit == "pt": return val * pt elif unit in {"inch", "in", '"'}: return val * inch elif unit in ("pc", "pica"): return val * pc raise ValueError("Unknown unit") domdf_python_tools-3.10.0/domdf_python_tools/paths.py000066400000000000000000001034701475315453000231240ustar00rootroot00000000000000#!/usr/bin/env python # # paths.py """ Functions for paths and files. .. versionchanged:: 1.0.0 Removed ``relpath2``. Use :func:`domdf_python_tools.paths.relpath` instead. """ # # Copyright © 2018-2020 Dominic Davis-Foster # # Parts of the docstrings, the PathPlus class and the DirComparator class # based on Python and its Documentation # Licensed under the Python Software Foundation License Version 2. # Copyright © 2001-2021 Python Software Foundation. All rights reserved. # Copyright © 2000 BeOpen.com. All rights reserved. # Copyright © 1995-2000 Corporation for National Research Initiatives. All rights reserved. # Copyright © 1991-1995 Stichting Mathematisch Centrum. All rights reserved. # # copytree based on https://stackoverflow.com/a/12514470/3092681 # Copyright © 2012 atzz # Licensed under CC-BY-SA # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. # IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, # DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR # OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE # OR OTHER DEALINGS IN THE SOFTWARE. 
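# A minimal usage sketch for the PathPlus class defined further down in this
# module ("example.txt" is a made-up filename used purely for illustration):
#
#     from domdf_python_tools.paths import PathPlus
#
#     p = PathPlus("example.txt")
#     p.write_clean("hello world")    # UTF-8 by default, trailing whitespace
#                                     # stripped, single newline at end of file
#     text = p.read_text()
#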
# # stdlib import contextlib import filecmp import fnmatch import gzip import json import os import pathlib import shutil import stat import sys import tempfile import urllib.parse from collections import defaultdict, deque from operator import methodcaller from typing import ( IO, Any, Callable, ContextManager, Dict, Iterable, Iterator, List, Optional, Sequence, Type, TypeVar, Union ) # this package from domdf_python_tools.compat import nullcontext from domdf_python_tools.typing import JsonLibrary, PathLike __all__ = [ "append", "copytree", "delete", "maybe_make", "parent_path", "read", "relpath", "write", "clean_writer", "make_executable", "PathPlus", "PosixPathPlus", "WindowsPathPlus", "in_directory", "_P", "_PP", "traverse_to_file", "matchglob", "unwanted_dirs", "TemporaryPathPlus", "sort_paths", "DirComparator", "compare_dirs", ] NEWLINE_DEFAULT = type("NEWLINE_DEFAULT", (object, ), {"__repr__": lambda self: "NEWLINE_DEFAULT"})() _P = TypeVar("_P", bound=pathlib.Path) """ .. versionadded:: 0.11.0 .. versionchanged:: 1.7.0 Now bound to :class:`pathlib.Path`. """ _PP = TypeVar("_PP", bound="PathPlus") """ .. versionadded:: 2.3.0 """ unwanted_dirs = ( ".git", ".hg", "venv", ".venv", ".mypy_cache", "__pycache__", ".pytest_cache", ".tox", ".tox4", ".nox", "__pypackages__", "dosdevices", ) """ A list of directories which will likely be unwanted when searching directory trees for files. .. versionadded:: 2.3.0 .. versionchanged:: 2.9.0 Added ``.hg`` (`mercurial `_) .. versionchanged:: 3.0.0 Added ``__pypackages__`` (:pep:`582`) .. versionchanged:: 3.2.0 Added ``.nox`` (https://nox.thea.codes/) """ def append(var: str, filename: PathLike, **kwargs) -> int: """ Append ``var`` to the file ``filename`` in the current directory. .. TODO:: make this the file in the given directory, by default the current directory :param var: The value to append to the file :param filename: The file to append to """ kwargs.setdefault("encoding", "UTF-8") with open(os.path.join(os.getcwd(), filename), 'a', **kwargs) as f: # noqa: ENC001 return f.write(var) def copytree( src: PathLike, dst: PathLike, symlinks: bool = False, ignore: Optional[Callable] = None, ) -> PathLike: """ Alternative to :func:`shutil.copytree` to support copying to a directory that already exists. Based on https://stackoverflow.com/a/12514470 by https://stackoverflow.com/users/23252/atzz In Python 3.8 and above :func:`shutil.copytree` takes a ``dirs_exist_ok`` argument, which has the same result. :param src: Source file to copy :param dst: Destination to copy file to :param symlinks: Whether to represent symbolic links in the source as symbolic links in the destination. If false or omitted, the contents and metadata of the linked files are copied to the new tree. When symlinks is false, if the file pointed by the symlink doesn't exist, an exception will be added in the list of errors raised in an Error exception at the end of the copy process. You can set the optional ignore_dangling_symlinks flag to true if you want to silence this exception. Notice that this option has no effect on platforms that don’t support :func:`os.symlink`. :param ignore: A callable that will receive as its arguments the source directory, and a list of its contents. The ignore callable will be called once for each directory that is copied. The callable must return a sequence of directory and file names relative to the current directory (i.e. a subset of the items in its second argument); these names will then be ignored in the copy process. 
:func:`shutil.ignore_patterns` can be used to create such a callable that ignores names based on glob-style patterns. """ for item in os.listdir(src): s = os.path.join(src, item) d = os.path.join(dst, item) if os.path.isdir(s): shutil.copytree(s, d, symlinks, ignore) else: shutil.copy2(s, d) return dst def delete(filename: PathLike, **kwargs): """ Delete the file in the current directory. .. TODO:: make this the file in the given directory, by default the current directory :param filename: The file to delete """ os.remove(os.path.join(os.getcwd(), filename), **kwargs) def maybe_make(directory: PathLike, mode: int = 0o777, parents: bool = False): """ Create a directory at the given path, but only if the directory does not already exist. .. attention:: This will fail silently if a file with the same name already exists. This appears to be due to the behaviour of :func:`os.mkdir`. :param directory: Directory to create :param mode: Combined with the process's umask value to determine the file mode and access flags :param parents: If :py:obj:`False` (the default), a missing parent raises a :class:`FileNotFoundError`. If :py:obj:`True`, any missing parents of this path are created as needed; they are created with the default permissions without taking mode into account (mimicking the POSIX ``mkdir -p`` command). :no-default parents: .. versionchanged:: 1.6.0 Removed the ``'exist_ok'`` option, since it made no sense in this context. """ if not isinstance(directory, pathlib.Path): directory = pathlib.Path(directory) try: directory.mkdir(mode, parents, exist_ok=True) except FileExistsError: pass def parent_path(path: PathLike) -> pathlib.Path: """ Returns the path of the parent directory for the given file or directory. :param path: Path to find the parent for :return: The parent directory """ if not isinstance(path, pathlib.Path): path = pathlib.Path(path) return path.parent def read(filename: PathLike, **kwargs) -> str: """ Read a file in the current directory (in text mode). .. TODO:: make this the file in the given directory, by default the current directory :param filename: The file to read from. :return: The contents of the file. """ kwargs.setdefault("encoding", "UTF-8") with open(os.path.join(os.getcwd(), filename), **kwargs) as f: # noqa: ENC001 return f.read() def relpath(path: PathLike, relative_to: Optional[PathLike] = None) -> pathlib.Path: """ Returns the path for the given file or directory relative to the given directory or, if that would require path traversal, returns the absolute path. :param path: Path to find the relative path for :param relative_to: The directory to find the path relative to. Defaults to the current directory. :no-default relative_to: """ # noqa: D400 if not isinstance(path, pathlib.Path): path = pathlib.Path(path) abs_path = path.absolute() if relative_to is None: relative_to = pathlib.Path().absolute() if not isinstance(relative_to, pathlib.Path): relative_to = pathlib.Path(relative_to) relative_to = relative_to.absolute() try: return abs_path.relative_to(relative_to) except ValueError: return abs_path def write(var: str, filename: PathLike, **kwargs) -> None: """ Write a variable to file in the current directory. .. TODO:: make this the file in the given directory, by default the current directory :param var: The value to write to the file. :param filename: The file to write to. 
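**Example** (an illustrative sketch; the file name is arbitrary):

.. code-block:: python

    >>> write("Hello World", "example.txt")
    >>> read("example.txt")
    'Hello World'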
""" kwargs.setdefault("encoding", "UTF-8") with open(os.path.join(os.getcwd(), filename), 'w', **kwargs) as f: # noqa: ENC001 f.write(var) def clean_writer(string: str, fp: IO) -> None: """ Write string to ``fp`` without trailing spaces. :param string: :param fp: """ # this package from domdf_python_tools.stringlist import StringList buffer = StringList(string) buffer.blankline(ensure_single=True) fp.write(str(buffer)) def make_executable(filename: PathLike) -> None: """ Make the given file executable. :param filename: """ if not isinstance(filename, pathlib.Path): filename = pathlib.Path(filename) st = os.stat(str(filename)) os.chmod(str(filename), st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH) @contextlib.contextmanager def in_directory(directory: PathLike): """ Context manager to change into the given directory for the duration of the ``with`` block. :param directory: """ # noqa: D400 oldwd = os.getcwd() try: os.chdir(str(directory)) yield finally: os.chdir(oldwd) class PathPlus(pathlib.Path): """ Subclass of :class:`pathlib.Path` with additional methods and a default encoding of UTF-8. Path represents a filesystem path but, unlike :class:`pathlib.PurePath`, also offers methods to do system calls on path objects. Depending on your system, instantiating a :class:`~.PathPlus` will return either a :class:`~.PosixPathPlus` or a :class:`~.WindowsPathPlus`. object. You can also instantiate a :class:`~.PosixPathPlus` or :class:`WindowsPath` directly, but cannot instantiate a :class:`~.WindowsPathPlus` on a POSIX system or vice versa. .. versionadded:: 0.3.8 .. versionchanged:: 0.5.1 Defaults to Unix line endings (``LF``) on all platforms. """ __slots__ = () if sys.version_info < (3, 11): _accessor = pathlib._normal_accessor # type: ignore _closed = False def _init(self, *args, **kwargs): pass @classmethod def _from_parts(cls, args, init=True): return super()._from_parts(args) # type: ignore def __new__(cls: Type[_PP], *args, **kwargs) -> _PP: # noqa: D102 if cls is PathPlus: cls = WindowsPathPlus if os.name == "nt" else PosixPathPlus # type: ignore return super().__new__(cls, *args, **kwargs) def make_executable(self) -> None: """ Make the file executable. .. versionadded:: 0.3.8 """ make_executable(self) def write_clean( self, string: str, encoding: Optional[str] = "UTF-8", errors: Optional[str] = None, ): """ Write to the file without trailing whitespace, and with a newline at the end of the file. .. versionadded:: 0.3.8 :param string: :param encoding: The encoding to write to the file in. :param errors: """ with self.open('w', encoding=encoding, errors=errors) as fp: clean_writer(string, fp) def maybe_make( self, mode: int = 0o777, parents: bool = False, ): """ Create a directory at this path, but only if the directory does not already exist. .. versionadded:: 0.3.8 :param mode: Combined with the process’ umask value to determine the file mode and access flags :param parents: If :py:obj:`False` (the default), a missing parent raises a :class:`FileNotFoundError`. If :py:obj:`True`, any missing parents of this path are created as needed; they are created with the default permissions without taking mode into account (mimicking the POSIX mkdir -p command). :no-default parents: .. versionchanged:: 1.6.0 Removed the ``'exist_ok'`` option, since it made no sense in this context. .. attention:: This will fail silently if a file with the same name already exists. This appears to be due to the behaviour of :func:`os.mkdir`. 
""" try: self.mkdir(mode, parents, exist_ok=True) except FileExistsError: pass def append_text( self, string: str, encoding: Optional[str] = "UTF-8", errors: Optional[str] = None, ): """ Open the file in text mode, append the given string to it, and close the file. .. versionadded:: 0.3.8 :param string: :param encoding: The encoding to write to the file in. :param errors: """ with self.open('a', encoding=encoding, errors=errors) as fp: fp.write(string) def write_text( self, data: str, encoding: Optional[str] = "UTF-8", errors: Optional[str] = None, newline: Optional[str] = NEWLINE_DEFAULT, ) -> int: """ Open the file in text mode, write to it, and close the file. .. versionadded:: 0.3.8 :param data: :param encoding: The encoding to write to the file in. :param errors: :param newline: :default newline: `universal newlines `__ for reading, Unix line endings (``LF``) for writing. .. versionchanged:: 3.1.0 Added the ``newline`` argument to match Python 3.10. (see :github:pull:`22420 `) """ if not isinstance(data, str): raise TypeError(f'data must be str, not {data.__class__.__name__}') with self.open(mode='w', encoding=encoding, errors=errors, newline=newline) as f: return f.write(data) def write_lines( self, data: Iterable[str], encoding: Optional[str] = "UTF-8", errors: Optional[str] = None, *, trailing_whitespace: bool = False ) -> None: """ Write the given list of lines to the file without trailing whitespace. .. versionadded:: 0.5.0 :param data: :param encoding: The encoding to write to the file in. :param errors: :param trailing_whitespace: If :py:obj:`True` trailing whitespace is preserved. .. versionchanged:: 2.4.0 Added the ``trailing_whitespace`` option. """ if trailing_whitespace: data = list(data) if data[-1].strip(): data.append('') self.write_text('\n'.join(data), encoding=encoding, errors=errors) else: self.write_clean('\n'.join(data), encoding=encoding, errors=errors) def read_text( self, encoding: Optional[str] = "UTF-8", errors: Optional[str] = None, ) -> str: """ Open the file in text mode, read it, and close the file. .. versionadded:: 0.3.8 :param encoding: The encoding to write to the file in. :param errors: :return: The content of the file. """ return super().read_text(encoding=encoding, errors=errors) def read_lines( self, encoding: Optional[str] = "UTF-8", errors: Optional[str] = None, ) -> List[str]: """ Open the file in text mode, return a list containing the lines in the file, and close the file. .. versionadded:: 0.5.0 :param encoding: The encoding to write to the file in. :param errors: :return: The content of the file. """ # noqa: D400 return self.read_text(encoding=encoding, errors=errors).split('\n') def open( # type: ignore # noqa: A003 # pylint: disable=redefined-builtin self, mode: str = 'r', buffering: int = -1, encoding: Optional[str] = "UTF-8", errors: Optional[str] = None, newline: Optional[str] = NEWLINE_DEFAULT, ) -> IO[Any]: """ Open the file pointed by this path and return a file object, as the built-in :func:`open` function does. .. versionadded:: 0.3.8 :param mode: The mode to open the file in. :default mode: ``'r'`` (read only) :param buffering: :param encoding: :param errors: :param newline: :default newline: `universal newlines `__ for reading, Unix line endings (``LF``) for writing. :rtype: .. versionchanged:: 0.5.1 Defaults to Unix line endings (``LF``) on all platforms. 
""" # noqa: D400 if 'b' in mode: encoding = None newline = None if newline is NEWLINE_DEFAULT: if 'r' in mode: newline = None else: newline = '\n' return super().open( mode, buffering=buffering, encoding=encoding, errors=errors, newline=newline, ) def dump_json( self, data: Any, encoding: Optional[str] = "UTF-8", errors: Optional[str] = None, json_library: JsonLibrary = json, # type: ignore *, compress: bool = False, **kwargs, ) -> None: r""" Dump ``data`` to the file as JSON. .. versionadded:: 0.5.0 :param data: The object to serialise to JSON. :param encoding: The encoding to write to the file in. :param errors: :param json_library: The JSON serialisation library to use. :default json_library: :mod:`json` :param compress: Whether to compress the JSON file using gzip. :param \*\*kwargs: Keyword arguments to pass to the JSON serialisation function. :rtype: .. versionchanged:: 1.0.0 Now uses :meth:`PathPlus.write_clean ` rather than :meth:`PathPlus.write_text `, and as a result returns :py:obj:`None` rather than :class:`int`. .. versionchanged:: 1.9.0 Added the ``compress`` keyword-only argument. """ if compress: with gzip.open(self, mode="wt", encoding=encoding, errors=errors) as fp: fp.write(json_library.dumps(data, **kwargs)) else: self.write_clean( json_library.dumps(data, **kwargs), encoding=encoding, errors=errors, ) def load_json( self, encoding: Optional[str] = "UTF-8", errors: Optional[str] = None, json_library: JsonLibrary = json, # type: ignore *, decompress: bool = False, **kwargs, ) -> Any: r""" Load JSON data from the file. .. versionadded:: 0.5.0 :param encoding: The encoding to write to the file in. :param errors: :param json_library: The JSON serialisation library to use. :default json_library: :mod:`json` :param decompress: Whether to decompress the JSON file using gzip. Will raise an exception if the file is not compressed. :param \*\*kwargs: Keyword arguments to pass to the JSON deserialisation function. :return: The deserialised JSON data. .. versionchanged:: 1.9.0 Added the ``compress`` keyword-only argument. """ if decompress: with gzip.open(self, mode="rt", encoding=encoding, errors=errors) as fp: content = fp.read() else: content = self.read_text(encoding=encoding, errors=errors) return json_library.loads( content, **kwargs, ) if sys.version_info < (3, 10): # pragma: no cover (py310+) def is_mount(self) -> bool: """ Check if this path is a POSIX mount point. .. versionadded:: 0.3.8 for Python 3.7 and above .. versionadded:: 0.11.0 for Python 3.6 """ # Need to exist and be a dir if not self.exists() or not self.is_dir(): return False # https://github.com/python/cpython/pull/18839/files try: parent_dev = self.parent.stat().st_dev except OSError: return False dev = self.stat().st_dev if dev != parent_dev: return True ino = self.stat().st_ino parent_ino = self.parent.stat().st_ino return ino == parent_ino if sys.version_info < (3, 8): # pragma: no cover (py38+) def rename(self: _P, target: Union[str, pathlib.PurePath]) -> _P: """ Rename this path to the target path. The target path may be absolute or relative. Relative paths are interpreted relative to the current working directory, *not* the directory of the Path object. .. versionadded:: 0.3.8 for Python 3.8 and above .. versionadded:: 0.11.0 for Python 3.6 and Python 3.7 :param target: :returns: The new Path instance pointing to the target path. 
""" os.rename(self, target) return self.__class__(target) def replace(self: _P, target: Union[str, pathlib.PurePath]) -> _P: """ Rename this path to the target path, overwriting if that path exists. The target path may be absolute or relative. Relative paths are interpreted relative to the current working directory, *not* the directory of the Path object. Returns the new Path instance pointing to the target path. .. versionadded:: 0.3.8 for Python 3.8 and above .. versionadded:: 0.11.0 for Python 3.6 and Python 3.7 :param target: :returns: The new Path instance pointing to the target path. """ os.replace(self, target) return self.__class__(target) def unlink(self, missing_ok: bool = False) -> None: """ Remove this file or link. If the path is a directory, use :meth:`~domdf_python_tools.paths.PathPlus.rmdir()` instead. .. versionadded:: 0.3.8 for Python 3.8 and above .. versionadded:: 0.11.0 for Python 3.6 and Python 3.7 """ try: os.unlink(self) except FileNotFoundError: if not missing_ok: raise def __enter__(self): return self def __exit__(self, t, v, tb): # https://bugs.python.org/issue39682 # In previous versions of pathlib, this method marked this path as # closed; subsequent attempts to perform I/O would raise an IOError. # This functionality was never documented, and had the effect of # making Path objects mutable, contrary to PEP 428. In Python 3.9 the # _closed attribute was removed, and this method made a no-op. # This method and __enter__()/__exit__() should be deprecated and # removed in the future. pass if sys.version_info < (3, 9): # pragma: no cover (py39+) def is_relative_to(self, *other: Union[str, os.PathLike]) -> bool: r""" Returns whether the path is relative to another path. .. versionadded:: 0.3.8 for Python 3.9 and above. .. latex:vspace:: -10px .. versionadded:: 1.4.0 for Python 3.6 and Python 3.7. .. latex:vspace:: -10px :param \*other: .. latex:vspace:: -20px :rtype: .. latex:vspace:: -20px """ try: self.relative_to(*other) return True except ValueError: return False def abspath(self) -> "PathPlus": """ Return the absolute version of the path. .. versionadded:: 1.3.0 """ return self.__class__(os.path.abspath(self)) def iterchildren( self: _PP, exclude_dirs: Optional[Iterable[str]] = unwanted_dirs, match: Optional[str] = None, matchcase: bool = True, ) -> Iterator[_PP]: """ Returns an iterator over all children (files and directories) of the current path object. .. versionadded:: 2.3.0 :param exclude_dirs: A list of directory names which should be excluded from the output, together with their children. :param match: A pattern to match filenames against. The pattern should be in the format taken by :func:`~.matchglob`. :param matchcase: Whether the filename's case should match the pattern. :rtype: .. versionchanged:: 2.5.0 Added the ``matchcase`` option. """ if not self.abspath().is_dir(): return if exclude_dirs is None: exclude_dirs = () if match and not os.path.isabs(match) and self.is_absolute(): match = (self / match).as_posix() file: _PP for file in self.iterdir(): parts = file.parts if any(d in parts for d in exclude_dirs): continue if match is None or (match is not None and matchglob(file, match, matchcase)): yield file if file.is_dir(): yield from file.iterchildren(exclude_dirs, match) @classmethod def from_uri(cls: Type[_PP], uri: str) -> _PP: """ Construct a :class:`~.PathPlus` from a ``file`` URI returned by :meth:`pathlib.PurePath.as_uri`. .. 
versionadded:: 2.9.0 :param uri: :rtype: :class:`~.PathPlus` """ parseresult = urllib.parse.urlparse(uri) if parseresult.scheme != "file": raise ValueError(f"Unsupported URI scheme {parseresult.scheme!r}") if parseresult.params or parseresult.query or parseresult.fragment: raise ValueError("Malformed file URI") if sys.platform == "win32": # pragma: no cover (!Windows) if parseresult.netloc: path = ''.join([ "//", urllib.parse.unquote_to_bytes(parseresult.netloc).decode("UTF-8"), urllib.parse.unquote_to_bytes(parseresult.path).decode("UTF-8"), ]) else: path = urllib.parse.unquote_to_bytes(parseresult.path).decode("UTF-8").lstrip('/') else: # pragma: no cover (Windows) if parseresult.netloc: raise ValueError("Malformed file URI") path = urllib.parse.unquote_to_bytes(parseresult.path).decode("UTF-8") return cls(path) def move(self: _PP, dst: PathLike) -> _PP: """ Recursively move ``self`` to ``dst``. ``self`` may be a file or a directory. See :func:`shutil.move` for more details. .. versionadded:: 3.2.0 :param dst: :returns: The new location of ``self``. :rtype: :class:`~.PathPlus` """ new_path = shutil.move(os.fspath(self), dst) return self.__class__(new_path) def stream(self, chunk_size: int = 1024) -> Iterator[bytes]: """ Stream the file in ``chunk_size`` sized chunks. :param chunk_size: The chunk size, in bytes .. versionadded:: 3.2.0 """ with self.open("rb") as fp: while True: chunk = fp.read(chunk_size) if not chunk: break yield chunk class PosixPathPlus(PathPlus, pathlib.PurePosixPath): """ :class:`~.PathPlus` subclass for non-Windows systems. On a POSIX system, instantiating a :class:`~.PathPlus` object should return an instance of this class. .. versionadded:: 0.3.8 """ __slots__ = () class WindowsPathPlus(PathPlus, pathlib.PureWindowsPath): """ :class:`~.PathPlus` subclass for Windows systems. On a Windows system, instantiating a :class:`~.PathPlus` object should return an instance of this class. .. versionadded:: 0.3.8 .. autoclasssumm:: WindowsPathPlus :autosummary-sections: ;; The following methods are unsupported on Windows: * :meth:`~pathlib.Path.group` * :meth:`~pathlib.Path.is_mount` * :meth:`~pathlib.Path.owner` """ __slots__ = () def owner(self): # pragma: no cover """ Unsupported on Windows. """ raise NotImplementedError("Path.owner() is unsupported on this system") def group(self): # pragma: no cover """ Unsupported on Windows. """ raise NotImplementedError("Path.group() is unsupported on this system") def is_mount(self): # pragma: no cover """ Unsupported on Windows. """ raise NotImplementedError("Path.is_mount() is unsupported on this system") def traverse_to_file(base_directory: _P, *filename: PathLike, height: int = -1) -> _P: r""" Traverse the parents of the given directory until the desired file is found. .. versionadded:: 1.7.0 :param base_directory: The directory to start searching from :param \*filename: The filename(s) to search for :param height: The maximum height to traverse to. """ if not filename: raise TypeError("traverse_to_file expected 2 or more arguments, got 1") for level, directory in enumerate((base_directory, *base_directory.parents)): if height > 0 and ((level - 1) > height): break for file in filename: if (directory / file).is_file(): return directory raise FileNotFoundError(f"'{filename[0]!s}' not found in {base_directory}") def matchglob(filename: PathLike, pattern: str, matchcase: bool = True) -> bool: """ Given a filename and a glob pattern, return whether the filename matches the glob. .. 
versionadded:: 2.3.0 :param filename: :param pattern: A pattern structured like a filesystem path, where each element consists of the glob syntax. Each element is matched by :mod:`fnmatch`. The special element ``**`` matches zero or more files or directories. :param matchcase: Whether the filename's case should match the pattern. :rtype: .. seealso:: :wikipedia:`Glob (programming)#Syntax` on Wikipedia .. versionchanged:: 2.5.0 Added the ``matchcase`` option. """ match_func = fnmatch.fnmatchcase if matchcase else fnmatch.fnmatch filename = PathPlus(filename) pattern_parts = deque(pathlib.PurePath(pattern).parts) filename_parts = deque(filename.parts) if not pattern_parts[-1]: pattern_parts.pop() while True: if not pattern_parts and not filename_parts: return True elif not pattern_parts and filename_parts: # Pattern exhausted but still filename elements return False pattern_part = pattern_parts.popleft() if pattern_part == "**" and not filename_parts: return True else: filename_part = filename_parts.popleft() if pattern_part == "**": if not pattern_parts: return True while pattern_part == "**": if not pattern_parts: return True pattern_part = pattern_parts.popleft() if pattern_parts and not filename_parts: # Filename must match everything after ** return False if match_func(filename_part, pattern_part): continue else: while not match_func(filename_part, pattern_part): if not filename_parts: return False filename_part = filename_parts.popleft() elif match_func(filename_part, pattern_part): continue else: return False class TemporaryPathPlus(tempfile.TemporaryDirectory): """ Securely creates a temporary directory using the same rules as :func:`tempfile.mkdtemp`. The resulting object can be used as a context manager. On completion of the context or destruction of the object the newly created temporary directory and all its contents are removed from the filesystem. Unlike :func:`tempfile.TemporaryDirectory` this class is based around a :class:`~.PathPlus` object. .. versionadded:: 2.4.0 .. autosummary-widths:: 6/16 """ name: PathPlus """ The temporary directory itself. This will be assigned to the target of the :keyword:`as` clause if the :class:`~.TemporaryPathPlus` is used as a context manager. """ def __init__( self, suffix: Optional[str] = None, prefix: Optional[str] = None, dir: Optional[PathLike] = None, # noqa: A002 # pylint: disable=redefined-builtin ) -> None: super().__init__(suffix, prefix, dir) self.name = PathPlus(self.name) def cleanup(self) -> None: """ Cleanup the temporary directory by removing it and its contents. If the :class:`~.TemporaryPathPlus` is used as a context manager this is called when leaving the :keyword:`with` block. """ context: ContextManager if sys.platform == "win32": # pragma: no cover (!Windows) context = contextlib.suppress(PermissionError, NotADirectoryError) else: # pragma: no cover (Windows) context = nullcontext() with context: super().cleanup() def __enter__(self) -> PathPlus: return self.name def sort_paths(*paths: PathLike) -> List[PathPlus]: """ Sort the ``paths`` by directory, then by file. .. 
versionadded:: 2.6.0 :param paths: """ directories: Dict[str, List[PathPlus]] = defaultdict(list) local_contents: List[PathPlus] = [] files: List[PathPlus] = [] for obj in [PathPlus(path) for path in paths]: if len(obj.parts) > 1: key = obj.parts[0] directories[key].append(obj) else: local_contents.append(obj) # sort directories directories = {directory: directories[directory] for directory in sorted(directories.keys())} for directory, contents in directories.items(): contents = [path.relative_to(directory) for path in contents] files.extend(PathPlus(directory) / path for path in sort_paths(*contents)) return files + sorted(local_contents, key=methodcaller("as_posix")) class DirComparator(filecmp.dircmp): r""" Compare the content of ``a`` and ``a``. In contrast with :class:`filecmp.dircmp`, this subclass compares the content of files with the same path. .. versionadded:: 2.7.0 :param a: The "left" directory to compare. :param b: The "right" directory to compare. :param ignore: A list of names to ignore. :default ignore: :py:obj:`filecmp.DEFAULT_IGNORES` :param hide: A list of names to hide. :default hide: ``[`` :py:obj:`os.curdir`, :py:obj:`os.pardir` ``]`` """ # From https://stackoverflow.com/a/24860799, public domain. # Thanks Philippe def __init__( self, a: PathLike, b: PathLike, ignore: Optional[Sequence[str]] = None, hide: Optional[Sequence[str]] = None, ): super().__init__(a, b, ignore=ignore, hide=hide) def phase3(self) -> None: # noqa: D102 # Find out differences between common files. # Ensure we are using content comparison with shallow=False. fcomp = filecmp.cmpfiles(self.left, self.right, self.common_files, shallow=False) self.same_files, self.diff_files, self.funny_files = fcomp def phase4(self) -> None: # noqa: D102 # Find out differences between common subdirectories # From https://github.com/python/cpython/pull/23424 self.subdirs = {} for x in self.common_dirs: a_x = os.path.join(self.left, x) b_x = os.path.join(self.right, x) self.subdirs[x] = self.__class__(a_x, b_x, self.ignore, self.hide) _methodmap = { "subdirs": phase4, "same_files": phase3, "diff_files": phase3, "funny_files": phase3, "common_dirs": filecmp.dircmp.phase2, "common_files": filecmp.dircmp.phase2, "common_funny": filecmp.dircmp.phase2, "common": filecmp.dircmp.phase1, "left_only": filecmp.dircmp.phase1, "right_only": filecmp.dircmp.phase1, "left_list": filecmp.dircmp.phase0, "right_list": filecmp.dircmp.phase0 } methodmap = _methodmap # type: ignore def compare_dirs(a: PathLike, b: PathLike) -> bool: """ Compare the content of two directory trees. .. versionadded:: 2.7.0 :param a: The "left" directory to compare. :param b: The "right" directory to compare. :returns: :py:obj:`False` if they differ, :py:obj:`True` is they are the same. """ compared = DirComparator(a, b) if compared.left_only or compared.right_only or compared.diff_files or compared.funny_files: return False for subdir in compared.common_dirs: if not compare_dirs(os.path.join(a, subdir), os.path.join(b, subdir)): return False return True domdf_python_tools-3.10.0/domdf_python_tools/pretty_print.py000066400000000000000000000204601475315453000245450ustar00rootroot00000000000000#!/usr/bin/env python # cython: language_level=3 # # utils.py """ Functions and classes for pretty printing. .. 
versionadded:: 0.10.0 """ # # Copyright © 2020 Dominic Davis-Foster # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. # IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, # DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR # OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE # OR OTHER DEALINGS IN THE SOFTWARE. # # Based on CPython. # Licensed under the Python Software Foundation License Version 2. # Copyright © 2001-2020 Python Software Foundation. All rights reserved. # Copyright © 2000 BeOpen.com. All rights reserved. # Copyright © 1995-2000 Corporation for National Research Initiatives. All rights reserved. # Copyright © 1991-1995 Stichting Mathematisch Centrum. All rights reserved. # # stdlib import sys from io import StringIO from typing import IO, Any, Callable, Iterator, MutableMapping, Optional, Tuple, Type, TypeVar try: # pragma: no cover # 3rd party from pprint36 import PrettyPrinter from pprint36._pprint import _safe_key # type: ignore supports_sort_dicts = True except ImportError: # stdlib from pprint import PrettyPrinter, _safe_key # type: ignore supports_sort_dicts = sys.version_info >= (3, 8) __all__ = ["FancyPrinter", "simple_repr"] _T = TypeVar("_T", bound=Type) class FancyPrinter(PrettyPrinter): """ Subclass of :class:`~.pprint.PrettyPrinter` with different formatting. :param indent: Number of spaces to indent for each level of nesting. :param width: Attempted maximum number of columns in the output. :param depth: The maximum depth to print out nested structures. :param stream: The desired output stream. If omitted (or :py:obj:`False`), the standard output stream available at construction will be used. :param compact: If :py:obj:`True`, several items will be combined in one line. :param sort_dicts: If :py:obj:`True`, dict keys are sorted. Only takes effect on Python 3.8 and later, or if `pprint36 `_ is installed. 
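**Example** (an illustrative sketch; unlike :class:`pprint.PrettyPrinter`, non-empty collections are opened with the bracket followed by a newline and closed with a trailing comma before the closing bracket):

.. code-block:: python

    >>> printer = FancyPrinter(width=40)
    >>> printer.pprint({"key": ["value1", "value2", "value3"]})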
""" def __init__( self, indent: int = 1, width: int = 80, depth: Optional[int] = None, stream: Optional[IO[str]] = None, *, compact: bool = False, sort_dicts: bool = True, ): if supports_sort_dicts: super().__init__( indent=indent, width=width, depth=depth, stream=stream, compact=compact, sort_dicts=sort_dicts, ) else: super().__init__( indent=indent, width=width, depth=depth, stream=stream, compact=compact, ) _dispatch: MutableMapping[Callable, Callable] _indent_per_level: int _format_items: Callable[[PrettyPrinter, Any, Any, Any, Any, Any, Any], None] _dispatch = dict(PrettyPrinter._dispatch) # type: ignore def _make_open(self, char: str, indent: int, obj): if self._indent_per_level > 1: the_indent = ' ' * (indent + 1) else: the_indent = ' ' * (indent + self._indent_per_level) if obj and not self._compact: # type: ignore return f"{char}\n{the_indent}" else: return char def _make_close(self, char: str, indent: int, obj): if obj and not self._compact: # type: ignore return f",\n{' ' * (indent + self._indent_per_level)}{char}" else: return char def _pprint_dict( self, object, # noqa: A002 # pylint: disable=redefined-builtin stream, indent, allowance, context, level, ): obj = object write = stream.write write(self._make_open('{', indent, obj)) if self._indent_per_level > 1: write((self._indent_per_level - 1) * ' ') if obj: self._format_dict_items( # type: ignore obj.items(), stream, indent, allowance + 1, context, level, ) write(self._make_close('}', indent, obj)) _dispatch[dict.__repr__] = _pprint_dict def _pprint_list(self, obj, stream, indent, allowance, context, level): stream.write(self._make_open('[', indent, obj)) self._format_items(obj, stream, indent, allowance + 1, context, level) stream.write(self._make_close(']', indent, obj)) _dispatch[list.__repr__] = _pprint_list def _pprint_tuple(self, obj, stream, indent, allowance, context, level): stream.write(self._make_open('(', indent, obj)) endchar = ",)" if len(obj) == 1 else self._make_close(')', indent, obj) self._format_items(obj, stream, indent, allowance + len(endchar), context, level) stream.write(endchar) _dispatch[tuple.__repr__] = _pprint_tuple def _pprint_set(self, obj, stream, indent, allowance, context, level): if not obj: stream.write(repr(obj)) return typ = obj.__class__ if typ is set: stream.write(self._make_open('{', indent, obj)) endchar = self._make_close('}', indent, obj) else: stream.write(typ.__name__ + f"({{\n{' ' * (indent + self._indent_per_level + len(typ.__name__) + 1)}") endchar = f",\n{' ' * (indent + self._indent_per_level + len(typ.__name__) + 1)}}})" indent += len(typ.__name__) + 1 obj = sorted(obj, key=_safe_key) self._format_items(obj, stream, indent, allowance + len(endchar), context, level) stream.write(endchar) _dispatch[set.__repr__] = _pprint_set _dispatch[frozenset.__repr__] = _pprint_set class Attributes: def __init__(self, obj: object, *attributes: str): self.obj = obj self.attributes = attributes def __iter__(self) -> Iterator[Tuple[str, Any]]: for attr in self.attributes: yield attr, getattr(self.obj, attr) def __len__(self) -> int: return len(self.attributes) def __repr__(self) -> str: return f"Attributes{self.attributes}" class ReprPrettyPrinter(FancyPrinter): _dispatch = dict(FancyPrinter._dispatch) def format_attributes(self, obj: Attributes): stream = StringIO() context = {} context[id(obj)] = 1 stream.write(f"(\n{self._indent_per_level * ' '}") if self._indent_per_level > 1: stream.write((self._indent_per_level - 1) * ' ') if obj: self._format_attribute_items(list(obj), stream, 0, 0 + 
1, context, 1) stream.write(f"\n{self._indent_per_level * ' '})") del context[id(obj)] return stream.getvalue() def _format_attribute_items(self, items, stream, indent, allowance, context, level): write = stream.write indent += self._indent_per_level delimnl = ",\n" + ' ' * indent last_index = len(items) - 1 for i, (key, ent) in enumerate(items): last = i == last_index write(key) write('=') self._format( # type: ignore ent, stream, indent + len(key) + 2, allowance if last else 1, context, level, ) if not last: write(delimnl) _default_formatter = ReprPrettyPrinter() def simple_repr(*attributes: str, show_module: bool = False, **kwargs): r""" Adds a simple ``__repr__`` method to the decorated class. :param attributes: The attributes to include in the ``__repr__``. :param show_module: Whether to show the name of the module in the ``__repr__``. :param \*\*kwargs: Keyword arguments passed on to :class:`pprint.PrettyPrinter`. """ def deco(obj: _T) -> _T: def __repr__(self) -> str: if kwargs: formatter = ReprPrettyPrinter(**kwargs) else: formatter = _default_formatter class_name = f"{type(self).__module__}.{type(self).__name__}" if show_module else type(self).__name__ return f"{class_name}{formatter.format_attributes(Attributes(self, *attributes))}" __repr__.__doc__ = f"Return a string representation of the :class:`~{obj.__module__}.{obj.__name__}`." __repr__.__name__ = "__repr__" __repr__.__module__ = obj.__module__ __repr__.__qualname__ = f"{obj.__module__}.__repr__" obj.__repr__ = __repr__ # type: ignore return obj return deco domdf_python_tools-3.10.0/domdf_python_tools/py.typed000066400000000000000000000000001475315453000231130ustar00rootroot00000000000000domdf_python_tools-3.10.0/domdf_python_tools/secrets.py000066400000000000000000000044001475315453000234460ustar00rootroot00000000000000# !/usr/bin/env python # # secrets.py """ Functions for working with secrets, such as API tokens. .. versionadded:: 0.4.6 """ # # Copyright © 2020 Dominic Davis-Foster # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. # IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, # DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR # OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE # OR OTHER DEALINGS IN THE SOFTWARE. # # this package from domdf_python_tools.doctools import prettify_docstrings __all__ = ["Secret"] @prettify_docstrings class Secret(str): """ Subclass of :py:class:`str` that guards against accidentally printing a secret to the terminal. The actual value of the secret is accessed via the ``.value`` attribute. The protection should be maintained even when the secret is in a list, tuple, set or dict, but you should still refrain from printing objects containing the secret. 
The secret overrides the :meth:`~.__eq__` method of :class:`str`, so: .. code-block:: python >>> Secret("Barry as FLUFL") == "Barry as FLUFL" True .. versionadded:: 0.4.6 .. autosummary-widths:: 1/2 """ __slots__ = ("value", ) value: str #: The actual value of the secret. def __new__(cls, value) -> "Secret": # noqa: D102 obj: Secret = super().__new__(cls, "") obj.value = str(value) return obj def __eq__(self, other) -> bool: return self.value == other def __hash__(self): return hash(self.value) domdf_python_tools-3.10.0/domdf_python_tools/stringlist.py000066400000000000000000000341661475315453000242140ustar00rootroot00000000000000#!/usr/bin/env python # # stringlist.py """ A list of strings that represent lines in a multiline string. .. versionchanged:: 1.0.0 :class:`~domdf_python_tools.typing.String` should now be imported from :mod:`domdf_python_tools.typing`. """ # # Copyright © 2020-2021 Dominic Davis-Foster # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. # IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, # DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR # OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE # OR OTHER DEALINGS IN THE SOFTWARE. # # stdlib from contextlib import contextmanager from itertools import chain from typing import Any, Iterable, Iterator, List, Reversible, Tuple, TypeVar, Union, cast, overload # this package from domdf_python_tools.doctools import prettify_docstrings from domdf_python_tools.typing import String, SupportsIndex from domdf_python_tools.utils import convert_indents __all__ = ["Indent", "StringList", "DelimitedList", "_SL", "splitlines", "joinlines"] _S = TypeVar("_S") _SL = TypeVar("_SL", bound="StringList") @prettify_docstrings class Indent: """ Represents an indent, having a symbol/type and a size. :param size: The indent size. :param type: The indent character. """ def __init__(self, size: int = 0, type: str = '\t'): # noqa: A002 # pylint: disable=redefined-builtin self.size = int(size) self.type = str(type) def __iter__(self) -> Iterator[Union[str, Any]]: """ Returns the size and type of the :class:`~domdf_python_tools.stringlist.Indent`. """ yield self.size yield self.type @property def size(self) -> int: """ The indent size. """ return self._size @size.setter def size(self, size: int) -> None: self._size = int(size) @property def type(self) -> str: """ The indent character. """ return self._type @type.setter def type(self, type: str) -> None: # noqa: A002 # pylint: disable=redefined-builtin if not str(type): raise ValueError("'type' cannot an empty string.") self._type = str(type) def __str__(self) -> str: """ Returns the :class:`~domdf_python_tools.stringlist.Indent` as a string. 
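**Example:**

.. code-block:: python

    >>> str(Indent(2, "    "))
    '        '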
""" return self.type * self.size def __repr__(self) -> str: """ Returns the string representation of the :class:`~domdf_python_tools.stringlist.Indent`. """ return f"{type(self).__name__}(size={self.size}, type={self.type!r})" def __eq__(self, other): if isinstance(other, Indent): return other.size == self.size and other.type == self.type elif isinstance(other, str): return str(self) == other elif isinstance(other, tuple): return tuple(self) == other else: return NotImplemented class StringList(List[str]): """ A list of strings that represent lines in a multiline string. :param iterable: Content to populate the StringList with. :param convert_indents: Whether indents at the start of lines should be converted. """ #: The indent to insert at the beginning of new lines. indent: Indent convert_indents: bool """ Whether indents at the start of lines should be converted. Only applies to lines added after this is enabled/disabled. Can only be used when the indent is ``'\\t'`` or ``'â£'``. """ def __init__( self, iterable: Iterable[String] = (), convert_indents: bool = False, ) -> None: if isinstance(iterable, str): iterable = iterable.split('\n') self.indent = Indent() self.convert_indents = convert_indents super().__init__([self._make_line(str(x)) for x in iterable]) def _make_line(self, line: str) -> str: if not str(self.indent_type).strip(" \t") and self.convert_indents: if self.indent_type == '\t': line = convert_indents(line, tab_width=1, from_=" ", to='\t') else: # pragma: no cover line = convert_indents(line, tab_width=1, from_='\t', to=self.indent_type) return f"{self.indent}{line}".rstrip() def append(self, line: String) -> None: """ Append a line to the end of the :class:`~domdf_python_tools.stringlist.StringList`. :param line: """ for inner_line in str(line).split('\n'): super().append(self._make_line(inner_line)) def extend(self, iterable: Iterable[String]) -> None: """ Extend the :class:`~domdf_python_tools.stringlist.StringList` with lines from ``iterable``. :param iterable: An iterable of string-like objects to add to the end of the :class:`~domdf_python_tools.stringlist.StringList`. """ for line in iterable: self.append(line) def copy(self: _SL) -> _SL: """ Returns a shallow copy of the :class:`~domdf_python_tools.stringlist.StringList`. Equivalent to ``a[:]``. :rtype: :class:`~domdf_python_tools.stringlist.StringList` """ return self.__class__(super().copy()) def count_blanklines(self) -> int: """ Returns a count of the blank lines in the :class:`~domdf_python_tools.stringlist.StringList`. .. versionadded:: 0.7.1 """ return self.count('') def insert(self, index: SupportsIndex, line: String) -> None: """ Insert a line into the :class:`~domdf_python_tools.stringlist.StringList` at the given position. :param index: :param line: .. versionchanged:: 3.2.0 Changed :class:`int` in the type annotation to :protocol:`~.SupportsIndex`. """ lines: List[str] index = index.__index__() if index < 0 or index > len(self): lines = str(line).split('\n') else: lines = cast(list, reversed(str(line).split('\n'))) for inner_line in lines: super().insert(index, self._make_line(inner_line)) @overload def __setitem__(self, index: SupportsIndex, line: String) -> None: ... @overload def __setitem__(self, index: slice, line: Iterable[String]) -> None: ... def __setitem__(self, index: Union[SupportsIndex, slice], line: Union[String, Iterable[String]]): """ Replaces the given line with new content. 
If the new content consists of multiple lines subsequent content in the :class:`~domdf_python_tools.stringlist.StringList` will be shifted down. :param index: :param line: .. versionchanged:: 3.2.0 Changed :class:`int` in the type annotation to :protocol:`~.SupportsIndex`. """ if isinstance(index, slice): line = cast(Iterable[String], line) if not isinstance(line, Reversible): line = tuple(line) for lline, idx in zip( reversed(line), reversed(range(index.start or 0, index.stop + 1, index.step or 1)), ): self[idx] = lline else: line = cast(String, line) index = index.__index__() if self and index < len(self): self.pop(index) if index < 0: index = len(self) + index + 1 self.insert(index, line) @overload def __getitem__(self, index: SupportsIndex) -> str: ... @overload def __getitem__(self: _SL, index: slice) -> _SL: ... def __getitem__(self: _SL, index: Union[SupportsIndex, slice]) -> Union[str, _SL]: r""" Returns the line with the given index. :param index: :rtype: :py:obj:`~typing.Union`\[:class:`str`, :class:`~domdf_python_tools.stringlist.StringList`\] .. versionchanged:: 1.8.0 Now returns a :class:`~domdf_python_tools.stringlist.StringList` when ``index`` is a :class:`slice`. .. versionchanged:: 3.2.0 Changed :class:`int` in the type annotation to :protocol:`~.SupportsIndex`. """ if isinstance(index, slice): return self.__class__(super().__getitem__(index)) else: return super().__getitem__(index) def blankline(self, ensure_single: bool = False): """ Append a blank line to the end of the :class:`~domdf_python_tools.stringlist.StringList`. :param ensure_single: Ensure only a single blank line exists after the previous line of text. """ if ensure_single: while self and not self[-1]: self.pop(-1) self.append('') def set_indent_size(self, size: int = 0): """ Sets the size of the indent to insert at the beginning of new lines. :param size: The indent size to use for new lines. """ self.indent.size = int(size) def set_indent_type(self, indent_type: str = '\t'): """ Sets the type of the indent to insert at the beginning of new lines. :param indent_type: The type of indent to use for new lines. """ self.indent.type = str(indent_type) def set_indent(self, indent: Union[String, Indent], size: int = 0): """ Sets the indent to insert at the beginning of new lines. :param indent: The :class:`~.Indent` to use for new lines, or the indent type. :param size: If ``indent`` is an indent type, the indent size to use for new lines. """ if isinstance(indent, Indent): if size: raise TypeError("'size' argument cannot be used when providing an 'Indent' object.") self.indent = indent else: self.indent = Indent(int(size), str(indent)) @property def indent_size(self) -> int: """ The current indent size. """ return int(self.indent.size) @indent_size.setter def indent_size(self, size: int) -> None: """ Sets the indent size. """ self.indent.size = int(size) @property def indent_type(self) -> str: """ The current indent type. """ return str(self.indent.type) @indent_type.setter def indent_type(self, type: str) -> None: # noqa: A002 # pylint: disable=redefined-builtin """ Sets the indent type. """ self.indent.type = str(type) def __str__(self) -> str: """ Returns the :class:`~domdf_python_tools.stringlist.StringList` as a string. """ return '\n'.join(self) def __bytes__(self) -> bytes: """ Returns the :class:`~domdf_python_tools.stringlist.StringList` as bytes. .. 
versionadded:: 2.1.0 """ return str(self).encode("UTF-8") def __eq__(self, other) -> bool: """ Returns whether the other object is equal to this :class:`~domdf_python_tools.stringlist.StringList`. """ if isinstance(other, str): return str(self) == other else: return super().__eq__(other) @contextmanager def with_indent(self, indent: Union[String, Indent], size: int = 0): """ Context manager to temporarily use a different indent. .. code-block:: python >>> sl = StringList() >>> with sl.with_indent(" ", 1): ... sl.append("Hello World") :param indent: The :class:`~.Indent` to use within the ``with`` block, or the indent type. :param size: If ``indent`` is an indent type, the indent size to use within the ``with`` block. """ original_indent: Tuple[int, str] = tuple(self.indent) # type: ignore try: self.set_indent(indent, size) yield finally: self.indent = Indent(*original_indent) @contextmanager def with_indent_size(self, size: int = 0): """ Context manager to temporarily use a different indent size. .. code-block:: python >>> sl = StringList() >>> with sl.with_indent_size(1): ... sl.append("Hello World") :param size: The indent size to use within the ``with`` block. """ original_indent_size = self.indent_size try: self.indent_size = size yield finally: self.indent_size = original_indent_size @contextmanager def with_indent_type(self, indent_type: str = '\t'): """ Context manager to temporarily use a different indent type. .. code-block:: python >>> sl = StringList() >>> with sl.with_indent_type(" "): ... sl.append("Hello World") :param indent_type: The type of indent to use within the ``with`` block. """ original_indent_type = self.indent_type try: self.indent_type = indent_type yield finally: self.indent_type = original_indent_type def splitlines(self, keepends: bool = False) -> List[str]: """ Analagous to :meth:`str.splitlines`. .. versionadded:: 3.8.0 """ if keepends: return [line + '\n' for line in self] else: return self class DelimitedList(List[_S]): """ Subclass of :class:`list` that supports custom delimiters in format strings. **Example:** .. code-block:: python >>> l = DelimitedList([1, 2, 3, 4, 5]) >>> format(l, ", ") '1, 2, 3, 4, 5' >>> f"Numbers: {l:, }" 'Numbers: 1, 2, 3, 4, 5' .. autoclasssumm:: DelimitedList :autosummary-sections: ;; .. versionadded:: 1.1.0 """ def __format__(self, format_spec: str) -> str: return format_spec.join([str(x) for x in self]) # pylint: disable=not-an-iterable def splitlines(string: str) -> List[Tuple[str, str]]: """ Split ``string`` into a list of two-element tuples, containing the line content and the newline character(s), if any. .. versionadded:: 3.2.0 :param string: :rtype: .. 
seealso:: :meth:`str.splitlines` and :func:`~.stringlist.joinlines` """ # noqa: D400 # Translated and adapted from https://github.com/python/cpython/blob/main/Objects/stringlib/split.h str_len: int = len(string) i: int = 0 j: int = 0 eol: int the_list: List[Tuple[str, str]] = [] while i < str_len: # Find a line and append it while i < str_len and string[i] not in "\n\r": i += 1 # Skip the line break reading CRLF as one line break eol = i if i < str_len: if (string[i] == '\r') and (i + 1 < str_len) and (string[i + 1] == '\n'): i += 2 else: i += 1 if j == 0 and eol == str_len and type(string) is str: # pylint: disable=unidiomatic-typecheck # No whitespace in string, so just use it as the_list[0] the_list.append((string, '')) break the_list.append((string[j:eol], string[eol:i])) j = i return the_list def joinlines(lines: List[Tuple[str, str]]) -> str: """ Given a list of two-element tuples, each containing a line and a newline character (or empty string), return a single string. .. versionadded:: 3.2.0 :param lines: :rtype: .. seealso:: :func:`~.stringlist.splitlines` """ # noqa: D400 return ''.join(chain.from_iterable(lines)) domdf_python_tools-3.10.0/domdf_python_tools/terminal.py000066400000000000000000000145671475315453000236300ustar00rootroot00000000000000#!/usr/bin/env python # # terminal.py """ Useful functions for terminal-based programs. .. versionchanged:: 2.0.0 :func:`domdf_python_tools.terminal.get_terminal_size` was removed. Use :func:`shutil.get_terminal_size` instead. """ # # Copyright © 2014-2020 Dominic Davis-Foster # # Parts of the docstrings based on the Python 3.8.2 Documentation # Licensed under the Python Software Foundation License Version 2. # Copyright © 2001-2020 Python Software Foundation. All rights reserved. # Copyright © 2000 BeOpen.com. All rights reserved. # Copyright © 1995-2000 Corporation for National Research Initiatives. All rights reserved. # Copyright © 1991-1995 Stichting Mathematisch Centrum. All rights reserved. # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. # IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, # DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR # OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE # OR OTHER DEALINGS IN THE SOFTWARE. # # "Echo" based on ChemPy (https://github.com/bjodah/chempy) # | Copyright (c) 2015-2018, Björn Dahlgren # | All rights reserved. # | # | Redistribution and use in source and binary forms, with or without modification, # | are permitted provided that the following conditions are met: # | # | Redistributions of source code must retain the above copyright notice, this # | list of conditions and the following disclaimer. 
# | # | Redistributions in binary form must reproduce the above copyright notice, this # | list of conditions and the following disclaimer in the documentation and/or # | other materials provided with the distribution. # | # | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND # | ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # | WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR # | ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES # | (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # | LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON # | ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # | SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # # stdlib import inspect import os import pprint import textwrap from shutil import get_terminal_size from typing import IO, Optional # this package from domdf_python_tools.words import CR __all__ = [ "clear", "br", "interrupt", "overtype", "get_terminal_size", "Echo", ] def clear() -> None: """ Clears the display. Works for Windows and POSIX, but does not clear the Python Interpreter or PyCharm's Console. """ if os.name == "nt": # pragma: no cover (!Windows) os.system("cls") # nosec: B607,B605 else: # pragma: no cover (!Linux) print("\u001bc", end='') def br() -> None: """ Prints a blank line. """ print('') def interrupt() -> None: """ Print the key combination needed to abort the script; dynamic depending on OS. Useful when you have a long-running script that you might want to interrupt part way through. **Example:** .. code-block:: python >>> interrupt() (Press Ctrl-C to quit at any time) """ print(f"(Press Ctrl-{'C' if os.name == 'nt' else 'D'} to quit at any time)") def overtype( *objects, sep: str = ' ', end: str = '', file: Optional[IO] = None, flush: bool = False, ) -> None: r""" Print ``*objects`` to the text stream ``file``, starting with ``'\\r'``, separated by ``sep`` and followed by ``end``. All non-keyword arguments are converted to strings like :class:`str` does and written to the stream, separated by ``sep`` and followed by ``end``. If no objects are given, :func:`~.overtype` will just write ``"\\r"``. .. TODO:: This does not currently work in the PyCharm console, at least on Windows :param \*objects: A list of strings or string-like objects to write to the terminal. :param sep: The separator between values. :param end: The final value to print. :param file: An object with a ``write(string)`` method. If not present or :py:obj:`None`, :py:obj:`sys.stdout` will be used. :no-default file: :param flush: If :py:obj:`True` the stream is forcibly flushed after printing. """ # noqa: D400 object0 = f"{CR}{objects[0]}" objects = (object0, *objects[1:]) print(*objects, sep=sep, end=end, file=file, flush=flush) class Echo: """ Context manager for echoing variable assignments (in CPython). :param indent: The indentation of the dictionary of variable assignments. 
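**Example** (an illustrative sketch; on leaving the block, a dictionary of the names assigned inside it is printed, indented by ``indent``):

.. code-block:: python

    >>> with Echo():
    ...     x = 1
    ...     y = "foo"
      {'x': 1, 'y': 'foo'}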
""" def __init__(self, indent: str = ' ' * 2): self.indent = indent frame = inspect.currentframe() if frame is None: # pragma: no cover raise ValueError("Unable to obtain the frame of the caller.") else: self.parent_frame = inspect.currentframe().f_back # type: ignore # TODO def __enter__(self): """ Called when entering the context manager. """ self.locals_on_entry = self.parent_frame.f_locals.copy() # type: ignore def __exit__(self, *args, **kwargs): """ Called when exiting the context manager. """ new_locals = { k: v for k, v in self.parent_frame.f_locals.items() # type: ignore if k not in self.locals_on_entry } print(textwrap.indent(pprint.pformat(new_locals), self.indent)) if __name__ == "__main__": # pragma: no cover size_x, size_y = get_terminal_size() print("width =", size_x, "height =", size_y) domdf_python_tools-3.10.0/domdf_python_tools/typing.py000066400000000000000000000163141475315453000233170ustar00rootroot00000000000000#!/usr/bin/env python # # typing.py """ Various type annotation aids. """ # # Copyright © 2020 Dominic Davis-Foster # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. # IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, # DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR # OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE # OR OTHER DEALINGS IN THE SOFTWARE. # # stdlib import os import pathlib from decimal import Decimal from typing import TYPE_CHECKING, Any, Callable, Dict, List, Optional, Tuple, Type, TypeVar, Union # 3rd party from typing_extensions import Protocol, runtime_checkable # this package import domdf_python_tools if TYPE_CHECKING or domdf_python_tools.__docs: # pragma: no cover # stdlib from json import JSONDecoder, JSONEncoder # 3rd party from pandas import DataFrame, Series Series.__module__ = "pandas" DataFrame.__module__ = "pandas" JSONDecoder.__module__ = "json" JSONEncoder.__module__ = "json" #: .. versionadded:: 1.0.0 FrameOrSeries = TypeVar("FrameOrSeries", "Series", "DataFrame") __all__ = [ "PathLike", "PathType", "AnyNumber", "WrapperDescriptorType", "MethodWrapperType", "MethodDescriptorType", "ClassMethodDescriptorType", "JsonLibrary", "HasHead", "String", "FrameOrSeries", "SupportsIndex", "SupportsLessThan", "SupportsLessEqual", "SupportsGreaterThan", "SupportsGreaterEqual", "check_membership", ] PathLike = Union[str, pathlib.Path, os.PathLike] """ Type hint for objects that represent filesystem paths. .. seealso:: :py:obj:`domdf_python_tools.typing.PathType` """ PathType = TypeVar("PathType", str, pathlib.Path, os.PathLike) """ Type variable for objects that represent filesystem paths. .. versionadded:: 2.2.0 .. seealso:: :py:obj:`domdf_python_tools.typing.PathLike` """ AnyNumber = Union[float, int, Decimal] """ Type hint for numbers. 
.. versionchanged:: 0.4.6 Moved from :mod:`domdf_python_tools.pagesizes` """ def check_membership(obj: Any, type_: Union[Type, object]) -> bool: r""" Check if the type of ``obj`` is one of the types in a :py:data:`typing.Union`, :class:`typing.Sequence` etc. :param obj: The object to check the type of :param type\_: A :class:`~typing.Type` that has members, such as a :class:`typing.List`, :py:data:`typing.Union` or :py:class:`typing.Sequence`. """ return isinstance(obj, type_.__args__) # type: ignore class JsonLibrary(Protocol): """ :class:`typing.Protocol` for libraries that implement the same API as :mod:`json`. Useful for annotating functions which take a JSON serialisation-deserialisation library as an argument. """ @staticmethod def dumps( obj: Any, *, skipkeys: bool = ..., ensure_ascii: bool = ..., check_circular: bool = ..., allow_nan: bool = ..., cls: Optional[Type["JSONEncoder"]] = ..., indent: Union[None, int, str] = ..., separators: Optional[Tuple[str, str]] = ..., default: Optional[Callable[[Any], Any]] = ..., sort_keys: bool = ..., **kwds: Any, ) -> str: """ Serialize ``obj`` to a JSON formatted :class:`str`. :param obj: :param skipkeys: :param ensure_ascii: :param check_circular: :param allow_nan: :param cls: :param indent: :param separators: :param default: :param sort_keys: :param kwds: """ @staticmethod def loads( s: Union[str, bytes], *, cls: Optional[Type["JSONDecoder"]] = ..., object_hook: Optional[Callable[[Dict[Any, Any]], Any]] = ..., parse_float: Optional[Callable[[str], Any]] = ..., parse_int: Optional[Callable[[str], Any]] = ..., parse_constant: Optional[Callable[[str], Any]] = ..., object_pairs_hook: Optional[Callable[[List[Tuple[Any, Any]]], Any]] = ..., **kwds: Any, ) -> Any: """ Deserialize ``s`` to a Python object. :param s: :param cls: :param object_hook: :param parse_float: :param parse_int: :param parse_constant: :param object_pairs_hook: :param kwds: :rtype: .. latex:clearpage:: """ # Backported from https://github.com/python/cpython/blob/master/Lib/types.py # Licensed under the Python Software Foundation License Version 2. # Copyright © 2001-2020 Python Software Foundation. All rights reserved. # Copyright © 2000 BeOpen.com. All rights reserved. # Copyright © 1995-2000 Corporation for National Research Initiatives. All rights reserved. # Copyright © 1991-1995 Stichting Mathematisch Centrum. All rights reserved. WrapperDescriptorType = type(object.__init__) MethodWrapperType = type(object().__str__) MethodDescriptorType = type(str.join) ClassMethodDescriptorType = type(dict.__dict__["fromkeys"]) @runtime_checkable class String(Protocol): """ :class:`~typing.Protocol` for classes that implement ``__str__``. .. versionchanged:: 0.8.0 Moved from :mod:`domdf_python_tools.stringlist`. """ def __str__(self) -> str: ... @runtime_checkable class HasHead(Protocol): """ :class:`typing.Protocol` for classes that have a ``head`` method. This includes :class:`pandas.DataFrame` and :class:`pandas.Series`. .. versionadded:: 0.8.0 """ def head(self: "HasHead", n: int = 5) -> "HasHead": """ Return the first n rows. :param n: Number of rows to select. :return: The first n rows of the caller object. """ def to_string(self, *args, **kwargs) -> Optional[str]: """ Render the object to a console-friendly tabular output. """ # class SupportsLessThan(Protocol): # # def __lt__(self, other: Any) -> bool: # ... # pragma: no cover class SupportsIndex(Protocol): """ :class:`typing.Protocol` for classes that support ``__index__``. .. 
versionadded:: 2.0.0 """ def __index__(self) -> int: ... class SupportsLessThan(Protocol): """ :class:`typing.Protocol` for classes that support ``__lt__``. .. versionadded:: 3.0.0 """ def __lt__(self, __other: Any) -> bool: """ Return ``self < value``. """ class SupportsLessEqual(Protocol): """ :class:`typing.Protocol` for classes that support ``__le__``. .. versionadded:: 3.0.0 """ def __le__(self, __other: Any) -> bool: """ Return ``self <= value``. """ class SupportsGreaterThan(Protocol): """ :class:`typing.Protocol` for classes that support ``__gt__``. .. versionadded:: 3.0.0 """ def __gt__(self, __other: Any) -> bool: """ Return ``self > value``. """ class SupportsGreaterEqual(Protocol): """ :class:`typing.Protocol` for classes that support ``__ge__``. .. versionadded:: 3.0.0 """ def __ge__(self, __other: Any) -> bool: """ Return ``self >= value``. """ domdf_python_tools-3.10.0/domdf_python_tools/utils.py000066400000000000000000000414731475315453000231510ustar00rootroot00000000000000#!/usr/bin/env python # cython: language_level=3 # # utils.py """ General utility functions. .. versionchanged:: 1.0.0 * Removed ``tuple2str`` and ``list2string``. Use :func:`domdf_python_tools.utils.list2str` instead. * Removed ``as_text`` and ``word_join``. Import from :mod:`domdf_python_tools.words` instead. * Removed ``splitLen``. Use :func:`domdf_python_tools.iterative.split_len` instead. .. versionchanged:: 2.0.0 :func:`~domdf_python_tools.iterative.chunks`, :func:`~domdf_python_tools.iterative.permutations`, :func:`~domdf_python_tools.iterative.split_len`, :func:`~domdf_python_tools.iterative.Len`, and :func:`~domdf_python_tools.iterative.double_chain` moved to :func:`domdf_python_tools.iterative`. .. versionchanged:: 2.3.0 Removed :func:`domdf_python_tools.utils.deprecated`. Use the new `deprecation-alias `_ package instead. """ # # Copyright © 2018-2022 Dominic Davis-Foster # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. # IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, # DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR # OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE # OR OTHER DEALINGS IN THE SOFTWARE. # # as_text from https://stackoverflow.com/a/40935194 # Copyright © 2016 User3759685 # Available under the MIT License # # strtobool based on the "distutils" module from CPython. # Some docstrings based on the Python documentation. # Licensed under the Python Software Foundation License Version 2. # Copyright © 2001-2020 Python Software Foundation. All rights reserved. # Copyright © 2000 BeOpen.com. All rights reserved. # Copyright © 1995-2000 Corporation for National Research Initiatives. All rights reserved. # Copyright © 1991-1995 Stichting Mathematisch Centrum. All rights reserved. 
# # stdlib import contextlib import inspect import json import re import sys from io import StringIO from math import log10 from pprint import pformat from types import MethodType from typing import ( IO, TYPE_CHECKING, Any, Callable, Dict, Iterable, Iterator, List, Optional, Pattern, Set, Tuple, TypeVar, Union, overload ) # this package import domdf_python_tools.words from domdf_python_tools.typing import HasHead, String, SupportsLessThan if TYPE_CHECKING or domdf_python_tools.__docs: # pragma: no cover # 3rd party from pandas import DataFrame, Series Series.__module__ = "pandas" DataFrame.__module__ = "pandas" _T = TypeVar("_T") SupportsLessThanT = TypeVar("SupportsLessThanT", bound=SupportsLessThan) __all__ = [ "pyversion", "SPACE_PLACEHOLDER", "cmp", "list2str", "printr", "printt", "stderr_writer", "printe", "str2tuple", "strtobool", "enquote_value", "posargs2kwargs", "convert_indents", "etc", "head", "magnitude", "trim_precision", "double_repr_string", "redirect_output", "divide", "redivide", "unique_sorted", "replace_nonprinting", ] #: The current major python version. pyversion: int = int(sys.version_info.major) # Python Version #: The ``â£`` character. SPACE_PLACEHOLDER = 'â£' def cmp(x, y) -> int: """ Implementation of ``cmp`` for Python 3. Compare the two objects x and y and return an integer according to the outcome. The return value is negative if ``x < y``, zero if ``x == y`` and strictly positive if ``x > y``. """ return int((x > y) - (x < y)) def list2str(the_list: Iterable[Any], sep: str = ',') -> str: """ Convert an iterable, such as a list, to a comma separated string. :param the_list: The iterable to convert to a string. :param sep: Separator to use for the string. :return: Comma separated string """ return sep.join([str(x) for x in the_list]) def printr( obj: Any, *values: object, sep: Optional[str] = ' ', end: Optional[str] = '\n', file: Optional[IO] = None, flush: bool = False, ) -> None: r""" Print the :func:`repr` of an object. If no objects are given, :func:`~.printr` will just write ``end``. :param obj: :param \*values: Additional values to print. These are printed verbatim. :param sep: The separator between values. :param end: The final value to print. Setting to ``''`` will leave the insertion point at the end of the printed text. :param file: The file to write to. If not present or :py:obj:`None`, :py:obj:`sys.stdout` will be used. :no-default file: :param flush: If :py:obj:`True` the stream is forcibly flushed after printing. """ print(repr(obj), *values, sep=sep, end=end, file=file, flush=flush) def printt( obj: Any, *values: object, sep: Optional[str] = ' ', end: Optional[str] = '\n', file: Optional[IO] = None, flush: bool = False, ) -> None: r""" Print the type of an object. If no objects are given, :func:`~.printt` will just write ``end``. :param obj: :param \*values: Additional values to print. These are printed verbatim. :param sep: The separator between values. :param end: The final value to print. Setting to ``''`` will leave the insertion point at the end of the printed text. :param file: The file to write to. If not present or :py:obj:`None`, :py:obj:`sys.stdout` will be used. :no-default file: :param flush: If :py:obj:`True` the stream is forcibly flushed after printing. """ print(type(obj), *values, sep=sep, end=end, file=file, flush=flush) def stderr_writer( *values: object, sep: Optional[str] = ' ', end: Optional[str] = '\n', ) -> None: r""" Print ``*values`` to :py:obj:`sys.stderr`, separated by ``sep`` and followed by ``end``. 
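For example (illustrative):

.. code-block:: python

    stderr_writer("error:", "something went wrong")
    # writes "error: something went wrong\n" to sys.stderr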
:py:obj:`sys.stdout` is flushed before printing, and :py:obj:`sys.stderr` is flushed afterwards. If no objects are given, :func:`~.stderr_writer` will just write ``end``. :param \*values: :param sep: The separator between values. :param end: The final value to print. Setting to ``''`` will leave the insertion point at the end of the printed text. :rtype: .. versionchanged:: 3.0.0 The only permitted keyword arguments are ``sep`` and ``end``. Previous versions allowed other keywords arguments supported by :func:`print` but they had no effect. """ sys.stdout.flush() print(*values, sep=sep, end=end, file=sys.stderr, flush=True) sys.stderr.flush() #: Alias of :func:`~.stderr_writer` printe = stderr_writer def str2tuple(input_string: str, sep: str = ',') -> Tuple[int, ...]: """ Convert a comma-separated string of integers into a tuple. .. latex:vspace:: -10px .. important:: The input string must represent a comma-separated series of integers. .. TODO:: Allow custom types, not just :class:`int` (making :class:`int` the default) .. latex:vspace:: -20px :param input_string: The string to be converted into a tuple :param sep: The separator in the string. """ return tuple(int(x) for x in input_string.split(sep)) def strtobool(val: Union[str, int]) -> bool: """ Convert a string representation of truth to :py:obj:`True` or :py:obj:`False`. If val is an integer then its boolean representation is returned. If val is a boolean it is returned as-is. :py:obj:`True` values are ``'y'``, ``'yes'``, ``'t'``, ``'true'``, ``'on'``, ``'1'``, and ``1``. :py:obj:`False` values are ``'n'``, ``'no'``, ``'f'``, ``'false'``, ``'off'``, ``'0'``, and ``0``. :raises: :py:exc:`ValueError` if ``val`` is anything else. """ if isinstance(val, int): return bool(val) val = val.lower() if val in {'y', "yes", 't', "true", "on", '1'}: return True elif val in {'n', "no", 'f', "false", "off", '0'}: return False else: raise ValueError(f"invalid truth value {val!r}") def enquote_value(value: Any) -> Union[str, bool, float]: """ Adds single quotes (``'``) to the given value, suitable for use in a templating system such as Jinja2. :class:`Floats `, :class:`integers `, :class:`booleans `, :py:obj:`None`, and the strings ``'True'``, ``'False'`` and ``'None'`` are returned as-is. :param value: The value to enquote """ if value in {"True", "False", "None", True, False, None}: return value elif isinstance(value, (int, float)): return value elif isinstance(value, str): return repr(value) else: return f"'{value}'" def posargs2kwargs( args: Iterable[Any], posarg_names: Union[Iterable[str], Callable], kwargs: Optional[Dict[str, Any]] = None, ) -> Dict[str, Any]: """ Convert the positional args in ``args`` to kwargs, based on the relative order of ``args`` and ``posarg_names``. .. important:: Python 3.8's Positional-Only Parameters (:pep:`570`) are not supported. .. versionadded:: 0.4.10 :param args: List of positional arguments provided to a function. :param posarg_names: Either a list of positional argument names for the function, or the function object. :param kwargs: Optional mapping of keyword argument names to values. The arguments will be added to this dictionary if provided. :default kwargs: ``{}`` :return: Dictionary mapping argument names to values. .. versionchanged:: 2.8.0 The "self" argument for bound methods is ignored. For unbound methods (which are just functions) the behaviour is unchanged. 
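For example (an illustrative sketch; ``rect`` is just a stand-in function):

.. code-block:: python

    >>> def rect(width, height, fill=True):
    ...     pass
    >>> posargs2kwargs((10, 20), rect)
    {'width': 10, 'height': 20}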
""" if kwargs is None: kwargs = {} self_arg = None if isinstance(posarg_names, MethodType): self_arg, *posarg_names = inspect.getfullargspec(posarg_names).args elif callable(posarg_names): posarg_names = inspect.getfullargspec(posarg_names).args for name, arg_value in zip(posarg_names, args): if name in kwargs: if isinstance(posarg_names, MethodType): raise TypeError(f"{posarg_names.__name__}(): got multiple values for argument '{name}'") else: raise TypeError(f"got multiple values for argument '{name}'") kwargs.update(zip(posarg_names, args)) if self_arg is not None and self_arg in kwargs: del kwargs[self_arg] # TODO: positional only arguments return kwargs def convert_indents(text: str, tab_width: int = 4, from_: str = '\t', to: str = ' ') -> str: r""" Convert indentation at the start of lines in ``text`` from tabs to spaces. :param text: The text to convert indents in. :param tab_width: The number of spaces per tab. :param from\_: The indent to convert from. :param to: The indent to convert to. """ output = [] tab = to * tab_width from_size = len(from_) for line in text.splitlines(): indent_count = 0 while line.startswith(from_): indent_count += 1 line = line[from_size:] output.append(f"{tab * indent_count}{line}") return '\n'.join(output) class _Etcetera(str): __slots__ = () def __new__(cls): return str.__new__(cls, "...") def __repr__(self) -> str: return str(self) etc = _Etcetera() """ Object that provides an ellipsis string .. versionadded:: 0.8.0 """ def head(obj: Union[Tuple, List, "DataFrame", "Series", String, HasHead], n: int = 10) -> Optional[str]: """ Returns the head of the given object. .. versionadded:: 0.8.0 :param obj: :param n: Show the first ``n`` items of ``obj``. .. seealso:: * :func:`textwrap.shorten`, which truncates a string to fit within a given number of characters. * :func:`itertools.islice`, which returns the first ``n`` elements from an iterator. """ if isinstance(obj, tuple) and hasattr(obj, "_fields"): # Likely a namedtuple if len(obj) <= n: return repr(obj) else: head_of_namedtuple = {k: v for k, v in zip(obj._fields[:n], obj[:n])} # type: ignore repr_fmt = '(' + ", ".join(f"{k}={v!r}" for k, v in head_of_namedtuple.items()) + f", {etc})" return obj.__class__.__name__ + repr_fmt elif isinstance(obj, (list, tuple)): if len(obj) > n: return pformat(obj.__class__((*obj[:n], etc))) else: return pformat(obj) elif isinstance(obj, HasHead): return obj.head(n).to_string() elif len(obj) <= n: # type: ignore return str(obj) else: return str(obj[:n]) + etc # type: ignore def magnitude(x: float) -> int: """ Returns the magnitude of the given value. * For negative numbers the absolute magnitude is returned. * For decimal numbers below ``1`` the magnitude will be negative. .. versionadded:: 2.0.0 :param x: Numerical value to find the magnitude of. """ if x > 0.0: return int(log10(x)) elif x < 0.0: return int(log10(abs(x))) else: return 0 def trim_precision(value: float, precision: int = 4) -> float: """ Trim the precision of the given floating point value. For example, if you have the value `170.10000000000002` but really only care about it being ``\u2248 179.1``: .. code-block:: python >>> trim_precision(170.10000000000002, 2) 170.1 >>> type(trim_precision(170.10000000000002, 2)) .. versionadded:: 2.0.0 :param value: :param precision: The number of decimal places to leave in the output. """ return float(format(value, f"0.{precision}f")) def double_repr_string(string: str) -> str: """ Like :func:`repr(str) `, but tries to use double quotes instead. .. 
versionadded:: 2.5.0 :param string: """ # figure out which quote to use; double is preferred if '"' in string and "'" not in string: return repr(string) else: return json.dumps(string, ensure_ascii=False) @contextlib.contextmanager def redirect_output(combine: bool = False) -> Iterator[Tuple[StringIO, StringIO]]: """ Context manager to redirect stdout and stderr to two :class:`io.StringIO` objects. These are assigned (as a :class:`tuple`) to the target the :keyword:`as` expression. Example: .. code-block:: python with redirect_output() as (stdout, stderr): ... .. versionadded:: 2.6.0 :param combine: If :py:obj:`True` ``stderr`` is combined with ``stdout``. """ if combine: stdout = stderr = StringIO() else: stdout = StringIO() stderr = StringIO() with contextlib.redirect_stdout(stdout), contextlib.redirect_stderr(stderr): yield stdout, stderr def divide(string: str, sep: str) -> Tuple[str, str]: """ Divide a string into two parts, about the given string. .. versionadded:: 2.7.0 :param string: :param sep: The separator to split at. """ if sep not in string: raise ValueError(f"{sep!r} not in {string!r}") parts = string.split(sep, 1) return tuple(parts) # type: ignore def redivide(string: str, pat: Union[str, Pattern]) -> Tuple[str, str]: """ Divide a string into two parts, splitting on the given regular expression. .. versionadded:: 2.7.0 :param string: :param pat: :rtype: .. latex:clearpage:: """ if isinstance(pat, str): pat = re.compile(pat) if not pat.search(string): raise ValueError(f"{pat!r} has no matches in {string!r}") parts = pat.split(string, 1) return tuple(parts) # type: ignore @overload def unique_sorted( elements: Iterable[SupportsLessThanT], *, key: None = ..., reverse: bool = ..., ) -> List[SupportsLessThanT]: ... @overload def unique_sorted( elements: Iterable[_T], *, key: Callable[[_T], SupportsLessThan], reverse: bool = ..., ) -> List[_T]: ... def unique_sorted( elements: Iterable, *, key: Optional[Callable] = None, reverse: bool = False, ) -> List: """ Returns an ordered list of unique items from ``elements``. .. versionadded:: 3.0.0 :param elements: :param key: A function of one argument used to extract a comparison key from each item when sorting. For example, :meth:`key=str.lower `. The default value is :py:obj:`None`, which will compare the elements directly. :param reverse: If :py:obj:`True` the list elements are sorted as if each comparison were reversed. .. seealso:: :class:`set` and :func:`sorted` """ return sorted(set(elements), key=key, reverse=reverse) def replace_nonprinting(string: str, exclude: Optional[Set[int]] = None) -> str: """ Replace nonprinting (control) characters in ``string`` with ``^`` and ``M-`` notation. .. versionadded:: 3.3.0 :param string: :param exclude: A set of codepoints to exclude. :rtype: .. seealso:: :wikipedia:`C0 and C1 control codes` on Wikipedia """ # https://stackoverflow.com/a/44952259 if exclude is None: exclude = set() translation_map = {} for codepoint in range(32): if codepoint not in exclude: translation_map[codepoint] = f"^{chr(64 + codepoint)}" if 127 not in exclude: translation_map[127] = "^?" for codepoint in range(128, 256): if codepoint not in exclude: translation_map[codepoint] = f"M+{chr(codepoint-64)}" return string.translate(translation_map) domdf_python_tools-3.10.0/domdf_python_tools/versions.py000066400000000000000000000174051475315453000236570ustar00rootroot00000000000000#!/usr/bin/env python # # versions.py """ NamedTuple-like class to represent a version number. .. 
versionadded:: 0.4.4 """ # # Copyright © 2020 Dominic Davis-Foster # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. # IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, # DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR # OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE # OR OTHER DEALINGS IN THE SOFTWARE. # # stdlib import re from typing import Dict, Generator, Iterable, Sequence, Tuple, Type, TypeVar, Union # 3rd party from typing_extensions import final __all__ = ["Version"] _V = TypeVar("_V", bound="Version") @final class Version(Tuple[int, int, int]): """ NamedTuple-like class to represent a version number. :param major: .. versionchanged:: 1.4.0 Implemented the same interface as a :func:`collections.namedtuple`. """ __slots__ = () #: The major version number. major: int #: The minor version number. minor: int #: The patch version number. patch: int _fields: Tuple[str, str, str] = ("major", "minor", "patch") """ Tuple of strings listing the field names. Useful for introspection and for creating new named tuple types from existing named tuples. .. versionadded:: 1.4.0 """ _field_defaults: Dict[str, int] = {"major": 0, "minor": 0, "patch": 0} """ Dictionary mapping field names to default values. .. versionadded:: 1.4.0 """ @property # type: ignore def major(self): # noqa: D102 return self[0] @property # type: ignore def minor(self): # noqa: D102 return self[1] @property # type: ignore def patch(self): # noqa: D102 return self[2] def __new__(cls: Type[_V], major=0, minor=0, patch=0) -> _V: # noqa: D102 t: _V = super().__new__(cls, (int(major), int(minor), int(patch))) # type: ignore return t def __repr__(self) -> str: """ Return the representation of the version. """ repr_fmt = '(' + ", ".join(f"{name}=%r" for name in self._fields) + ')' return self.__class__.__name__ + repr_fmt % self def __str__(self) -> str: """ Return version as a string. """ return 'v' + '.'.join(str(x) for x in self) # pylint: disable=not-an-iterable def __float__(self) -> float: """ Return the major and minor version number as a float. """ return float('.'.join(str(x) for x in self[:2])) def __int__(self) -> int: """ Return the major version number as an integer. """ return self.major def __getnewargs__(self): """ Return Version as a plain tuple. Used by copy and pickle. """ return tuple(self) def __eq__(self, other) -> bool: """ Returns whether this version is equal to the other version. 
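For example (illustrative):

.. code-block:: python

    >>> Version(1, 2, 3) == "1.2.3"
    True
    >>> Version(1, 2, 3) == "1.2"  # only the common-length prefix is compared
    True
    >>> Version(1, 2) == 1.2
    True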
:type other: :class:`str`, :class:`float`, :class:`~.Version` """ other = _prep_for_eq(other) if other is NotImplemented: return NotImplemented # pragma: no cover else: shortest = min(len(self), (len(other))) return self[:shortest] == other[:shortest] def __gt__(self, other) -> bool: """ Returns whether this version is greater than the other version. :type other: :class:`str`, :class:`float`, :class:`~.Version` """ other = _prep_for_eq(other) if other is NotImplemented: return NotImplemented # pragma: no cover else: return tuple(self) > other def __lt__(self, other) -> bool: """ Returns whether this version is less than the other version. :type other: :class:`str`, :class:`float`, :class:`~.Version` """ other = _prep_for_eq(other) if other is NotImplemented: return NotImplemented # pragma: no cover else: return tuple(self) < other def __ge__(self, other) -> bool: """ Returns whether this version is greater than or equal to the other version. :type other: :class:`str`, :class:`float`, :class:`~.Version` """ other = _prep_for_eq(other) if other is NotImplemented: return NotImplemented # pragma: no cover else: return tuple(self)[:len(other)] >= other def __le__(self, other) -> bool: """ Returns whether this version is less than or equal to the other version. :type other: :class:`str`, :class:`float`, :class:`~.Version` """ other = _prep_for_eq(other) if other is NotImplemented: return NotImplemented # pragma: no cover else: return tuple(self)[:len(other)] <= other @classmethod def from_str(cls: Type[_V], version_string: str) -> _V: """ Create a :class:`~.Version` from a :class:`str`. :param version_string: The version number. :return: The created :class:`~domdf_python_tools.versions.Version`. """ return cls(*_iter_string(version_string)) @classmethod def from_tuple(cls: Type[_V], version_tuple: Tuple[Union[str, int], ...]) -> _V: """ Create a :class:`~.Version` from a :class:`tuple`. :param version_tuple: The version number. :return: The created :class:`~domdf_python_tools.versions.Version`. .. versionchanged:: 0.9.0 Tuples with more than three elements are truncated. Previously a :exc:`TypeError` was raised. """ return cls(*(int(x) for x in version_tuple[:3])) @classmethod def from_float(cls: Type[_V], version_float: float) -> _V: """ Create a :class:`~.Version` from a :class:`float`. :param version_float: The version number. :return: The created :class:`~domdf_python_tools.versions.Version`. """ return cls.from_str(str(version_float)) def _asdict(self) -> Dict[str, int]: """ Return a new dict which maps field names to their corresponding values. .. versionadded:: 1.4.0 """ return { "major": self.major, "minor": self.minor, "patch": self.patch, } def _replace(self: _V, **kwargs) -> _V: """ Return a new instance of the named tuple replacing specified fields with new values. .. versionadded:: 1.4.0 :param kwargs: """ return self.__class__(**{**self._asdict(), **kwargs}) @classmethod def _make(cls: Type[_V], iterable: Iterable[Union[str, int]]) -> _V: """ Class method that makes a new instance from an existing sequence or iterable. .. versionadded:: 1.4.0 :param iterable: """ return cls(*(int(x) for x in tuple(iterable)[:3])) def _iter_string(version_string: str) -> Generator[int, None, None]: """ Iterate over the version elements from a string. :param version_string: The version as a string. :return: Iterable elements of the version. 
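For example (illustrative; this helper is used internally by :meth:`Version.from_str`):

.. code-block:: python

    >>> list(_iter_string("1.2.3"))
    [1, 2, 3]
    >>> list(_iter_string("4,5"))
    [4, 5]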
""" return (int(x) for x in re.split("[.,]", version_string)) def _iter_float(version_float: float) -> Generator[int, None, None]: """ Iterate over the version elements from a float. :param version_float: The version as a float. :return: Iterable elements of the version. """ return _iter_string(str(version_float)) def _prep_for_eq(other: Union[str, float, Version], ) -> Tuple[int, ...]: """ Prepare 'other' for use in ``__eq__``, ``__le__``, ``__ge__``, ``__gt__``, and ``__lt__``. """ if isinstance(other, str): return tuple(_iter_string(other)) elif isinstance(other, (Version, Sequence)): return tuple(int(x) for x in other) elif isinstance(other, (int, float)): return tuple(_iter_float(other)) else: # pragma: no cover return NotImplemented domdf_python_tools-3.10.0/domdf_python_tools/words.py000066400000000000000000000524301475315453000231420ustar00rootroot00000000000000#!/usr/bin/env python # # words.py """ Functions for working with (English) words. .. versionadded:: 0.4.5 """ # # Copyright © 2020-2022 Dominic Davis-Foster # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. # IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, # DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR # OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE # OR OTHER DEALINGS IN THE SOFTWARE. # # List of 10000 English words from https://github.com/first20hours/google-10000-english/ # Derived from the Google Web Trillion Word Corpus, # as described by Thorsten Brants and Alex Franz, # and distributed by the Linguistic Data Consortium. # # Subsets of this corpus distributed by Peter Novig. # Corpus editing and cleanup by Josh Kaufman. # # stdlib import functools import random import re import sys from gettext import ngettext from reprlib import recursive_repr from string import ascii_lowercase, ascii_uppercase from typing import Any, Dict, Iterable, List, NamedTuple, Optional, Tuple # this package import domdf_python_tools from domdf_python_tools.compat import PYPY from domdf_python_tools.doctools import prettify_docstrings __all__ = [ "greek_uppercase", "greek_lowercase", "get_words_list", "get_random_word", "make_font", "Font", "SERIF_BOLD_LETTERS", "SERIF_ITALIC_LETTERS", "SERIF_BOLD_ITALIC_LETTERS", "SANS_SERIF_LETTERS", "SANS_SERIF_BOLD_LETTERS", "SANS_SERIF_ITALIC_LETTERS", "SANS_SERIF_BOLD_ITALIC_LETTERS", "SCRIPT_LETTERS", "FRAKTUR_LETTERS", "MONOSPACE_LETTERS", "DOUBLESTRUCK_LETTERS", "alpha_sort", "as_text", "word_join", "TAB", "CR", "LF", "Plural", "PluralPhrase", "truncate_string", ] ascii_digits = "0123456789" """ ASCII numbers. .. versionadded:: 0.7.0 """ greek_uppercase = "ΑΒΓΔΕΖΗΘΙΚΛΜÎΞΟΠΡϴΣΤΥΦΧΨΩ" """ Uppercase Greek letters. .. 
versionadded:: 0.7.0 """ greek_lowercase = "αβγδεζηθικλμνξοπÏςστυφχψω∂ϵϑϰϕϱϖ" """ Lowercase Greek letters. .. versionadded:: 0.7.0 """ @functools.lru_cache() def get_words_list(min_length: int = 0, max_length: int = -1) -> List[str]: """ Returns the list of words, optionally only those whose length is between ``min_length`` and ``max_length``. .. versionadded:: 0.4.5 :param min_length: The minimum length of the words to return :param max_length: The maximum length of the words to return. A value of ``-1`` indicates no upper limit. :no-default max_length: :return: The list of words meeting the above specifiers. """ # noqa: D400 # this package from domdf_python_tools.compat import importlib_resources words: str = importlib_resources.read_text("domdf_python_tools", "google-10000-english-no-swears.txt") words_list: List[str] = words.splitlines() if min_length > 0 or max_length != -1: if max_length == -1: words_list = [word for word in words_list if min_length <= len(word)] else: words_list = [word for word in words_list if min_length <= len(word) <= max_length] return words_list def get_random_word(min_length: int = 0, max_length: int = -1) -> str: """ Returns a random word, optionally only one whose length is between ``min_length`` and ``max_length``. .. versionadded:: 0.4.5 :param min_length: The minimum length of the words to return :param max_length: The maximum length of the words to return. A value of ``-1`` indicates no upper limit. :no-default max_length: :return: A random word meeting the above specifiers. """ # noqa: D400 words_list = get_words_list(min_length=min_length, max_length=max_length) return random.choice(words_list) # _default_unicode_sort_order: str = "".join(sorted(chr(i) for i in range(sys.maxunicode + 1))) def alpha_sort( iterable: Iterable[str], alphabet: Iterable[str], # = _default_unicode_sort_order reverse: bool = False, ) -> List[str]: """ Sorts a list of strings using a custom alphabet. .. versionadded:: 0.7.0 :param iterable: The strings to sort. :param alphabet: The custom alphabet to use for sorting. :param reverse: """ alphabet_ = list(alphabet) try: return sorted(iterable, key=lambda attr: [alphabet_.index(letter) for letter in attr], reverse=reverse) except ValueError as e: m = re.match(r"'(.*)' is not in list", str(e)) if m: raise ValueError(f"The character {m.group(1)!r} was not found in the alphabet.") from None else: # pragma: no cover raise e class Font(Dict[str, str]): """ Represents a Unicode pseudo-font. Mapping of ASCII letters to their equivalents in the pseudo-font. Individual characters can be converted using the :meth:`Font.get ` method or the ``getitem`` syntax. Entire strings can be converted by calling the :class:`~domdf_python_tools.words.Font` object and passing the string as the first argument. """ def __getitem__(self, char: str) -> str: """ Returns the given character in this font. If the character is not found in this font the character is returned unchanged. :param char: The character to convert. """ char = str(char) if char not in self: return str(char) else: return str(super().__getitem__(char)) def __call__(self, text: str) -> str: """ Returns the given text in this font. :param text: """ return ''.join(self[char] for char in text) def get(self, char: str, default: Optional[str] = None) -> str: # type: ignore """ Returns the given character in this font. If the character is not found in this font the character is returned unchanged or, if a value for ``default`` is provided, that is returned instead. 
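For example (an illustrative sketch using a small hand-made mapping):

.. code-block:: python

    >>> font = Font({'a': 'α', 'b': 'β'})
    >>> font.get('a')
    'α'
    >>> font.get('z')  # not in the font, no default: returned unchanged
    'z'
    >>> font.get('z', default='?')
    '?'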
:param char: The character to convert. :param default: Optional default value. """ if char not in self and default is not None: return str(default) else: return self[char] def make_font( uppers: Iterable[str], lowers: Iterable[str], digits: Optional[Iterable[str]] = None, greek_uppers: Optional[Iterable[str]] = None, greek_lowers: Optional[Iterable[str]] = None, ) -> Font: """ Returns a dictionary mapping ASCII alphabetical characters and digits to the Unicode equivalents in a different pseudo-font. .. versionadded:: 0.7.0 :param uppers: Iterable of uppercase letters (A-Z, 26 characters). :param lowers: Iterable of lowercase letters (a-z, 26 characters). :param digits: Optional iterable of digits (0-9). :param greek_uppers: Optional iterable of uppercase Greek letters (A-Ω, 25 characters). :param greek_lowers: Optional iterable of lowercase Greek letters (α-Ï–, 32 characters). """ # noqa: D400 font = Font({ **dict(zip(ascii_uppercase, uppers)), **dict(zip(ascii_lowercase, lowers)), }) if digits: font.update({char: unichar for char, unichar in zip(ascii_digits, digits)}) if greek_uppers: font.update({char: unichar for char, unichar in zip(greek_uppercase, greek_uppers)}) if greek_lowers: font.update({char: unichar for char, unichar in zip(greek_lowercase, greek_lowers)}) return font #: Bold Serif letters (uppercase) SERIF_BOLD_UPPER = "ð€ðð‚ðƒð„ð…ð†ð‡ðˆð‰ðŠð‹ðŒððŽððð‘ð’ð“ð”ð•ð–ð—ð˜ð™" #: Bold Serif letters (lowercase) SERIF_BOLD_LOWER = "ðšð›ðœððžðŸð ð¡ð¢ð£ð¤ð¥ð¦ð§ð¨ð©ðªð«ð¬ð­ð®ð¯ð°ð±ð²ð³" #: Bold Serif digits SERIF_BOLD_DIGITS = "ðŸŽðŸðŸðŸ‘ðŸ’ðŸ“ðŸ”ðŸ•ðŸ–ðŸ—" #: Bold Serif Greek letters (uppercase) SERIF_BOLD_GREEK_UPPER = "ðš¨ðš©ðšªðš«ðš¬ðš­ðš®ðš¯ðš°ðš±ðš²ðš³ðš´ðšµðš¶ðš·ðš¸ðš¹ðšºðš»ðš¼ðš½ðš¾ðš¿ð›€" #: Bold Serif Greek letters (lowercase) SERIF_BOLD_GREEK_LOWER = "ð›‚ð›ƒð›„ð›…ð›†ð›‡ð›ˆð›‰ð›Šð›‹ð›Œð›ð›Žð›ð›ð›‘ð›’ð›“ð›”ð›•ð›–ð›—ð›˜ð›™ð›šð››ð›œð›ð›žð›Ÿð› ð›¡" SERIF_BOLD_LETTERS = make_font( uppers=SERIF_BOLD_UPPER, lowers=SERIF_BOLD_LOWER, digits=SERIF_BOLD_DIGITS, greek_uppers=SERIF_BOLD_GREEK_UPPER, greek_lowers=SERIF_BOLD_GREEK_LOWER, ) """ Bold Serif :class:`~domdf_python_tools.words.Font`. This font includes numbers and Greek letters. .. versionadded:: 0.7.0 """ #: Italic Serif letters (uppercase) SERIF_ITALIC_UPPER = "ð´ðµð¶ð·ð¸ð¹ðºð»ð¼ð½ð¾ð¿ð‘€ð‘ð‘‚ð‘ƒð‘„ð‘…ð‘†ð‘‡ð‘ˆð‘‰ð‘Šð‘‹ð‘Œð‘" #: Italic Serif letters (lowercase) SERIF_ITALIC_LOWER = "ð‘Žð‘ð‘ð‘‘ð‘’ð‘“ð‘”ℎð‘–ð‘—ð‘˜ð‘™ð‘šð‘›ð‘œð‘ð‘žð‘Ÿð‘ ð‘¡ð‘¢ð‘£ð‘¤ð‘¥ð‘¦ð‘§" #: Italic Serif Greek letters (uppercase) SERIF_ITALIC_GREEK_UPPER = "ð›¢ð›£ð›¤ð›¥ð›¦ð›§ð›¨ð›©ð›ªð›«ð›¬ð›­ð›®ð›¯ð›°ð›±ð›²ð›³ð›´ð›µð›¶ð›·ð›¸ð›¹ð›ºð›»" #: Italic Serif Greek letters (lowercase) SERIF_ITALIC_GREEK_LOWER = "ð›¼ð›½ð›¾ð›¿ðœ€ðœðœ‚ðœƒðœ„ðœ…ðœ†ðœ‡ðœˆðœ‰ðœŠðœ‹ðœŒðœðœŽðœðœðœ‘ðœ’ðœ“ðœ”ðœ•ðœ–ðœ—ðœ˜ðœ™ðœšðœ›" SERIF_ITALIC_LETTERS = make_font( uppers=SERIF_ITALIC_UPPER, lowers=SERIF_ITALIC_LOWER, greek_uppers=SERIF_ITALIC_GREEK_UPPER, greek_lowers=SERIF_ITALIC_GREEK_LOWER, ) """ Italic Serif :class:`~domdf_python_tools.words.Font`. This font includes Greek letters. .. 
versionadded:: 0.7.0 """ #: Bold and Italic Serif letters (uppercase) SERIF_BOLD_ITALIC_UPPER = "ð‘¨ð‘©ð‘ªð‘«ð‘¬ð‘­ð‘®ð‘¯ð‘°ð‘±ð‘²ð‘³ð‘´ð‘µð‘¶ð‘·ð‘¸ð‘¹ð‘ºð‘»ð‘¼ð‘½ð‘¾ð‘¿ð’€ð’" #: Bold and Italic Serif letters (lowercase) SERIF_BOLD_ITALIC_LOWER = "ð’‚ð’ƒð’„ð’…ð’†ð’‡ð’ˆð’‰ð’Šð’‹ð’Œð’ð’Žð’ð’ð’‘ð’’ð’“ð’”ð’•ð’–ð’—ð’˜ð’™ð’šð’›" #: Bold and Italic Serif Greek letters (uppercase) SERIF_BOLD_ITALIC_GREEK_UPPER = "ðœœðœðœžðœŸðœ ðœ¡ðœ¢ðœ£ðœ¤ðœ¥ðœ¦ðœ§ðœ¨ðœ©ðœªðœ«ðœ¬ðœ­ðœ®ðœ¯ðœ°ðœ±ðœ²ðœ³ðœ´ðœµ" #: Bold and Italic Serif Greek letters (lowercase) SERIF_BOLD_ITALIC_GREEK_LOWER = "ðœ¶ðœ·ðœ¸ðœ¹ðœºðœ»ðœ¼ðœ½ðœ¾ðœ¿ð€ðð‚ðƒð„ð…ð†ð‡ðˆð‰ðŠð‹ðŒððŽððð‘ð’ð“ð”ð•" SERIF_BOLD_ITALIC_LETTERS = make_font( uppers=SERIF_BOLD_ITALIC_UPPER, lowers=SERIF_BOLD_ITALIC_LOWER, greek_uppers=SERIF_BOLD_ITALIC_GREEK_UPPER, greek_lowers=SERIF_BOLD_ITALIC_GREEK_LOWER, ) """ Bold and Italic Serif :class:`~domdf_python_tools.words.Font`. This font includes Greek letters. .. versionadded:: 0.7.0 """ #: Normal Sans-Serif letters (uppercase) SANS_SERIF_UPPER = "ð– ð–¡ð–¢ð–£ð–¤ð–¥ð–¦ð–§ð–¨ð–©ð–ªð–«ð–¬ð–­ð–®ð–¯ð–°ð–±ð–²ð–³ð–´ð–µð–¶ð–·ð–¸ð–¹" #: Normal Sans-Serif letters (lowercase) SANS_SERIF_LOWER = "ð–ºð–»ð–¼ð–½ð–¾ð–¿ð—€ð—ð—‚ð—ƒð—„ð—…ð—†ð—‡ð—ˆð—‰ð—Šð—‹ð—Œð—ð—Žð—ð—ð—‘ð—’ð—“" #: Normal Sans-Serif digits SANS_SERIF_DIGITS = "ðŸ¢ðŸ£ðŸ¤ðŸ¥ðŸ¦ðŸ§ðŸ¨ðŸ©ðŸªðŸ«" SANS_SERIF_LETTERS = make_font( uppers=SANS_SERIF_UPPER, lowers=SANS_SERIF_LOWER, digits=SANS_SERIF_DIGITS, ) """ Normal Sans-Serif :class:`~domdf_python_tools.words.Font`. This font includes numbers. .. versionadded:: 0.7.0 """ #: Bold Sans-Serif letters (uppercase) SANS_SERIF_BOLD_UPPER = "ð—”ð—•ð—–ð——ð—˜ð—™ð—šð—›ð—œð—ð—žð—Ÿð— ð—¡ð—¢ð—£ð—¤ð—¥ð—¦ð—§ð—¨ð—©ð—ªð—«ð—¬ð—­" #: Bold Sans-Serif letters (lowercase) SANS_SERIF_BOLD_LOWER = "ð—®ð—¯ð—°ð—±ð—²ð—³ð—´ð—µð—¶ð—·ð—¸ð—¹ð—ºð—»ð—¼ð—½ð—¾ð—¿ð˜€ð˜ð˜‚ð˜ƒð˜„ð˜…ð˜†ð˜‡" #: Bold Sans-Serif digits SANS_SERIF_BOLD_DIGITS = "ðŸ¬ðŸ­ðŸ®ðŸ¯ðŸ°ðŸ±ðŸ²ðŸ³ðŸ´ðŸµ" SANS_SERIF_BOLD_LETTERS = make_font( uppers=SANS_SERIF_BOLD_UPPER, lowers=SANS_SERIF_BOLD_LOWER, digits=SANS_SERIF_BOLD_DIGITS, ) """ Bold Sans-Serif :class:`~domdf_python_tools.words.Font`. This font includes numbers. .. versionadded:: 0.7.0 """ #: Italic Sans-Serif letters (uppercase) SANS_SERIF_ITALIC_UPPER = "ð˜ˆð˜‰ð˜Šð˜‹ð˜Œð˜ð˜Žð˜ð˜ð˜‘ð˜’ð˜“ð˜”ð˜•ð˜–ð˜—ð˜˜ð˜™ð˜šð˜›ð˜œð˜ð˜žð˜Ÿð˜ ð˜¡" #: Italic Sans-Serif letters (lowercase) SANS_SERIF_ITALIC_LOWER = "ð˜¢ð˜£ð˜¤ð˜¥ð˜¦ð˜§ð˜¨ð˜©ð˜ªð˜«ð˜¬ð˜­ð˜®ð˜¯ð˜°ð˜±ð˜²ð˜³ð˜´ð˜µð˜¶ð˜·ð˜¸ð˜¹ð˜ºð˜»" SANS_SERIF_ITALIC_LETTERS = make_font( uppers=SANS_SERIF_ITALIC_UPPER, lowers=SANS_SERIF_ITALIC_LOWER, ) """ Italic Sans-Serif :class:`~domdf_python_tools.words.Font`. .. 
versionadded:: 0.7.0 """ #: Bold and Italic Sans-Serif letters (uppercase) SANS_SERIF_BOLD_ITALIC_UPPER = "ð˜¼ð˜½ð˜¾ð˜¿ð™€ð™ð™‚ð™ƒð™„ð™…ð™†ð™‡ð™ˆð™‰ð™Šð™‹ð™Œð™ð™Žð™ð™ð™‘ð™’ð™“ð™”ð™•" #: Bold and Italic Sans-Serif letters (lowercase) SANS_SERIF_BOLD_ITALIC_LOWER = "ð™–ð™—ð™˜ð™™ð™šð™›ð™œð™ð™žð™Ÿð™ ð™¡ð™¢ð™£ð™¤ð™¥ð™¦ð™§ð™¨ð™©ð™ªð™«ð™¬ð™­ð™®ð™¯" #: Bold and Italic Sans-Serif letters (uppercase) SANS_SERIF_BOLD_ITALIC_GREEK_UPPER = "ðžðž‘ðž’ðž“ðž”ðž•ðž–ðž—ðž˜ðž™ðžšðž›ðžœðžðžžðžŸðž ðž¡ðž¢ðž£ðž¤ðž¥ðž¦ðž§ðž¨ðž©" #: Bold and Italic Sans-Serif letters (lowercase) SANS_SERIF_BOLD_ITALIC_GREEK_LOWER = "ðžªðž«ðž¬ðž­ðž®ðž¯ðž°ðž±ðž²ðž³ðž´ðžµðž¶ðž·ðž¸ðž¹ðžºðž»ðž¼ðž½ðž¾ðž¿ðŸ€ðŸðŸ‚ðŸƒðŸ„ðŸ…ðŸ†ðŸ‡ðŸˆðŸ‰" SANS_SERIF_BOLD_ITALIC_LETTERS = make_font( uppers=SANS_SERIF_BOLD_ITALIC_UPPER, lowers=SANS_SERIF_BOLD_ITALIC_LOWER, greek_uppers=SANS_SERIF_BOLD_ITALIC_GREEK_UPPER, greek_lowers=SANS_SERIF_BOLD_ITALIC_GREEK_LOWER, ) """ Bold and Italic Sans-Serif :class:`~domdf_python_tools.words.Font`. This font includes Greek letters. .. versionadded:: 0.7.0 """ #: Script letters (uppercase) SCRIPT_UPPER = "ð“ð“‘ð“’ð““ð“”ð“•ð“–ð“—ð“˜ð“™ð“šð“›ð“œð“ð“žð“Ÿð“ ð“¡ð“¢ð“£ð“¤ð“¥ð“¦ð“§ð“¨ð“©" #: Script letters (lowercase) SCRIPT_LOWER = "ð“ªð“«ð“¬ð“­ð“®ð“¯ð“°ð“±ð“²ð“³ð“´ð“µð“¶ð“·ð“¸ð“¹ð“ºð“»ð“¼ð“½ð“¾ð“¿ð”€ð”ð”‚ð”ƒ" SCRIPT_LETTERS = make_font(SCRIPT_UPPER, SCRIPT_LOWER) """ Script :class:`~domdf_python_tools.words.Font`. .. versionadded:: 0.7.0 """ #: Fraktur letters (uppercase) FRAKTUR_UPPER = "ð•¬ð•­ð•®ð•¯ð•°ð•±ð•²ð•³ð•´ð•µð•¶ð•·ð•¸ð•¹ð•ºð•»ð•¼ð•½ð•¾ð•¿ð–€ð–ð–‚ð–ƒð–„ð–…" #: Fraktur letters (lowercase) FRAKTUR_LOWER = "ð–†ð–‡ð–ˆð–‰ð–Šð–‹ð–Œð–ð–Žð–ð–ð–‘ð–’ð–“ð–”ð–•ð––ð–—ð–˜ð–™ð–šð–›ð–œð–ð–žð–Ÿ" FRAKTUR_LETTERS = make_font(FRAKTUR_UPPER, FRAKTUR_LOWER) """ Fraktur :class:`~domdf_python_tools.words.Font`. .. versionadded:: 0.7.0 """ #: Monospace letters (uppercase) MONOSPACE_UPPER = "ð™°ð™±ð™²ð™³ð™´ð™µð™¶ð™·ð™¸ð™¹ð™ºð™»ð™¼ð™½ð™¾ð™¿ðš€ðšðš‚ðšƒðš„ðš…ðš†ðš‡ðšˆðš‰" #: Monospace letters (lowercase) MONOSPACE_LOWER = "ðšŠðš‹ðšŒðšðšŽðšðšðš‘ðš’ðš“ðš”ðš•ðš–ðš—ðš˜ðš™ðššðš›ðšœðšðšžðšŸðš ðš¡ðš¢ðš£" #: Monospace digits MONOSPACE_DIGITS = "ðŸ¶ðŸ·ðŸ¸ðŸ¹ðŸºðŸ»ðŸ¼ðŸ½ðŸ¾ðŸ¿" MONOSPACE_LETTERS = make_font(MONOSPACE_UPPER, MONOSPACE_LOWER, MONOSPACE_DIGITS) """ Monospace :class:`~domdf_python_tools.words.Font`. This font includes numbers. .. versionadded:: 0.7.0 """ #: Doublestruck letters (uppercase) DOUBLESTRUCK_UPPER = "ð”¸ð”¹â„‚ð”»ð”¼ð”½ð”¾â„ð•€ð•ð•‚ð•ƒð•„â„•ð•†â„™â„šâ„ð•Šð•‹ð•Œð•ð•Žð•ð•ℤ" #: Doublestruck letters (lowercase) DOUBLESTRUCK_LOWER = "ð•’ð•“ð•”ð••ð•–ð•—ð•˜ð•™ð•šð•›ð•œð•ð•žð•Ÿð• ð•¡ð•¢ð•£ð•¤ð•¥ð•¦ð•§ð•¨ð•©ð•ªð•«" #: Doublestruck digits DOUBLESTRUCK_DIGITS = "ðŸ˜ðŸ™ðŸšðŸ›ðŸœðŸðŸžðŸŸðŸ ðŸ¡" DOUBLESTRUCK_LETTERS = make_font(DOUBLESTRUCK_UPPER, DOUBLESTRUCK_LOWER, DOUBLESTRUCK_DIGITS) """ Doublestruck :class:`~domdf_python_tools.words.Font`. This font includes numbers. .. versionadded:: 0.7.0 """ def as_text(value: Any) -> str: """ Convert the given value to a string. :py:obj:`None` is converted to ``''``. :param value: The value to convert to a string. :rtype: .. versionchanged:: 0.8.0 Moved from :mod:`domdf_python_tools.utils`. """ if value is None: return '' return str(value) def word_join( iterable: Iterable[str], use_repr: bool = False, oxford: bool = False, delimiter: str = ',', connective: str = "and", ) -> str: """ Join the given list of strings in a natural manner, with 'and' to join the last two elements. :param iterable: :param use_repr: Whether to join the ``repr`` of each object. 
:param oxford: Whether to use an oxford comma when joining the last two elements. :default oxford: :py:obj:`False`. Always :py:obj:`False` if there are fewer than three elements :param delimiter: A single character to use between the words. :param connective: The connective to join the final two words with. :rtype: .. versionchanged:: 0.11.0 Added ``delimiter`` and ``connective`` arguments. """ delimiter = f"{delimiter} " if use_repr: words = [repr(w) for w in iterable] else: words = list(iterable) if len(words) == 0: return '' elif len(words) == 1: return words[0] elif len(words) == 2: return f" {connective} ".join(words) else: if oxford: return delimiter.join(words[:-1]) + f"{delimiter}{connective} {words[-1]}" else: return delimiter.join(words[:-1]) + f" {connective} {words[-1]}" TAB = '\t' """ A literal ``TAB`` (``\\t``) character for use in f-strings. .. versionadded:: 1.3.0 """ CR = '\r' """ The carriage return character (``\\r``) for use in f-strings. .. versionadded:: 1.3.0 """ LF = '\n' """ The newline character (``\\n``) for use in f-strings. .. versionadded:: 1.3.0 """ _docs = domdf_python_tools.__docs @prettify_docstrings class Plural(functools.partial): """ Represents a word as its singular and plural. .. versionadded:: 2.0.0 :param singular: The singular form of the word. :param plural: The plural form of the word. .. code-block:: python >>> cow = Plural("cow", "cows") >>> n = 1 >>> print(f"The farmer has {n} {cow(n)}.") The farmer has 1 cow. >>> n = 2 >>> print(f"The farmer has {n} {cow(n)}.") The farmer has 2 cows. >>> n = 3 >>> print(f"The farmer has {n} {cow(n)}.") The farmer has 3 cows. """ if _docs: # pragma: no cover def __init__(self, singular: str, plural: str): pass def __call__(self, n: int) -> str: # type: ignore """ Returns either the singular or plural form of the word depending on the value of ``n``. :param n: """ # if PYPY: # pragma: no cover (!PyPy) if PYPY and sys.version_info < (3, 9): # pragma: no cover (!PyPy) def __init__(self, singular: str, plural: str): super().__init__(ngettext, singular, plural) # type: ignore[call-arg] else: # pragma: no cover (!CPython) def __new__(cls, singular: str, plural: str): # noqa: D102 return functools.partial.__new__(cls, ngettext, singular, plural) @recursive_repr() def __repr__(self): qualname = type(self).__qualname__ args: List[str] = [] args.extend(repr(x) for x in self.args) args.extend(f"{k}={v!r}" for (k, v) in self.keywords.items()) return f"{qualname}({', '.join(args)})" @prettify_docstrings class PluralPhrase(NamedTuple): """ Represents a phrase which varies depending on a numerical count. .. versionadded:: 3.3.0 :param template: The phrase template. :param words: The words to insert into the template. For example, consider the phase:: The proposed changes are to ... The "phrase template" would be: .. code-block:: python "The proposed {} {} to ..." and the two words to insert are: .. code-block:: python Plural("change", "changes") Plural("is", "are") The phrase is constructed as follows: .. code-block:: python >>> phrase = PluralPhrase( ... "The proposed {} {} to ...", ... (Plural("change", "changes"), Plural("is", "are")) ... ) >>> phrase(1) 'The proposed change is to ...' >>> phrase(2) 'The proposed changes are to ...' The phrase template can use any `valid syntax`_ for :meth:`str.format`, except for keyword arguments. The exception if the keyword ``n``, which is replaced with the count (e.g. ``2``) passed in when the phrase is constructed. For example: .. 
code-block:: python >>> phrase2 = PluralPhrase("The farmer has {n} {0}.", (Plural("cow", "cows"), )) >>> phrase2(2) 'The farmer has 2 cows.' .. _valid syntax: https://docs.python.org/3/library/string.html#formatstrings """ template: str words: Tuple[Plural, ...] def __call__(self, n: int) -> str: # noqa: TYP004 # TODO """ Construct the phrase based on the value of ``n``. :param n: """ plural_words = [x(n) for x in self.words] return self.template.format(*plural_words, n=n) @functools.lru_cache() def _slice_end(max_length: int, ending: str = "...") -> slice: slice_end = max_length - len(ending) return slice(slice_end) def truncate_string(string: str, max_length: int, ending: str = "...") -> str: """ Truncate a string to ``max_length`` characters, and put ``ending`` on the end. The truncated string is further truncated by the length of ``ending`` so the returned string is no more then ``max_length``. .. versionadded:: 3.3.0 :param string: :param max_length: :param ending: """ string_length = len(string) if string_length > max_length: return string[_slice_end(max_length, ending)] + ending else: return string domdf_python_tools-3.10.0/formate.toml000066400000000000000000000021651475315453000200520ustar00rootroot00000000000000[hooks] dynamic_quotes = 10 collections-import-rewrite = 20 reformat-generics = 40 noqa-reformat = 60 ellipsis-reformat = 70 squish_stubs = 80 [hooks.yapf] priority = 30 [hooks.yapf.kwargs] yapf_style = ".style.yapf" [hooks.isort] priority = 50 [hooks.isort.kwargs] indent = " " multi_line_output = 8 import_heading_stdlib = "stdlib" import_heading_thirdparty = "3rd party" import_heading_firstparty = "this package" import_heading_localfolder = "this package" balanced_wrapping = false lines_between_types = 0 use_parentheses = true remove_redundant_aliases = true default_section = "THIRDPARTY" known_third_party = [ "click", "coincidence", "consolekit", "coverage", "coverage_pyver_pragma", "faker", "flake8", "funcy", "github", "importlib_metadata", "importlib_resources", "natsort", "pandas", "pydash", "pytest", "pytest_cov", "pytest_randomly", "pytest_regressions", "pytest_rerunfailures", "pytest_timeout", "pytz", "requests", "typing_extensions", ] known_first_party = [ "domdf_python_tools",] [config] indent = " " line_length = 115 domdf_python_tools-3.10.0/justfile000066400000000000000000000005731475315453000172710ustar00rootroot00000000000000default: lint pdf-docs: latex-docs make -C doc-source/build/latex/ latex-docs: SPHINX_BUILDER=latex tox -e docs unused-imports: tox -e lint -- --select F401 incomplete-defs: tox -e lint -- --select MAN vdiff: git diff $(repo-helper show version -q)..HEAD bare-ignore: greppy '# type:? 
*ignore(?!\[|\w)' -s lint: unused-imports incomplete-defs bare-ignore tox -n qa domdf_python_tools-3.10.0/pyproject.toml000066400000000000000000000124151475315453000204330ustar00rootroot00000000000000[build-system] requires = [ "hatch-requirements-txt",] build-backend = "hatchling.build" [project] name = "domdf_python_tools" version = "3.10.0" description = "Helpful functions for Python ðŸâ€‚🛠ï¸" readme = "README.rst" requires-python = ">=3.6" keywords = [ "utilities",] classifiers = [ "Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python", "Programming Language :: Python :: 3 :: Only", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", "Topic :: Software Development :: Libraries :: Python Modules", "Typing :: Typed", ] dynamic = [ "dependencies",] [project.license] file = "LICENSE" [[project.authors]] name = "Dominic Davis-Foster" email = "dominic@davis-foster.co.uk" [project.urls] Homepage = "https://github.com/domdfcoding/domdf_python_tools" "Issue Tracker" = "https://github.com/domdfcoding/domdf_python_tools/issues" "Source Code" = "https://github.com/domdfcoding/domdf_python_tools" Documentation = "https://domdf-python-tools.readthedocs.io/en/latest" [project.optional-dependencies] dates = [ "pytz>=2019.1",] testing = [] all = [ "pytz>=2019.1",] [tool.mkrecipe] conda-channels = [ "conda-forge", "domdfcoding",] extras = [ "dates",] license-key = "MIT" [tool.sphinx-pyproject] github_username = "domdfcoding" github_repository = "domdf_python_tools" author = "Dominic Davis-Foster" project = "domdf-python-tools" copyright = "2019-2022 Dominic Davis-Foster" language = "en" package_root = "domdf_python_tools" extensions = [ "sphinx_toolbox", "sphinx_toolbox.more_autodoc", "sphinx_toolbox.more_autosummary", "sphinx_toolbox.documentation_summary", "sphinx_toolbox.tweaks.param_dash", "sphinxcontrib.toctree_plus", "sphinx_toolbox.tweaks.latex_layout", "sphinx_toolbox.tweaks.latex_toc", "sphinx.ext.intersphinx", "sphinx.ext.mathjax", "sphinxcontrib.extras_require", "sphinx.ext.todo", "notfound.extension", "sphinx_copybutton", "sphinxcontrib.default_values", "sphinx_debuginfo", "sphinx_licenseinfo", "seed_intersphinx_mapping", "html_section", "sphinx_autofixture", "sphinx_highlights", "sphinx_toolbox.more_autosummary.column_widths", "sphinx_toolbox.latex.succinct_seealso", "latex_unicode", ] gitstamp_fmt = "%d %b %Y" templates_path = [ "_templates",] html_static_path = [ "_static",] source_suffix = ".rst" master_doc = "index" suppress_warnings = [ "image.nonlocal_uri",] pygments_style = "default" html_theme = "domdf_sphinx_theme" html_theme_path = [ "../..",] html_show_sourcelink = true toctree_plus_types = [ "class", "confval", "data", "directive", "enum", "exception", "flag", "function", "namedtuple", "protocol", "role", "typeddict", ] add_module_names = false hide_none_rtype = true all_typevars = true overloads_location = "bottom" html_codeblock_linenos_style = "table" autodoc_exclude_members = [ "__dict__", "__class__", "__dir__", "__weakref__", "__module__", "__annotations__", "__orig_bases__", 
"__parameters__", "__subclasshook__", "__init_subclass__", "__attrs_attrs__", "__init__", "__new__", "__getnewargs__", "__abstractmethods__", "__hash__", ] [tool.whey] base-classifiers = [ "Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "Topic :: Software Development :: Libraries :: Python Modules", "Typing :: Typed", ] python-versions = [ "3.6", "3.7", "3.8", "3.9", "3.10", "3.11", "3.12",] python-implementations = [ "CPython", "PyPy",] platforms = [ "Windows", "macOS", "Linux",] license-key = "MIT" additional-files = [ "include domdf_python_tools/google-10000-english-no-swears.txt",] [tool.mypy] python_version = "3.8" namespace_packages = true check_untyped_defs = true warn_unused_ignores = true no_implicit_optional = true show_error_codes = true [tool.snippet-fmt] directives = [ "code-block",] [tool.snippet-fmt.languages.python] reformat = true [tool.snippet-fmt.languages.TOML] reformat = true [tool.snippet-fmt.languages.ini] [tool.snippet-fmt.languages.json] [tool.setuptools] zip-safe = false include-package-data = true platforms = [ "Windows", "macOS", "Linux",] [tool.dependency-dash."requirements.txt"] order = 10 [tool.dependency-dash."tests/requirements.txt"] order = 20 include = false [tool.dependency-dash."doc-source/requirements.txt"] order = 30 include = false [tool.hatch.build] exclude = [ "/*", "!/domdf_python_tools", "!/domdf_python_tools/**/requirements.txt", "!/requirements.txt", "tests", "doc-source", ] [tool.hatch.build.sdist] include = [ "domdf_python_tools", "requirements.txt",] [tool.hatch.build.wheel] include = [ "domdf_python_tools",] [tool.hatch.metadata.hooks.requirements_txt] files = [ "requirements.txt",] domdf_python_tools-3.10.0/repo_helper.yml000066400000000000000000000034101475315453000205410ustar00rootroot00000000000000--- modname: domdf_python_tools copyright_years: "2019-2022" author: "Dominic Davis-Foster" email: "dominic@davis-foster.co.uk" version: "3.10.0" username: "domdfcoding" license: 'MIT' short_desc: 'Helpful functions for Python ðŸâ€‚🛠ï¸' python_deploy_version: 3.8 requires_python: 3.6 min_coverage: 95 tox_testenv_extras: all pre_commit_exclude: "^domdf_python_tools/compat/importlib_resources.py$" docs_fail_on_warning: true use_hatch: true conda_channels: - conda-forge python_versions: 3.6: 3.7: 3.8: 3.9: "3.10": "3.11": "3.12": "3.13-dev": pypy36: pypy37: pypy38: pypy39: pypy310: classifiers: - 'Development Status :: 5 - Production/Stable' - 'Intended Audience :: Developers' - 'Topic :: Software Development :: Libraries :: Python Modules' extras_require: dates: - pytz>=2019.1 testing: [] conda_extras: - dates keywords: - utilities manifest_additional: - "include domdf_python_tools/google-10000-english-no-swears.txt" sphinx_conf_epilogue: - manpages_url = "https://manpages.debian.org/{path}" - toctree_plus_types.add("fixture") - latex_elements["preamble"] = "\\usepackage{textgreek}\\usepackage{multicol}" - needspace_amount = r"5\baselineskip" intersphinx_mapping: - "'pandas': ('https://pandas.pydata.org/docs/', None)" - "'consolekit': ('https://consolekit.readthedocs.io/en/latest/', None)" - "'pytest': ('https://docs.pytest.org/en/stable', None)" - "'pytest-regressions': ('https://pytest-regressions.readthedocs.io/en/latest/', None)" mypy_deps: - pprint36 extra_sphinx_extensions: - sphinx_autofixture - sphinx_highlights - sphinx_toolbox.more_autosummary.column_widths - sphinx_toolbox.latex.succinct_seealso - latex_unicode tox_unmanaged: - testenv additional_ignore: - demo.py - frozendict.py - graaltest.sh 
domdf_python_tools-3.10.0/requirements.txt000066400000000000000000000002171475315453000210000ustar00rootroot00000000000000
importlib-metadata>=3.6.0; python_version < "3.9"
importlib-resources>=3.0.0; python_version < "3.9"
natsort>=7.0.1
typing-extensions>=3.7.4.1
domdf_python_tools-3.10.0/stubs.txt000066400000000000000000000001421475315453000174120ustar00rootroot00000000000000
pandas-stubs; implementation_name == "cpython" and python_version < "3.10"
types-click
types-pytz
domdf_python_tools-3.10.0/tests/000077500000000000000000000000001475315453000166565ustar00rootroot00000000000000
domdf_python_tools-3.10.0/tests/__init__.py000066400000000000000000000000001475315453000207550ustar00rootroot00000000000000
domdf_python_tools-3.10.0/tests/conftest.py000066400000000000000000000000431475315453000210520ustar00rootroot00000000000000
pytest_plugins = ("coincidence", )
domdf_python_tools-3.10.0/tests/discover_demo_module/000077500000000000000000000000001475315453000230455ustar00rootroot00000000000000
domdf_python_tools-3.10.0/tests/discover_demo_module/__init__.py000066400000000000000000000000401475315453000251500ustar00rootroot00000000000000
def foo_in_init() -> str: pass
domdf_python_tools-3.10.0/tests/discover_demo_module/submodule_a.py000066400000000000000000000000641475315453000257160ustar00rootroot00000000000000
def foo(): # noqa
	pass


def bar(): # noqa
	pass
domdf_python_tools-3.10.0/tests/discover_demo_module/submodule_b.py000066400000000000000000000002041475315453000257130ustar00rootroot00000000000000
# stdlib
from math import ceil # noqa: F401
from string import ascii_letters # noqa: F401


class Bob:
	pass


class Alice:
	pass
domdf_python_tools-3.10.0/tests/list_tests.py000066400000000000000000000334241475315453000214330ustar00rootroot00000000000000
# From https://raw.githubusercontent.com/python/cpython/master/Lib/test/list_tests.py
# Licensed under the Python Software Foundation License Version 2.
# Copyright © 2001-2020 Python Software Foundation. All rights reserved.
# Copyright © 2000 BeOpen.com. All rights reserved.
# Copyright © 1995-2000 Corporation for National Research Initiatives. All rights reserved.
# Copyright © 1991-1995 Stichting Mathematisch Centrum. All rights reserved.
""" Tests common to list and UserList.UserList """ # stdlib import sys from functools import cmp_to_key from typing import List, no_type_check # 3rd party import pytest from coincidence.selectors import not_pypy # this package from tests import seq_tests from tests.seq_tests import ALWAYS_EQ, NEVER_EQ class CommonTest(seq_tests.CommonTest): def test_init(self): # Iterable arg is optional assert self.type2test([]) == self.type2test() # Init clears previous values a = self.type2test([1, 2, 3]) a.__init__() assert a == self.type2test([]) # Init overwrites previous values a = self.type2test([1, 2, 3]) a.__init__([4, 5, 6]) assert a == self.type2test([4, 5, 6]) # Mutables always return a new object b = self.type2test(a) assert id(a) != id(b) assert a == b @no_type_check def test_getitem_error(self): a = [] with pytest.raises(TypeError, match="list indices must be integers or slices"): a['a'] # pylint: disable=pointless-statement @no_type_check def test_setitem_error(self): a = [] with pytest.raises(TypeError, match="list indices must be integers or slices"): a['a'] = "python" def test_repr(self): l0: List = [] l2 = [0, 1, 2] a0 = self.type2test(l0) a2 = self.type2test(l2) assert str(a0) == str(l0) assert repr(a0) == repr(l0) assert repr(a2) == repr(l2) assert str(a2) == "[0, 1, 2]" assert repr(a2) == "[0, 1, 2]" a2.append(a2) a2.append(3) assert str(a2) == "[0, 1, 2, [...], 3]" assert repr(a2) == "[0, 1, 2, [...], 3]" @not_pypy() @pytest.mark.skipif(sys.version_info >= (3, 12), reason="Doesn't error on newer Pythons") def test_repr_deep(self): a = self.type2test([]) for i in range(1500 + 1): # sys.getrecursionlimit() + 100 a = self.type2test([a]) with pytest.raises(RecursionError): repr(a) def test_set_subscript(self): a = self.type2test(range(20)) with pytest.raises(ValueError): a.__setitem__(slice(0, 10, 0), [1, 2, 3]) with pytest.raises(TypeError): a.__setitem__(slice(0, 10), 1) with pytest.raises(ValueError): a.__setitem__(slice(0, 10, 2), [1, 2]) with pytest.raises(TypeError): a.__getitem__('x', 1) a[slice(2, 10, 3)] = [1, 2, 3] assert a == self.type2test([0, 1, 1, 3, 4, 2, 6, 7, 3, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19]) def test_reversed(self): a = self.type2test(range(20)) r = reversed(a) assert list(r) == self.type2test(range(19, -1, -1)) with pytest.raises(StopIteration): next(r) assert list(reversed(self.type2test())) == self.type2test() # Bug 3689: make sure list-reversed-iterator doesn't have __len__ with pytest.raises(TypeError): len(reversed([1, 2, 3])) # type: ignore def test_setitem(self): a = self.type2test([0, 1]) a[0] = 0 a[1] = 100 assert a == self.type2test([0, 100]) a[-1] = 200 assert a == self.type2test([0, 200]) a[-2] = 100 assert a == self.type2test([100, 200]) with pytest.raises(IndexError): a.__setitem__(-3, 200) with pytest.raises(IndexError): a.__setitem__(2, 200) a = self.type2test([]) with pytest.raises(IndexError): a.__setitem__(0, 200) with pytest.raises(IndexError): a.__setitem__(-1, 200) with pytest.raises(TypeError): a.__setitem__() a = self.type2test([0, 1, 2, 3, 4]) a[0] = 1 a[1] = 2 a[2] = 3 assert a == self.type2test([1, 2, 3, 3, 4]) a[0] = 5 a[1] = 6 a[2] = 7 assert a == self.type2test([5, 6, 7, 3, 4]) a[-2] = 88 a[-1] = 99 assert a == self.type2test([5, 6, 7, 88, 99]) a[-2] = 8 a[-1] = 9 assert a == self.type2test([5, 6, 7, 8, 9]) with pytest.raises(TypeError, match="list indices must be integers or slices"): a['a'] = "python" def test_delitem(self): a = self.type2test([0, 1]) del a[1] assert a == [0] del a[0] assert a == [] a = self.type2test([0, 
1]) del a[-2] assert a == [1] del a[-1] assert a == [] a = self.type2test([0, 1]) with pytest.raises(IndexError): a.__delitem__(-3) with pytest.raises(IndexError): a.__delitem__(2) a = self.type2test([]) with pytest.raises(IndexError): a.__delitem__(0) with pytest.raises(TypeError): a.__delitem__() def test_setslice(self): l = [0, 1] a = self.type2test(l) for i in range(-3, 4): a[:i] = l[:i] assert a == l a2 = a[:] a2[:i] = a[:i] assert a2 == a a[i:] = l[i:] assert a == l a2 = a[:] a2[i:] = a[i:] assert a2 == a for j in range(-3, 4): a[i:j] = l[i:j] assert a == l a2 = a[:] a2[i:j] = a[i:j] assert a2 == a aa2 = a2[:] aa2[:0] = [-2, -1] assert aa2 == [-2, -1, 0, 1] aa2[0:] = [] assert aa2 == [] a = self.type2test([1, 2, 3, 4, 5]) a[:-1] = a assert a == self.type2test([1, 2, 3, 4, 5, 5]) a = self.type2test([1, 2, 3, 4, 5]) a[1:] = a assert a == self.type2test([1, 1, 2, 3, 4, 5]) a = self.type2test([1, 2, 3, 4, 5]) a[1:-1] = a assert a == self.type2test([1, 1, 2, 3, 4, 5, 5]) a = self.type2test([]) a[:] = tuple(range(10)) assert a == self.type2test(range(10)) with pytest.raises(TypeError): a.__setitem__(slice(0, 1, 5)) with pytest.raises(TypeError): a.__setitem__() def test_delslice(self): a = self.type2test([0, 1]) del a[1:2] del a[0:1] assert a == self.type2test([]) a = self.type2test([0, 1]) del a[1:2] del a[0:1] assert a == self.type2test([]) a = self.type2test([0, 1]) del a[-2:-1] assert a == self.type2test([1]) a = self.type2test([0, 1]) del a[-2:-1] assert a == self.type2test([1]) a = self.type2test([0, 1]) del a[1:] del a[:1] assert a == self.type2test([]) a = self.type2test([0, 1]) del a[1:] del a[:1] assert a == self.type2test([]) a = self.type2test([0, 1]) del a[-1:] assert a == self.type2test([0]) a = self.type2test([0, 1]) del a[-1:] assert a == self.type2test([0]) a = self.type2test([0, 1]) del a[:] assert a == self.type2test([]) def test_append(self): a = self.type2test([]) a.append(0) a.append(1) a.append(2) assert a == self.type2test([0, 1, 2]) with pytest.raises(TypeError): a.append() def test_extend(self): a1 = self.type2test([0]) a2 = self.type2test((0, 1)) a = a1[:] a.extend(a2) assert a == a1 + a2 a.extend(self.type2test([])) assert a == a1 + a2 a.extend(a) assert a == self.type2test([0, 0, 1, 0, 0, 1]) a = self.type2test("spam") a.extend("eggs") assert a == list("spameggs") with pytest.raises(TypeError): a.extend(None) with pytest.raises(TypeError): a.extend() # overflow test. 
issue1621 class CustomIter: def __iter__(self): # noqa: MAN002 return self def __next__(self): # noqa: MAN002 raise StopIteration def __length_hint__(self): # noqa: MAN002 return sys.maxsize a = self.type2test([1, 2, 3, 4]) a.extend(CustomIter()) assert a == [1, 2, 3, 4] def test_insert(self): a = self.type2test([0, 1, 2]) a.insert(0, -2) a.insert(1, -1) a.insert(2, 0) assert a == [-2, -1, 0, 0, 1, 2] b = a[:] b.insert(-2, "foo") b.insert(-200, "left") b.insert(200, "right") assert b == self.type2test(["left", -2, -1, 0, 0, "foo", 1, 2, "right"]) with pytest.raises(TypeError): a.insert() def test_pop(self): a = self.type2test([-1, 0, 1]) a.pop() assert a == [-1, 0] a.pop(0) assert a == [0] with pytest.raises(IndexError): a.pop(5) a.pop(0) assert a == [] with pytest.raises(IndexError): a.pop() with pytest.raises(TypeError): a.pop(42, 42) a = self.type2test([0, 10, 20, 30, 40]) @not_pypy("Doesn't work on PyPy") def test_remove(self): a = self.type2test([0, 0, 1]) a.remove(1) assert a == [0, 0] a.remove(0) assert a == [0] a.remove(0) assert a == [] with pytest.raises(ValueError): a.remove(0) with pytest.raises(TypeError): a.remove() a = self.type2test([1, 2]) with pytest.raises(ValueError): a.remove(NEVER_EQ) assert a == [1, 2] a.remove(ALWAYS_EQ) assert a == [2] a = self.type2test([ALWAYS_EQ]) a.remove(1) assert a == [] a = self.type2test([ALWAYS_EQ]) a.remove(NEVER_EQ) assert a == [] a = self.type2test([NEVER_EQ]) with pytest.raises(ValueError): a.remove(ALWAYS_EQ) class BadExc(Exception): pass class BadCmp: def __eq__(self, other): # noqa: MAN001,MAN002 if other == 2: raise BadExc() return False a = self.type2test([0, 1, 2, 3]) with pytest.raises(BadExc): a.remove(BadCmp()) class BadCmp2: def __eq__(self, other): # noqa: MAN001,MAN002 raise BadExc() d = self.type2test("abcdefghcij") d.remove('c') assert d == self.type2test("abdefghcij") d.remove('c') assert d == self.type2test("abdefghij") with pytest.raises(ValueError): d.remove('c') assert d == self.type2test("abdefghij") # Handle comparison errors d = self.type2test(['a', 'b', BadCmp2(), 'c']) e = self.type2test(d) with pytest.raises(BadExc): d.remove('c') for x, y in zip(d, e): # verify that original order and values are retained. 
assert x is y @not_pypy("Doesn't work on PyPy") def test_index(self): super().test_index() a = self.type2test([-2, -1, 0, 0, 1, 2]) a.remove(0) with pytest.raises(ValueError): a.index(2, 0, 4) assert a == self.type2test([-2, -1, 0, 1, 2]) # Test modifying the list during index's iteration class EvilCmp: def __init__(self, victim): # noqa: MAN001 self.victim = victim def __eq__(self, other): # noqa: MAN001,MAN002 del self.victim[:] return False a = self.type2test() a[:] = [EvilCmp(a) for _ in range(100)] # This used to seg fault before patch #1005778 with pytest.raises(ValueError): a.index(None) def test_reverse(self): u = self.type2test([-2, -1, 0, 1, 2]) u2 = u[:] u.reverse() assert u == [2, 1, 0, -1, -2] u.reverse() assert u == u2 with pytest.raises(TypeError): u.reverse(42) def test_clear(self): u = self.type2test([2, 3, 4]) u.clear() assert u == [] u = self.type2test([]) u.clear() assert u == [] u = self.type2test([]) u.append(1) u.clear() u.append(2) assert u == [2] with pytest.raises(TypeError): u.clear(None) def test_copy(self): u = self.type2test([1, 2, 3]) v = u.copy() assert v == [1, 2, 3] u = self.type2test([]) v = u.copy() assert v == [] # test that it's indeed a copy and not a reference u = self.type2test(['a', 'b']) v = u.copy() v.append('i') assert u == ['a', 'b'] assert v == u + ['i'] # test that it's a shallow, not a deep copy u = self.type2test([1, 2, [3, 4], 5]) v = u.copy() assert u == v assert v[3] is u[3] with pytest.raises(TypeError): u.copy(None) def test_sort(self): u = self.type2test([1, 0]) u.sort() assert u == [0, 1] u = self.type2test([2, 1, 0, -1, -2]) u.sort() assert u == self.type2test([-2, -1, 0, 1, 2]) with pytest.raises(TypeError): u.sort(42, 42) def revcmp(a, b): # noqa: MAN001,MAN002 if a == b: return 0 elif a < b: return 1 else: # a > b return -1 u.sort(key=cmp_to_key(revcmp)) assert u == self.type2test([2, 1, 0, -1, -2]) # The following dumps core in unpatched Python 1.5: def myComparison(x, y): xmod, ymod = x % 3, y % 7 if xmod == ymod: return 0 elif xmod < ymod: return -1 else: # xmod > ymod return 1 z = self.type2test(range(12)) z.sort(key=cmp_to_key(myComparison)) with pytest.raises(TypeError): z.sort(2) with pytest.raises(TypeError): z.sort(42, 42, 42, 42) def test_slice(self): u = self.type2test("spam") u[:2] = 'h' assert u == list("ham") def test_iadd(self): super().test_iadd() u = self.type2test([0, 1]) u2 = u u += [2, 3] assert u is u2 u = self.type2test("spam") u += "eggs" assert u == self.type2test("spameggs") with pytest.raises(TypeError): u.__iadd__(None) def test_imul(self): super().test_imul() s = self.type2test([]) oldid = id(s) s *= 10 assert id(s) == oldid def test_extendedslicing(self): # subscript a = self.type2test([0, 1, 2, 3, 4]) # deletion del a[::2] assert a == self.type2test([1, 3]) a = self.type2test(range(5)) del a[1::2] assert a == self.type2test([0, 2, 4]) a = self.type2test(range(5)) del a[1::-2] assert a == self.type2test([0, 2, 3, 4]) a = self.type2test(range(10)) del a[::1000] assert a == self.type2test([1, 2, 3, 4, 5, 6, 7, 8, 9]) # assignment a = self.type2test(range(10)) a[::2] = [-1] * 5 assert a == self.type2test([-1, 1, -1, 3, -1, 5, -1, 7, -1, 9]) a = self.type2test(range(10)) a[::-4] = [10] * 3 assert a == self.type2test([0, 10, 2, 3, 4, 10, 6, 7, 8, 10]) a = self.type2test(range(4)) a[::-1] = a assert a == self.type2test([3, 2, 1, 0]) a = self.type2test(range(10)) b = a[:] c = a[:] a[2:3] = self.type2test(["two", "elements"]) b[slice(2, 3)] = self.type2test(["two", "elements"]) c[2:3:] = 
self.type2test(["two", "elements"]) assert a == b assert a == c a = self.type2test(range(10)) a[::2] = tuple(range(5)) assert a == self.type2test([0, 1, 1, 3, 2, 5, 3, 7, 4, 9]) # test issue7788 a = self.type2test(range(10)) del a[9::1 << 333] def test_constructor_exception_handling(self): # Bug #1242657 class F: def __iter__(self): # noqa: MAN002 raise KeyboardInterrupt with pytest.raises(KeyboardInterrupt): list(F()) def test_exhausted_iterator(self): a = self.type2test([1, 2, 3]) exhit = iter(a) empit = iter(a) for x in exhit: # exhaust the iterator next(empit) # not exhausted a.append(9) assert list(exhit) == [] assert list(empit) == [9] assert a == self.type2test([1, 2, 3, 9]) domdf_python_tools-3.10.0/tests/mypy_test.py000066400000000000000000000006321475315453000212660ustar00rootroot00000000000000# stdlib from typing import Any, Dict # this package from domdf_python_tools.bases import Dictable class MyDictable(Dictable): def __init__(self, foo: str, bar: int): super().__init__() self.foo: str = foo self.bar: float = float(bar) @property def __dict__(self): return dict(foo=self.foo, bar=self.bar) def myfunc() -> Dict[str, Any]: a = MyDictable("foo", 12) return dict(a) myfunc() domdf_python_tools-3.10.0/tests/requirements.txt000066400000000000000000000005751475315453000221510ustar00rootroot00000000000000click>=7.1.2 coincidence>=0.2.0 consolekit>=1.0.0 coverage>=5.1 coverage-pyver-pragma>=0.2.1 faker>=4.1.2 flake8<5,>=3.8.4 funcy>=1.16 importlib-metadata>=3.6.0 pandas>=1.0.0; implementation_name == "cpython" and python_version < "3.11" pytest>=6.0.0 pytest-cov>=2.8.1 pytest-randomly>=3.7.0 pytest-regressions>=2.0.1 pytest-rerunfailures>=9.1.1 pytest-timeout>=1.4.2 pytz>=2019.1 domdf_python_tools-3.10.0/tests/seq_tests.py000066400000000000000000000322011475315453000212400ustar00rootroot00000000000000# From https://raw.githubusercontent.com/python/cpython/master/Lib/test/seq_tests.py # Licensed under the Python Software Foundation License Version 2. # Copyright © 2001-2020 Python Software Foundation. All rights reserved. # Copyright © 2000 BeOpen.com. All rights reserved. # Copyright © 1995-2000 Corporation for National Research Initiatives. All rights reserved. # Copyright © 1991-1995 Stichting Mathematisch Centrum. All rights reserved. """ Tests common to tuple, list and UserList.UserList """ # stdlib import pickle import sys from itertools import chain from typing import Any, List # 3rd party import pytest from coincidence.selectors import not_pypy # this package from domdf_python_tools.compat import PYPY38_PLUS from domdf_python_tools.iterative import Len class _ALWAYS_EQ: """ Object that is equal to anything. """ def __eq__(self, other): return True def __ne__(self, other): return False ALWAYS_EQ = _ALWAYS_EQ() class _NEVER_EQ: """ Object that is not equal to anything. """ def __eq__(self, other): return False def __ne__(self, other): return True def __hash__(self): return 1 NEVER_EQ = _NEVER_EQ() # Various iterables # This is used for checking the constructor (here and in test_deque.py) def iterfunc(seqn): """ Regular generator. """ yield from seqn class Sequence: """ Sequence using ``__getitem__``. 
""" def __init__(self, seqn): self.seqn = seqn def __getitem__(self, i): return self.seqn[i] class IterFunc: """ Sequence using iterator protocol, """ def __init__(self, seqn): self.seqn = seqn self.i = 0 def __iter__(self): return self def __next__(self): if self.i >= len(self.seqn): raise StopIteration v = self.seqn[self.i] self.i += 1 return v class IterGen: """ Sequence using iterator protocol defined with a generator. """ def __init__(self, seqn): self.seqn = seqn self.i = 0 def __iter__(self): yield from self.seqn class IterNextOnly: """ Missing __getitem__ and __iter__. """ def __init__(self, seqn): self.seqn = seqn self.i = 0 def __next__(self): if self.i >= len(self.seqn): raise StopIteration v = self.seqn[self.i] self.i += 1 return v class IterNoNext: """ Iterator missing __next__(). """ def __init__(self, seqn): self.seqn = seqn self.i = 0 def __iter__(self): return self class IterGenExc: """ Test propagation of exceptions. """ def __init__(self, seqn): self.seqn = seqn self.i = 0 def __iter__(self): return self def __next__(self): 3 // 0 # pylint: disable=pointless-statement class IterFuncStop: """ Test immediate stop. """ def __init__(self, seqn): pass def __iter__(self): return self def __next__(self): raise StopIteration def itermulti(seqn): """ Test multiple tiers of iterators. """ return chain(map(lambda x: x, iterfunc(IterGen(Sequence(seqn))))) class LyingTuple(tuple): __slots__ = () def __iter__(self): yield 1 class LyingList(list): def __iter__(self): yield 1 class CommonTest: # The type to be tested type2test: type def assertEqual(self, left, right): assert left == right def assertNotEqual(self, left, right): assert left != right def assertRaises(self, what, func, *args): with pytest.raises(what): func(*args) def test_constructors(self): l0: List = [] l1 = [0] l2 = [0, 1] u = self.type2test() u0 = self.type2test(l0) u1 = self.type2test(l1) u2 = self.type2test(l2) uu = self.type2test(u) uu0 = self.type2test(u0) uu1 = self.type2test(u1) uu2 = self.type2test(u2) v = self.type2test(tuple(u)) class OtherSeq: def __init__(self, initseq): self.__data = initseq def __len__(self): return len(self.__data) def __getitem__(self, i): return self.__data[i] s = OtherSeq(u0) v0 = self.type2test(s) assert len(v0) == len(s) s2 = "this is also a sequence" vv = self.type2test(s2) assert len(vv) == len(s2) # Create from various iteratables for s2 in ("123", '', range(1000), ("do", 1.2), range(2000, 2200, 5)): # type: ignore for g in (Sequence, IterFunc, IterGen, itermulti, iterfunc): assert self.type2test(g(s2)) == self.type2test(s2) assert self.type2test(IterFuncStop(s2)) == self.type2test() assert self.type2test(c for c in "123") == self.type2test("123") with pytest.raises(TypeError): self.type2test(IterNextOnly(s2)) with pytest.raises(TypeError): self.type2test(IterNoNext(s2)) with pytest.raises(ZeroDivisionError): self.type2test(IterGenExc(s2)) # Issue #23757 assert self.type2test(LyingTuple((2, ))) == self.type2test((1, )) assert self.type2test(LyingList([2])) == self.type2test([1]) def test_truth(self): assert not self.type2test() assert self.type2test([42]) def test_getitem(self): u = self.type2test([0, 1, 2, 3, 4]) for i in Len(u): assert u[i] == i assert u[int(i)] == i for i in range(-len(u), -1): assert u[i] == len(u) + i assert u[int(i)] == len(u) + i with pytest.raises(IndexError): u.__getitem__(-len(u) - 1) with pytest.raises(IndexError): u.__getitem__(len(u)) with pytest.raises(ValueError): u.__getitem__(slice(0, 10, 0)) u = self.type2test() with pytest.raises(IndexError): 
u.__getitem__(0) with pytest.raises(IndexError): u.__getitem__(-1) with pytest.raises(TypeError): u.__getitem__() a = self.type2test([10, 11]) assert a[0] == 10 assert a[1] == 11 assert a[-2] == 10 assert a[-1] == 11 with pytest.raises(IndexError): a.__getitem__(-3) with pytest.raises(IndexError): a.__getitem__(3) def test_getslice(self): l = [0, 1, 2, 3, 4] u = self.type2test(l) assert u[0:0] == self.type2test() assert u[1:2] == self.type2test([1]) assert u[-2:-1] == self.type2test([3]) assert u[-1000:1000] == u assert u[1000:-1000] == self.type2test([]) assert u[:] == u assert u[1:None] == self.type2test([1, 2, 3, 4]) assert u[None:3] == self.type2test([0, 1, 2]) # Extended slices assert u[::] == u assert u[::2] == self.type2test([0, 2, 4]) assert u[1::2] == self.type2test([1, 3]) assert u[::-1] == self.type2test([4, 3, 2, 1, 0]) assert u[::-2] == self.type2test([4, 2, 0]) assert u[3::-2] == self.type2test([3, 1]) assert u[3:3:-2] == self.type2test([]) assert u[3:2:-2] == self.type2test([3]) assert u[3:1:-2] == self.type2test([3]) assert u[3:0:-2] == self.type2test([3, 1]) assert u[::-100] == self.type2test([4]) assert u[100:-100:] == self.type2test([]) assert u[-100:100:] == u assert u[100:-100:-1] == u[::-1] assert u[-100:100:-1] == self.type2test([]) assert u[-100:100:2] == self.type2test([0, 2, 4]) # Test extreme cases with long ints a = self.type2test([0, 1, 2, 3, 4]) assert a[-pow(2, 128):3] == self.type2test([0, 1, 2]) assert a[3:pow(2, 145)] == self.type2test([3, 4]) assert a[3::sys.maxsize] == self.type2test([3]) def test_contains(self): u = self.type2test([0, 1, 2]) for i in u: assert i in u for i in min(u) - 1, max(u) + 1: assert i not in u with pytest.raises(TypeError): u.__contains__() def test_contains_fake(self): # Sequences must use rich comparison against each item # (unless "is" is true, or an earlier item answered) # So ALWAYS_EQ must be found in all non-empty sequences. assert ALWAYS_EQ not in self.type2test([]) assert ALWAYS_EQ in self.type2test([1]) assert 1 in self.type2test([ALWAYS_EQ]) assert NEVER_EQ not in self.type2test([]) def test_contains_order(self): # Sequences must test in-order. If a rich comparison has side # effects, these will be visible to tests against later members. # In this test, the "side effect" is a short-circuiting raise. 
class DoNotTestEq(Exception): pass class StopCompares: def __eq__(self, other): raise DoNotTestEq checkfirst = self.type2test([1, StopCompares()]) assert 1 in checkfirst checklast = self.type2test([StopCompares(), 1]) with pytest.raises(DoNotTestEq): checklast.__contains__(1) def test_len(self): assert len(self.type2test()) == 0 assert len(self.type2test([])) == 0 assert len(self.type2test([0])) == 1 assert len(self.type2test([0, 1, 2])) == 3 def test_minmax(self): u = self.type2test([0, 1, 2]) assert min(u) == 0 assert max(u) == 2 def test_addmul(self): u1 = self.type2test([0]) u2 = self.type2test([0, 1]) assert u1 == u1 + self.type2test() assert u1 == self.type2test() + u1 assert u1 + self.type2test([1]) == u2 assert self.type2test([-1]) + u1 == self.type2test([-1, 0]) assert self.type2test() == u2 * 0 assert self.type2test() == 0 * u2 assert self.type2test() == u2 * 0 assert self.type2test() == 0 * u2 assert u2 == u2 * 1 assert u2 == 1 * u2 assert u2 == u2 * 1 assert u2 == 1 * u2 assert u2 + u2 == u2 * 2 assert u2 + u2 == 2 * u2 assert u2 + u2 == u2 * 2 assert u2 + u2 == 2 * u2 assert u2 + u2 + u2 == u2 * 3 assert u2 + u2 + u2 == 3 * u2 class subclass(self.type2test): # type: ignore pass u3 = subclass([0, 1]) assert u3 == u3 * 1 assert u3 is not u3 * 1 def test_iadd(self): u = self.type2test([0, 1]) u += self.type2test() assert u == self.type2test([0, 1]) u += self.type2test([2, 3]) assert u == self.type2test([0, 1, 2, 3]) u += self.type2test([4, 5]) assert u == self.type2test([0, 1, 2, 3, 4, 5]) u = self.type2test("spam") u += self.type2test("eggs") assert u == self.type2test("spameggs") def test_imul(self): u = self.type2test([0, 1]) u *= 3 assert u == self.type2test([0, 1, 0, 1, 0, 1]) u *= 0 assert u == self.type2test([]) def test_getitemoverwriteiter(self): # Verify that __getitem__ overrides are not recognized by __iter__ class T(self.type2test): # type: ignore def __getitem__(self, key: Any) -> str: return str(key) + "!!!" 
assert next(iter(T((1, 2)))) == 1 def test_repeat(self): for m in range(4): s = tuple(range(m)) for n in range(-3, 5): assert self.type2test(s * n) == self.type2test(s) * n assert self.type2test(s) * (-4) == self.type2test([]) assert id(s) == id(s * 1) def test_bigrepeat(self): if sys.maxsize <= 2147483647: x = self.type2test([0]) x *= 2**16 with pytest.raises(MemoryError): x.__mul__(2**16) if hasattr(x, "__imul__"): with pytest.raises(MemoryError): x.__imul__(2**16) def test_subscript(self): a = self.type2test([10, 11]) assert a.__getitem__(0) == 10 assert a.__getitem__(1) == 11 assert a.__getitem__(-2) == 10 assert a.__getitem__(-1) == 11 with pytest.raises(IndexError): a.__getitem__(-3) with pytest.raises(IndexError): a.__getitem__(3) assert a.__getitem__(slice(0, 1)) == self.type2test([10]) assert a.__getitem__(slice(1, 2)) == self.type2test([11]) assert a.__getitem__(slice(0, 2)) == self.type2test([10, 11]) assert a.__getitem__(slice(0, 3)) == self.type2test([10, 11]) assert a.__getitem__(slice(3, 5)) == self.type2test([]) with pytest.raises(ValueError): a.__getitem__(slice(0, 10, 0)) with pytest.raises(TypeError): a.__getitem__('x') def test_count(self): a = self.type2test([0, 1, 2]) * 3 assert a.count(0) == 3 assert a.count(1) == 3 assert a.count(3) == 0 assert a.count(ALWAYS_EQ), 9 assert self.type2test([ALWAYS_EQ, ALWAYS_EQ]).count(1) == 2 if not PYPY38_PLUS: # TODO: figure out why the tests fail assert self.type2test([ALWAYS_EQ, ALWAYS_EQ]).count(NEVER_EQ) == 2 assert self.type2test([NEVER_EQ, NEVER_EQ]).count(ALWAYS_EQ) == 0 with pytest.raises(TypeError): a.count() class BadExc(Exception): pass class BadCmp: def __eq__(self, other): if other == 2: raise BadExc() return False with pytest.raises(BadExc): a.count(BadCmp()) @not_pypy("Doesn't work on PyPy") def test_index(self): u = self.type2test([0, 1]) assert u.index(0) == 0 assert u.index(1) == 1 with pytest.raises(ValueError): u.index(2) u = self.type2test([-2, -1, 0, 0, 1, 2]) assert u.count(0) == 2 assert u.index(0) == 2 assert u.index(0, 2) == 2 assert u.index(-2, -10) == 0 assert u.index(0, 3) == 3 assert u.index(0, 3, 4) == 3 with pytest.raises(ValueError): u.index(2, 0, -10) assert u.index(ALWAYS_EQ) == 0 assert self.type2test([ALWAYS_EQ, ALWAYS_EQ]).index(1) == 0 assert self.type2test([ALWAYS_EQ, ALWAYS_EQ]).index(NEVER_EQ) == 0 with pytest.raises(ValueError): self.type2test([NEVER_EQ, NEVER_EQ]).index(ALWAYS_EQ) with pytest.raises(TypeError): u.index() class BadExc(Exception): pass class BadCmp: def __eq__(self, other): if other == 2: raise BadExc() return False a = self.type2test([0, 1, 2, 3]) with pytest.raises(BadExc): a.index(BadCmp()) a = self.type2test([-2, -1, 0, 0, 1, 2]) assert a.index(0) == 2 assert a.index(0, 2) == 2 assert a.index(0, -4) == 2 assert a.index(-2, -10) == 0 assert a.index(0, 3) == 3 assert a.index(0, -3) == 3 assert a.index(0, 3, 4) == 3 assert a.index(0, -3, -2) == 3 assert a.index(0, -4 * sys.maxsize, 4 * sys.maxsize) == 2 with pytest.raises(ValueError): a.index(0, 4 * sys.maxsize, -4 * sys.maxsize) with pytest.raises(ValueError): a.index(2, 0, -10) def test_pickle(self): lst = self.type2test([4, 5, 6, 7]) for proto in range(pickle.HIGHEST_PROTOCOL + 1): lst2 = pickle.loads(pickle.dumps(lst, proto)) assert lst2 == lst assert id(lst2) != id(lst) domdf_python_tools-3.10.0/tests/test_bases.py000066400000000000000000000156611475315453000213750ustar00rootroot00000000000000""" test_bases ~~~~~~~~~~~~~~~ Test functions in bases.py """ # stdlib import copy import pickle # nosec: B101 from numbers 
import Number, Real from typing import no_type_check # 3rd party import pytest # this package from domdf_python_tools._is_match import is_match_with from domdf_python_tools.bases import Dictable, UserFloat class Person(Dictable): def __init__(self, name, age, occupation=None): super().__init__() self.name = str(name) self.age = int(age) self.occupation = occupation @property def __dict__(self): return dict( name=self.name, age=self.age, occupation=self.occupation, ) class Child(Person): def __init__(self, name, age, school): super().__init__(name, age, "Student") self.school = "school" @property def __dict__(self): class_dict = super().__dict__ class_dict["School"] = self.school return class_dict @pytest.fixture() def alice(): return Person("Alice", 20, "IRC Lurker") class TestDictable: def test_creation(self, alice): assert alice.name == "Alice" assert alice.age == 20 assert alice.occupation == "IRC Lurker" def test_str(self, alice: object): assert str(alice).startswith(" 6 assert seven > 6.0 assert seven > UserFloat(6) def test_ge(self): assert seven >= 6 assert seven >= 6.0 assert seven >= UserFloat(6) assert seven >= 7 assert seven >= 7.0 assert seven >= UserFloat(7) def test_pos(self): assert isinstance(+seven, UserFloat) assert +seven == seven assert +seven == 7 assert +seven == 7.0 def test_neg(self): assert isinstance(-seven, UserFloat) assert -seven == UserFloat(-7) assert -seven == -7 assert -seven == -7.0 def test_abs(self): assert isinstance(abs(+seven), UserFloat) assert abs(+seven) == seven assert abs(+seven) == 7 assert abs(+seven) == 7.0 assert isinstance(abs(-seven), UserFloat) assert abs(-seven) == UserFloat(7) assert abs(-seven) == 7 assert abs(-seven) == 7.0 def test_ne(self): assert seven != UserFloat(8) assert seven != 8 assert seven != 8.0 def test_hash(self): assert hash(seven) == hash(UserFloat(7)) assert hash(seven) != hash(UserFloat(8)) assert hash(seven) == hash(7) assert hash(seven) != hash(8) def test_isinstance(self): assert isinstance(seven, UserFloat) assert not isinstance(seven, float) assert not isinstance(7, UserFloat) # From https://github.com/dgilland/pydash/blob/develop/tests/test_predicates.py # MIT Licensed @pytest.mark.parametrize( "case,expected", [ (({"name": "fred", "age": 40}, {"age": 40}), True), (({"name": "fred", "age": 40}, {"age": 40, "active": True}), False), (([1, 2, 3], [1, 2]), True), (([1, 2, 3], [1, 2, 3, 4]), False), (({}, {}), True), (({'a': 1}, {}), True), (([], []), True), (([1], []), True), (([1, 2], [2, 4]), False), (([0, 1], [0, 1]), True), ], ) def test_is_match_with(case, expected): assert is_match_with(*case) == expected domdf_python_tools-3.10.0/tests/test_compat.py000066400000000000000000000002421475315453000215500ustar00rootroot00000000000000# this package from domdf_python_tools.compat import nullcontext def test_nullcontext(): with nullcontext("foo") as f: assert f == "foo" assert f == "foo" domdf_python_tools-3.10.0/tests/test_dates.py000066400000000000000000000310001475315453000213610ustar00rootroot00000000000000""" test_dates ~~~~~~~~~~~~~~~ Test functions in dates.py """ # stdlib import re from datetime import date, datetime, timedelta from typing import Union # 3rd party import pytest from coincidence.params import count # this package from domdf_python_tools import dates # TODO: test get_timezone try: # 3rd party import pytz test_date = datetime(1996, 10, 13, 2, 20).replace(tzinfo=pytz.utc) today = datetime.now(pytz.utc) # make sure UTC def test_utc_offset(): # Check that the correct UTC offsets are given for 
common timezones assert dates.get_utc_offset("US/Pacific", test_date) == timedelta(-1, 61200) assert dates.get_utc_offset("Europe/London", test_date) == timedelta(0, 3600) assert dates.get_utc_offset("Africa/Algiers", test_date) == timedelta(0, 3600) # TODO: Finish # Check that the correct UTC offsets are given for common timezones for today assert dates.get_utc_offset("US/Pacific", today) in { timedelta(-1, 57600), timedelta(-1, 61200), } assert dates.get_utc_offset("Europe/London", today) in { timedelta(0, 3600), # BST timedelta(0, 0), } assert dates.get_utc_offset("Africa/Algiers", today) == timedelta(0, 3600) # Check that the correct UTC offsets are given for common timezones when ``date`` is not given assert dates.get_utc_offset("US/Pacific") in { timedelta(-1, 57600), timedelta(-1, 61200), } assert dates.get_utc_offset("Europe/London") in { timedelta(0, 3600), # BST timedelta(0, 0), } assert dates.get_utc_offset("Africa/Algiers") == timedelta(0, 3600) def test_converting_timezone(): # No matter what timezone we convert to the timestamp should be the same for tz in pytz.all_timezones: assert test_date.astimezone(dates.get_timezone(tz, test_date), ).timestamp() == test_date.timestamp() == 845173200.0 if dates.get_utc_offset(tz, test_date): # otherwise the timezone stayed as UTC assert test_date.astimezone(dates.get_timezone(tz, test_date)).hour != test_date.hour # And again with today's date assert today.astimezone(dates.get_timezone(tz, today)).timestamp() == today.timestamp() if dates.get_utc_offset(tz, today): # otherwise the timezone stayed as UTC assert today.astimezone(dates.get_timezone(tz, today)).hour != today.hour def test_utc_timestamp_to_datetime(): # Going from a datetime object to timezone and back should give us the same object for tz in pytz.all_timezones: tzinfo = dates.get_timezone(tz, test_date) dt = test_date.astimezone(tzinfo) assert dates.utc_timestamp_to_datetime(dt.timestamp(), tzinfo) == dt # And again with today's date tzinfo = dates.get_timezone(tz, today) dt = today.astimezone(tzinfo) assert dates.utc_timestamp_to_datetime(dt.timestamp(), tzinfo) == dt @pytest.mark.xfail() def test_set_timezone(): # Setting the timezone should change the timestamp for tz in pytz.all_timezones: if dates.get_utc_offset(tz, today): # otherwise the timezone stayed as UTC # ensure timestamp did change target_tz = dates.get_timezone(tz, today) assert target_tz is not None assert dates.set_timezone(today, target_tz).timestamp() != today.timestamp() # Difference between "today" and the new timezone should be the timezone difference target_tz = dates.get_timezone(tz, today) assert target_tz is not None utc_offset = dates.get_utc_offset(tz, today) assert utc_offset is not None as_seconds = dates.set_timezone(today, target_tz).timestamp() + utc_offset.total_seconds() assert as_seconds == today.timestamp() if tz in { "America/Punta_Arenas", "America/Santiago", "Antarctica/Palmer", "Chile/Continental", "Chile/EasterIsland", "Pacific/Easter", }: continue if dates.get_utc_offset(tz, test_date): # otherwise the timezone stayed as UTC # print(dates.set_timezone(test_date, get_timezone(tz, test_date)).timestamp()) # print(repr(test_date)) # print(get_utc_offset(tz, test_date).total_seconds()) # print(test_date.timestamp()) # print(repr(dates.set_timezone(test_date, get_timezone(tz, test_date)))) # print(dates.set_timezone(test_date, get_timezone(tz, test_date)).timestamp()) # print(dates.set_timezone(test_date, get_timezone(tz, test_date)).timestamp() + get_utc_offset(tz, 
test_date).total_seconds()) # print(get_utc_offset(tz, test_date).total_seconds()) # print( # dates.set_timezone(test_date, get_timezone(tz, test_date)).timestamp() + # get_utc_offset(tz, test_date).total_seconds() # # ) target_tz = dates.get_timezone(tz, test_date) assert target_tz is not None offset = dates.get_utc_offset(tz, test_date) assert offset is not None as_seconds = dates.set_timezone(test_date, target_tz).timestamp() + offset.total_seconds() assert as_seconds == test_date.timestamp() except ImportError: def test_utc_offset_no_pytz(): with pytest.raises( ImportError, match=r"'get_utc_offset' requires pytz \(.*\), but it could not be imported", ): dates.get_utc_offset # pylint: disable=pointless-statement with pytest.raises( ImportError, match=r"'get_utc_offset' requires pytz \(.*\), but it could not be imported", ): # this package from domdf_python_tools.dates import get_utc_offset # noqa: F401 # TODO: Finish # import sys # from importlib.abc import MetaPathFinder # # class NoPytzPath(MetaPathFinder): # # def find_spec(self, fullname, path, target=None): # if fullname == "pytz": # raise ModuleNotFoundError(f"No module named '{fullname}'") # # # class TestDatesNoPytz: # # def test_import_pytz(self, fake_no_pytz): # with pytest.raises(ImportError): # import pytz # # this package # from domdf_python_tools import dates # # with pytest.raises(ImportError): # # 3rd party # import pytz # # def test_utc_offset_no_pytz(self, fake_no_pytz): # # this package # from domdf_python_tools import dates # # print(sys.modules.keys()) # # with pytest.raises( # ImportError, # match=r"'get_utc_offset' requires pytz \(.*\), but it could not be imported", # ): # dates.get_utc_offset # pylint: disable=pointless-statement # # with pytest.raises( # ImportError, # match=r"'get_utc_offset' requires pytz \(.*\), but it could not be imported", # ): # # # this package # from domdf_python_tools.dates import get_utc_offset @pytest.mark.parametrize("month_idx, month", enumerate(dates.month_full_names)) def test_parse_month(month_idx: int, month: str): month_idx += 1 # to make 1-indexed for i in range(3, len(month)): assert dates.parse_month(month.lower()[:i]) == month assert dates.parse_month(month.upper()[:i]) == month assert dates.parse_month(month.capitalize()[:i]) == month assert dates.parse_month(month_idx) == month def test_parse_month_errors(): for value in ["abc", 0, '0', -1, "-1", 13, "13"]: with pytest.raises(ValueError, match=fr"The given month \({value!r}\) is not recognised."): dates.parse_month(value) # type: ignore @pytest.mark.parametrize("month_idx, month", enumerate(dates.month_full_names)) def test_get_month_number_from_name(month_idx: int, month: str): month_idx += 1 # to make 1-indexed for i in range(3, len(month)): assert dates.get_month_number(month.lower()[:i]) == month_idx assert dates.get_month_number(month.upper()[:i]) == month_idx assert dates.get_month_number(month.capitalize()[:i]) == month_idx assert dates.get_month_number(month) == month_idx @count(13, 1) def test_get_month_number_from_no(count: int): assert dates.get_month_number(count) == count @pytest.mark.parametrize( "value, match", [ (0, "The given month (0) is not recognised."), (-1, "The given month (-1) is not recognised."), (13, "The given month (13) is not recognised."), ("abc", "The given month ('abc') is not recognised."), ('0', "The given month ('0') is not recognised."), ("-1", "The given month ('-1') is not recognised."), ("13", "The given month ('13') is not recognised."), ] ) def 
test_get_month_number_errors(value: Union[str, int], match: str): with pytest.raises(ValueError, match=re.escape(match)): dates.get_month_number(value) def test_check_date(): for month_idx, month in enumerate(dates.month_full_names): month_idx += 1 # to make 1-indexed if month_idx in {9, 4, 6, 11}: max_day = 30 elif month_idx == 2: max_day = 28 else: max_day = 31 for day in range(-5, 36): if month_idx == 2 and day == 29: for i in range(3, len(month)): assert dates.check_date(month.lower()[:i], 29) assert dates.check_date(month.upper()[:i], 29) assert dates.check_date(month.capitalize()[:i], 29) assert not dates.check_date(month.lower()[:i], 29, False) assert not dates.check_date(month.upper()[:i], 29, False) assert not dates.check_date(month.capitalize()[:i], 29, False) assert dates.check_date(month, 29) assert not dates.check_date(month, 29, False) elif 0 < day <= max_day: for i in range(3, len(month)): assert dates.check_date(month.lower()[:i], day) assert dates.check_date(month.upper()[:i], day) assert dates.check_date(month.capitalize()[:i], day) assert dates.check_date(month, day) else: for i in range(3, len(month)): assert not dates.check_date(month.lower()[:i], day) assert not dates.check_date(month.upper()[:i], day) assert not dates.check_date(month.capitalize()[:i], day) assert not dates.check_date(month, day) @pytest.mark.parametrize( "date", [ date(2000, 4, 23), date(2001, 4, 15), date(2002, 3, 31), date(2003, 4, 20), date(2004, 4, 11), date(2005, 3, 27), date(2006, 4, 16), date(2007, 4, 8), date(2008, 3, 23), date(2009, 4, 12), date(2010, 4, 4), date(2011, 4, 24), date(2012, 4, 8), date(2013, 3, 31), date(2014, 4, 20), date(2015, 4, 5), date(2016, 3, 27), date(2017, 4, 16), date(2018, 4, 1), date(2019, 4, 21), date(2020, 4, 12), date(2021, 4, 4), ] ) def test_calc_easter(date): assert dates.calc_easter(date.year) == date @pytest.mark.parametrize( "the_date, result", [ (date(month=3, day=2, year=2019), False), (date(month=4, day=7, year=2020), True), (date(month=8, day=17, year=2015), True), (date(month=12, day=25, year=2030), False), (date(month=3, day=29, year=2019), False), (date(month=3, day=30, year=2019), False), (date(month=3, day=31, year=2019), True), (date(month=4, day=1, year=2019), True), (date(month=10, day=25, year=2019), True), (date(month=10, day=26, year=2019), True), (date(month=10, day=27, year=2019), False), (date(month=10, day=28, year=2019), False), (date(month=3, day=27, year=2020), False), (date(month=3, day=28, year=2020), False), (date(month=3, day=29, year=2020), True), (date(month=3, day=30, year=2020), True), (date(month=10, day=23, year=2020), True), (date(month=10, day=24, year=2020), True), (date(month=10, day=25, year=2020), False), (date(month=10, day=26, year=2020), False), (date(month=3, day=26, year=2021), False), (date(month=3, day=27, year=2021), False), (date(month=3, day=28, year=2021), True), (date(month=3, day=29, year=2021), True), (date(month=10, day=29, year=2021), True), (date(month=10, day=30, year=2021), True), (date(month=10, day=31, year=2021), False), (date(month=11, day=1, year=2021), False), (date(month=3, day=25, year=2022), False), (date(month=3, day=26, year=2022), False), (date(month=3, day=27, year=2022), True), (date(month=3, day=28, year=2022), True), (date(month=10, day=28, year=2022), True), (date(month=10, day=29, year=2022), True), (date(month=10, day=30, year=2022), False), (date(month=10, day=31, year=2022), False), (date(month=3, day=24, year=2023), False), (date(month=3, day=25, year=2023), False), 
(date(month=3, day=26, year=2023), True), (date(month=3, day=27, year=2023), True), (date(month=10, day=27, year=2023), True), (date(month=10, day=28, year=2023), True), (date(month=10, day=29, year=2023), False), (date(month=10, day=30, year=2023), False), (date(month=3, day=29, year=2024), False), (date(month=3, day=30, year=2024), False), (date(month=3, day=31, year=2024), True), (date(month=4, day=1, year=2024), True), (date(month=10, day=25, year=2024), True), (date(month=10, day=26, year=2024), True), (date(month=10, day=27, year=2024), False), (date(month=10, day=28, year=2024), False), ] ) def test_is_bst(the_date, result: bool): assert dates.is_bst(the_date) is result domdf_python_tools-3.10.0/tests/test_delegators.py000066400000000000000000000050531475315453000224230ustar00rootroot00000000000000# stdlib import inspect from typing import Any, get_type_hints # this package from domdf_python_tools.delegators import delegate_kwargs, delegates def f(a: int = 1, b: float = 1.1, c: int = 2, d: list = [], e: tuple = (), f: str = '', g: bytes = b'') -> int: pass def test_delegate_kwargs(): @delegate_kwargs(f) def g(b: int, a: int = 7, **kwargs: Any): pass @delegate_kwargs(f) def h(a: int, b: int): pass sig = inspect.signature(g) assert list(sig.parameters.keys()) == ['b', 'a', 'c', 'd', 'e', 'f', 'g'] assert sig.parameters['a'].default == 7 assert sig.parameters['b'].default is inspect.Parameter.empty assert sig.parameters['c'].default == 2 assert sig.parameters['d'].default == [] assert sig.parameters['e'].default == () assert sig.parameters['f'].default == '' assert sig.parameters['g'].default == b'' assert sig.return_annotation is inspect.Parameter.empty # TODO assert get_type_hints(g) == { 'b': int, 'a': int, 'c': int, 'd': list, 'e': tuple, 'f': str, 'g': bytes, "return": int, } assert list(inspect.signature(h).parameters.keys()) == ['a', 'b'] def test_delegates(): @delegates(f) def g(*args, **kwargs): pass @delegates(f) def h(a: int, b: int): pass sig = inspect.signature(g) assert list(sig.parameters.keys()) == ['a', 'b', 'c', 'd', 'e', 'f', 'g'] assert sig.parameters['a'].default == 1 assert sig.parameters['b'].default == 1.1 assert sig.parameters['c'].default == 2 assert sig.parameters['d'].default == [] assert sig.parameters['e'].default == () assert sig.parameters['f'].default == '' assert sig.parameters['g'].default == b'' assert sig.return_annotation == int assert get_type_hints(g) == { 'a': int, 'b': float, 'c': int, 'd': list, 'e': tuple, 'f': str, 'g': bytes, "return": int, } assert list(inspect.signature(h).parameters.keys()) == ['a', 'b'] def test_delegates_method(): class F: @delegates(f) def g(self, *args, **kwargs) -> str: pass sig = inspect.signature(F.g) assert list(sig.parameters.keys()) == ["self", 'a', 'b', 'c', 'd', 'e', 'f', 'g'] assert sig.parameters['a'].default == 1 assert sig.parameters['b'].default == 1.1 assert sig.parameters['c'].default == 2 assert sig.parameters['d'].default == [] assert sig.parameters['e'].default == () assert sig.parameters['f'].default == '' assert sig.parameters['g'].default == b'' assert sig.return_annotation == str assert get_type_hints(F.g) == { 'a': int, 'b': float, 'c': int, 'd': list, 'e': tuple, 'f': str, 'g': bytes, "return": str, } domdf_python_tools-3.10.0/tests/test_diff_/000077500000000000000000000000001475315453000207645ustar00rootroot00000000000000domdf_python_tools-3.10.0/tests/test_diff_/modified000066400000000000000000000005101475315453000224630ustar00rootroot00000000000000Lorem ipsum dolor sit amet, consectetur 
adipiscing elit. Quisque feugiat, lectus et interdum feugiat, magna enim vestibulum diam, a ultrices urna odio at magna. Quisque ut ullamcorper justo. Integer lobortis eros eget diam varius eleifend. Donec ornare nisi vel purus aliquet consequat. Ut quis ipsum et nunc sodales tristique. domdf_python_tools-3.10.0/tests/test_diff_/original000066400000000000000000000006521475315453000225160ustar00rootroot00000000000000Lorem ipsum dolor sit amet, consectetur adipiscing elit. Quisque feugiat, lectus et interdum feugiat, magna enim vestibulum diam, a ultrices urna odio at magna. Quisque ut ullamcorper justo. Suspendisse ac tincidunt velit. Integer lobortis eros eget diam varius eleifend. Donec ornare nisi vel purus aliquet consequat. Ut quis ipsum et nunc sodales tristique. Maecenas justo libero, semper eget feugiat et, aliquam et mauris. domdf_python_tools-3.10.0/tests/test_dir_comparator.py000066400000000000000000000214111475315453000232730ustar00rootroot00000000000000# Adapted from https://github.com/python/cpython/blob/master/Lib/test/test_filecmp.py # Licensed under the Python Software Foundation License Version 2. # Copyright © 2001-2020 Python Software Foundation. All rights reserved. # Copyright © 2000 BeOpen.com. All rights reserved. # Copyright © 1995-2000 Corporation for National Research Initiatives. All rights reserved. # Copyright © 1991-1995 Stichting Mathematisch Centrum. All rights reserved. # # stdlib import filecmp import os import shutil from contextlib import redirect_stdout from io import StringIO # 3rd party import pytest # this package from domdf_python_tools.paths import DirComparator, PathPlus, compare_dirs class ComparatorTmpdirData: __slots__ = ("dir", "dir_same", "dir_diff", "dir_ignored", "caseinsensitive") dir: str # noqa: A003 # pylint: disable=redefined-builtin dir_same: str dir_diff: str dir_ignored: str caseinsensitive: bool @pytest.fixture() def comparator_tmpdir(tmp_pathplus: PathPlus) -> ComparatorTmpdirData: data = ComparatorTmpdirData() data.dir = os.path.join(tmp_pathplus, "dir") data.dir_same = os.path.join(tmp_pathplus, "dir-same") data.dir_diff = os.path.join(tmp_pathplus, "dir-diff") # Another dir is created under dir_same, but it has a name from the # ignored list so it should not affect testing results. 
data.dir_ignored = os.path.join(data.dir_same, ".hg") data.caseinsensitive = os.path.normcase('A') == os.path.normcase('a') for dir in (data.dir, data.dir_same, data.dir_diff, data.dir_ignored): # noqa: A001 # pylint: disable=redefined-builtin shutil.rmtree(dir, True) os.mkdir(dir) subdir_path = os.path.join(dir, "subdir") os.mkdir(subdir_path) if data.caseinsensitive and dir is data.dir_same: fn = "FiLe" # Verify case-insensitive comparison else: fn = "file" with open(os.path.join(dir, fn), 'w', encoding="UTF-8") as output: output.write('Contents of file go here.\n') with open(os.path.join(data.dir_diff, "file2"), 'w', encoding="UTF-8") as output: output.write('An extra file.\n') return data class TestDirComparator: def test_default_ignores(self): assert ".hg" in filecmp.DEFAULT_IGNORES # @pytest.mark.parametrize() def test_cmpfiles(self, comparator_tmpdir): assert filecmp.cmpfiles( comparator_tmpdir.dir, comparator_tmpdir.dir, ["file"], ) == (["file"], [], []), "Comparing directory to itself fails" assert filecmp.cmpfiles( comparator_tmpdir.dir, comparator_tmpdir.dir_same, ["file"], ) == (["file"], [], []), "Comparing directory to same fails" # Try it with shallow=False assert filecmp.cmpfiles( comparator_tmpdir.dir, comparator_tmpdir.dir, ["file"], shallow=False, ) == (["file"], [], []), "Comparing directory to itself fails" assert filecmp.cmpfiles( comparator_tmpdir.dir, comparator_tmpdir.dir_same, ["file"], shallow=False, ), "Comparing directory to same fails" # Add different file2 with open(os.path.join(comparator_tmpdir.dir, "file2"), 'w', encoding="UTF-8") as output: output.write('Different contents.\n') assert filecmp.cmpfiles( comparator_tmpdir.dir, comparator_tmpdir.dir_same, ["file", "file2"], ) != (["file"], ["file2"], []), "Comparing mismatched directories fails" def _assert_lists(self, actual, expected): """ Assert that two lists are equal, up to ordering. 
""" assert sorted(actual) == sorted(expected) def test_dircmp(self, comparator_tmpdir): # Check attributes for comparison of two identical directories left_dir, right_dir = comparator_tmpdir.dir, comparator_tmpdir.dir_same d = DirComparator(left_dir, right_dir) assert d.left == left_dir assert d.right == right_dir if comparator_tmpdir.caseinsensitive: self._assert_lists(d.left_list, ["file", "subdir"]) self._assert_lists(d.right_list, ["FiLe", "subdir"]) else: self._assert_lists(d.left_list, ["file", "subdir"]) self._assert_lists(d.right_list, ["file", "subdir"]) self._assert_lists(d.common, ["file", "subdir"]) self._assert_lists(d.common_dirs, ["subdir"]) assert d.left_only == [] assert d.right_only == [] assert d.same_files == ["file"] assert d.diff_files == [] expected_report = [ f"diff {comparator_tmpdir.dir} {comparator_tmpdir.dir_same}", "Identical files : ['file']", "Common subdirectories : ['subdir']", ] self._assert_report(d.report, expected_report) # Check attributes for comparison of two different directories (right) left_dir, right_dir = comparator_tmpdir.dir, comparator_tmpdir.dir_diff d = DirComparator(left_dir, right_dir) assert d.left == left_dir assert d.right == right_dir self._assert_lists(d.left_list, ["file", "subdir"]) self._assert_lists(d.right_list, ["file", "file2", "subdir"]) self._assert_lists(d.common, ["file", "subdir"]) self._assert_lists(d.common_dirs, ["subdir"]) assert d.left_only == [] assert d.right_only == ["file2"] assert d.same_files == ["file"] assert d.diff_files == [] expected_report = [ f"diff {comparator_tmpdir.dir} {comparator_tmpdir.dir_diff}", f"Only in {comparator_tmpdir.dir_diff} : ['file2']", "Identical files : ['file']", "Common subdirectories : ['subdir']", ] self._assert_report(d.report, expected_report) # Check attributes for comparison of two different directories (left) left_dir, right_dir = comparator_tmpdir.dir, comparator_tmpdir.dir_diff shutil.move( os.path.join(comparator_tmpdir.dir_diff, "file2"), os.path.join(comparator_tmpdir.dir, "file2"), ) d = DirComparator(left_dir, right_dir) assert d.left == left_dir assert d.right == right_dir self._assert_lists(d.left_list, ["file", "file2", "subdir"]) self._assert_lists(d.right_list, ["file", "subdir"]) self._assert_lists(d.common, ["file", "subdir"]) assert d.left_only == ["file2"] assert d.right_only == [] assert d.same_files == ["file"] assert d.diff_files == [] expected_report = [ f"diff {comparator_tmpdir.dir} {comparator_tmpdir.dir_diff}", f"Only in {comparator_tmpdir.dir} : ['file2']", "Identical files : ['file']", "Common subdirectories : ['subdir']", ] self._assert_report(d.report, expected_report) # Add different file2 with open(os.path.join(comparator_tmpdir.dir_diff, "file2"), 'w', encoding="UTF-8") as output: output.write('Different contents.\n') d = DirComparator(comparator_tmpdir.dir, comparator_tmpdir.dir_diff) assert d.same_files == ["file"] assert d.diff_files == ["file2"] expected_report = [ f"diff {comparator_tmpdir.dir} {comparator_tmpdir.dir_diff}", "Identical files : ['file']", "Differing files : ['file2']", "Common subdirectories : ['subdir']", ] self._assert_report(d.report, expected_report) def test_dircmp_subdirs_type(self, comparator_tmpdir): """ Check that dircmp.subdirs respects subclassing. 
""" class MyDirCmp(DirComparator): pass d = MyDirCmp(comparator_tmpdir.dir, comparator_tmpdir.dir_diff) sub_dirs = d.subdirs assert list(sub_dirs.keys()) == ["subdir"] sub_dcmp = sub_dirs["subdir"] assert type(sub_dcmp) == MyDirCmp # pylint: disable=unidiomatic-typecheck def test_report_partial_closure(self, comparator_tmpdir): left_dir, right_dir = comparator_tmpdir.dir, comparator_tmpdir.dir_same d = DirComparator(left_dir, right_dir) left_subdir = os.path.join(left_dir, "subdir") right_subdir = os.path.join(right_dir, "subdir") expected_report = [ f"diff {comparator_tmpdir.dir} {comparator_tmpdir.dir_same}", "Identical files : ['file']", "Common subdirectories : ['subdir']", '', f"diff {left_subdir} {right_subdir}", ] self._assert_report(d.report_partial_closure, expected_report) def test_report_full_closure(self, comparator_tmpdir): left_dir, right_dir = comparator_tmpdir.dir, comparator_tmpdir.dir_same d = DirComparator(left_dir, right_dir) left_subdir = os.path.join(left_dir, "subdir") right_subdir = os.path.join(right_dir, "subdir") expected_report = [ f"diff {comparator_tmpdir.dir} {comparator_tmpdir.dir_same}", "Identical files : ['file']", "Common subdirectories : ['subdir']", '', f"diff {left_subdir} {right_subdir}", ] self._assert_report(d.report_full_closure, expected_report) def _assert_report(self, dircmp_report, expected_report_lines): stdout = StringIO() with redirect_stdout(stdout): dircmp_report() report_lines = stdout.getvalue().strip().split('\n') assert report_lines == expected_report_lines def test_compare_dirs(tmp_pathplus: PathPlus): dir_a = tmp_pathplus / "dir_a" dir_b = tmp_pathplus / "dir_b" dir_a.mkdir() dir_b.mkdir() (dir_a / "foo").mkdir() (dir_b / "foo").mkdir() (dir_a / "bar").mkdir() (dir_b / "bar").mkdir() (dir_a / "baz").mkdir() (dir_a / "baz" / "code.py").touch() (dir_b / "foo" / "src").mkdir() (dir_b / "foo" / "src" / "code.py").touch() assert not compare_dirs(dir_a, dir_b) domdf_python_tools-3.10.0/tests/test_docstrings.py000066400000000000000000000022151475315453000224460ustar00rootroot00000000000000# stdlib import doctest import inspect import shutil from textwrap import indent # 3rd party import pytest # this package from domdf_python_tools import getters, iterative, pagesizes, secrets, stringlist, utils, words from domdf_python_tools.utils import redirect_output VERBOSE = 1 ret = 0 @pytest.mark.parametrize("module", [iterative, getters, secrets, stringlist, utils, words, pagesizes.units]) def test_docstrings(module): # Check that we were actually given a module. if inspect.ismodule(module): print(f"Running doctest in {module!r}".center(shutil.get_terminal_size().columns, '=')) else: raise TypeError(f"testmod: module required; {module!r}") with redirect_output(combine=True) as (stdout, stderr): # Find, parse, and run all tests in the given module. 
finder = doctest.DocTestFinder() runner = doctest.DocTestRunner(verbose=VERBOSE >= 2) for test in finder.find(module, module.__name__): runner.run(test) runner.summarize(verbose=bool(VERBOSE)) # results = doctest.TestResults(runner.failures, runner.tries) print(indent(stdout.getvalue(), " ")) if runner.failures: pytest.fail(f"{runner.failures} tests failed") domdf_python_tools-3.10.0/tests/test_doctools.py000066400000000000000000000312211475315453000221140ustar00rootroot00000000000000""" test_doctools ~~~~~~~~~~~~~~~ Test functions in doctools.py """ # stdlib import math import sys from typing import Iterable, NamedTuple, get_type_hints # 3rd party import pytest from coincidence import PEP_563, max_version # this package from domdf_python_tools import doctools from domdf_python_tools.bases import Dictable from domdf_python_tools.compat import PYPY from domdf_python_tools.doctools import ( base_int_docstrings, base_new_docstrings, container_docstrings, operator_docstrings, prettify_docstrings ) # TODO: test sphinxification of docstrings class Cafe: """ Generic class for a Cafe """ def __init__(self): self._dish1 = "egg and bacon" self._dish2 = "egg sausage and bacon" self._dish3 = "egg and spam" self._dish4 = "egg bacon and spam" self._opens_at = 7 self._closes_at = 6 @property def menu(self): """ Returns the menu of the cafe :return: :rtype: """ return [self._dish1, self._dish2, self._dish3, self._dish4] @property def opening_hours(self): """ Returns the opening hours of the Cafe :rtype: str """ return f"Open every day {self._opens_at}am - {self._closes_at}pm" def set_opening_hours(self, opens_at, closes_at): """ Sets the opening hours of the Cafe :param opens_at: :type opens_at: :param closes_at: :type closes_at: :return: :rtype: """ self._opens_at = opens_at self._closes_at = closes_at @property def owner(self): """ Returns the owner of the Cafe :rtype: str """ return "Unknown" @property def serves_spam(self): """ Returns whether the Cafe serves spam :rtype: bool """ return True class SpamCafe(Cafe): """ Cafe that serves Spam to Vikings """ def __init__(self): super().__init__() self._todays_special = ( "Lobster Thermidor au Crevette with a Mornay " "sauce served in a Provencale manner with " "shallots and aubergines garnished with truffle " "pate, brandy and with a fried egg on top and spam." ) @doctools.is_documented_by(Cafe.menu) # type: ignore @property def menu(self): return super().menu + [self._todays_special] @doctools.is_documented_by(Cafe.opening_hours) # type: ignore @property def opening_hours(self): return f"""Open Monday-Saturday {self._opens_at}am - {self._closes_at}pm Please note our opening hours may vary due to COVID-19""" @doctools.append_docstring_from(Cafe.set_opening_hours) def set_opening_hours(self, opens_at, closes_at): """I will not buy this record, it is scratched. """ self._opens_at = opens_at self._closes_at = closes_at @doctools.append_docstring_from(math.ceil) def ceil(self, x): """ I don't know why the cafe has a ceil function, but we'd better document it properly. """ return math.ceil(x) @property def owner(self): return "Terry Jones" def documented_function(a: float, b: float, c: float, d: float) -> float: """ This function is documented. It multiplies the four values `a`, `b`, `c`, and `d` together. 
:type a: float :type b: float :type c: float :type d: float """ return a * b * c * d @doctools.is_documented_by(documented_function) def undocumented_function(a: float, b: float, c: float, d: int) -> float: return d * c * b * a @doctools.append_docstring_from(documented_function) def partially_documented_function(a: float, b: float, c: float, d: float) -> None: """ This function works like ``documented_function`` except it returns the result telepathically. """ d * c * b * a # pylint: disable=pointless-statement class DummyClass: @doctools.is_documented_by(documented_function) def function_in_class_with_same_args(self, a, b, c, d): return @pytest.mark.parametrize( "docstring, expects", [ ("\t\t\t ", ''), ("\t\t\t Spam", "Spam"), ("\t\t\t Spam \t\t\t", "Spam \t\t\t"), ("\t\t\t Spam\n \t\t\t", "Spam\n"), (" \t\t\t", ''), (" \t\t\tSpam", "Spam"), (" \t\t\tSpam\t\t\t ", "Spam\t\t\t "), (" \t\t\tSpam\n\t\t\t ", "Spam\n"), ('', ''), (None, ''), (False, ''), (0, ''), ([], ''), ] ) def test_deindent_string(docstring, expects): assert doctools.deindent_string(docstring) == expects @pytest.mark.xfail(PEP_563, reason="The future of PEP 563 is unclear at this time.") def test_decorators(): # Check the ``SpamCafe`` class has has its docstrings modified appropriately. # menu and opening_hours should have been copied from menu of the superclass assert SpamCafe.menu.__doc__ == Cafe.menu.__doc__ assert SpamCafe.opening_hours.__doc__ == Cafe.opening_hours.__doc__ # set_opening_hours and ceil should have extra text at the beginning assert SpamCafe.set_opening_hours.__doc__.startswith( # type: ignore "I will not buy this record, it is scratched." ) assert (doctools.deindent_string(SpamCafe.set_opening_hours.__doc__ )).endswith(doctools.deindent_string(Cafe.set_opening_hours.__doc__)) # Dedented both strings to be sure of equivalence assert SpamCafe.ceil.__doc__.startswith( # type: ignore "I don't know why the cafe has a ceil function, but we'd better document it properly.", ) assert doctools.deindent_string(SpamCafe.ceil.__doc__ ).rstrip().endswith(doctools.deindent_string(math.ceil.__doc__).rstrip()) # Dedented both strings to be sure of equivalence # Functions assert undocumented_function.__doc__ == documented_function.__doc__ assert undocumented_function.__name__ == "undocumented_function" if PEP_563: assert undocumented_function.__annotations__ == { 'a': "float", 'b': "float", 'c': "float", 'd': "int", "return": "float" } else: assert undocumented_function.__annotations__ == { 'a': float, 'b': float, 'c': float, 'd': int, "return": float } assert partially_documented_function.__doc__.startswith( # type: ignore "This function works like ``documented_function`` except it returns the result telepathically.", ) assert (doctools.deindent_string(partially_documented_function.__doc__ )).endswith(doctools.deindent_string(documented_function.__doc__)) # Dedented both strings to be sure of equivalence assert DummyClass.function_in_class_with_same_args.__doc__ == documented_function.__doc__ assert DummyClass.function_in_class_with_same_args.__name__ == "function_in_class_with_same_args" def test_document_object_from_another(): def funA(): pass doctools.document_object_from_another(funA, str) assert funA.__doc__ == str.__doc__ doctools.document_object_from_another(funA, int) assert funA.__doc__ == int.__doc__ doctools.document_object_from_another(funA, math.ceil) assert funA.__doc__ == math.ceil.__doc__ def test_append_doctring_from_another(): def funB(): "Hello" # noqa: Q002 def funC(): "World" # noqa: Q002 def 
funD(): pass assert funB.__doc__ == "Hello" assert funC.__doc__ == "World" doctools.append_doctring_from_another(funB, funC) assert funB.__doc__ == "Hello\n\nWorld\n" doctools.append_doctring_from_another(funD, funB) assert funD.__doc__ == "Hello\n\nWorld\n" def test_still_callable(): cafe = Cafe() assert cafe.menu == [ "egg and bacon", "egg sausage and bacon", "egg and spam", "egg bacon and spam", ] assert cafe.opening_hours == "Open every day 7am - 6pm" cafe.set_opening_hours(9, 5) assert cafe.opening_hours == "Open every day 9am - 5pm" assert cafe.owner == "Unknown" assert cafe.serves_spam is True spam_cafe = SpamCafe() assert spam_cafe.menu == [ "egg and bacon", "egg sausage and bacon", "egg and spam", "egg bacon and spam", "Lobster Thermidor au Crevette with a Mornay " "sauce served in a Provencale manner with " "shallots and aubergines garnished with truffle " "pate, brandy and with a fried egg on top and spam.", ] assert spam_cafe.opening_hours == """Open Monday-Saturday 7am - 6pm Please note our opening hours may vary due to COVID-19""" spam_cafe.set_opening_hours(9, 5) assert spam_cafe.opening_hours == """Open Monday-Saturday 9am - 5pm Please note our opening hours may vary due to COVID-19""" assert spam_cafe.owner == "Terry Jones" assert spam_cafe.serves_spam is True assert spam_cafe.ceil(5.5) == 6 assert documented_function(1, 2, 3, 4) == 24 assert undocumented_function(1, 2, 3, 4) == 24 assert partially_documented_function(1, 2, 3, 4) is None def test_make_sphinx_links(): original = """ This is a docstring that contains references to ``str``, ``int``, ``float`` and ``None``, but lacks proper references to them when rendered in Sphinx. :return: pi :rtype: float """ sphinx = """ This is a docstring that contains references to :class:`str`, :class:`int`, :class:`float` and :py:obj:`None`, but lacks proper references to them when rendered in Sphinx. :return: pi :rtype: float """ assert doctools.make_sphinx_links(original) == sphinx def test_sphinxify_docstring(): @doctools.sphinxify_docstring() def demo_function(): """ This is a docstring that contains references to ``str``, ``int``, and ``float`` but lacks proper references to them when rendered in Sphinx. :return: pi :rtype: float """ # noqa: SXL001 return math.pi if sys.version_info >= (3, 13): assert demo_function.__doc__ == """ This is a docstring that contains references to :class:`str`, :class:`int`, and :class:`float` but lacks proper references to them when rendered in Sphinx. :return: pi :rtype: float """ else: assert demo_function.__doc__ == """ This is a docstring that contains references to :class:`str`, :class:`int`, and :class:`float` but lacks proper references to them when rendered in Sphinx. :return: pi :rtype: float """ @prettify_docstrings class Klasse: def __delattr__(self, item): ... def __dir__(self): ... def __eq__(self, other): ... def __getattribute__(self, item): ... def __ge__(self, other): ... def __gt__(self, other): ... def __hash__(self): ... def __lt__(self, other): ... def __le__(self, other): ... def __ne__(self, other): ... def __setattr__(self, item, value): ... def __sizeof__(self): ... def __str__(self): ... def __contains__(self, item): ... def __getitem__(self, item): ... def __setitem__(self, item, value): ... def __delitem__(self, item): ... def __and__(self): ... def __add__(self, other): ... def __abs__(self): ... def __divmod__(self, other): ... def __floordiv__(self, other): ... def __invert__(self): ... def __lshift__(self, other): ... def __mod__(self, other): ... 
def __mul__(self, other): ... def __neg__(self): ... def __or__(self, other): ... def __pos__(self): ... def __pow__(self, other): ... def __radd__(self, other): ... def __rand__(self, other): ... def __rdivmod__(self, other): ... def __rfloordiv__(self, other): ... def __rlshift__(self, other): ... def __rmod__(self, other): ... def __rmul__(self, other): ... def __ror__(self, other): ... def __rpow__(self, other): ... def __rrshift__(self, other): ... def __rshift__(self, other): ... def __rsub__(self, other): ... def __rtruediv__(self, other): ... def __rxor__(self, other): ... def __sub__(self, other): ... def __truediv__(self, other): ... def __xor__(self, other): ... def __float__(self): ... def __int__(self): ... def __repr__(self): ... def __bool__(self): ... def test_prettify_docstrings(): all_docstrings = { **base_new_docstrings, **container_docstrings, **operator_docstrings, **base_int_docstrings, } for attr_name, docstring in all_docstrings.items(): if PYPY and attr_name in {"__delattr__", "__dir__"}: continue assert getattr(Klasse, attr_name).__doc__ == docstring assert get_type_hints(Klasse.__eq__)["return"] is bool assert get_type_hints(Klasse.__ge__)["return"] is bool assert get_type_hints(Klasse.__gt__)["return"] is bool assert get_type_hints(Klasse.__lt__)["return"] is bool assert get_type_hints(Klasse.__le__)["return"] is bool assert get_type_hints(Klasse.__ne__)["return"] is bool assert get_type_hints(Klasse.__repr__)["return"] is str assert get_type_hints(Klasse.__str__)["return"] is str assert get_type_hints(Klasse.__int__)["return"] is int assert get_type_hints(Klasse.__float__)["return"] is float assert get_type_hints(Klasse.__bool__)["return"] is bool assert Klasse.__repr__.__doc__ == "Return a string representation of the :class:`~tests.test_doctools.Klasse`." @max_version("3.7") def test_prettify_with_method(): class F(Iterable): pass assert prettify_docstrings(F).__getitem__.__doc__ != "Return ``self[key]``." # type: ignore class G(Dictable): pass assert prettify_docstrings(G).__getitem__.__doc__ != "Return ``self[key]``." # type: ignore def test_prettify_namedtuple(): @prettify_docstrings class T(NamedTuple): a: str b: float assert T.__repr__.__doc__ == "Return a string representation of the :class:`~tests.test_doctools.T`." domdf_python_tools-3.10.0/tests/test_getters.py000066400000000000000000000252301475315453000217460ustar00rootroot00000000000000# Adapted from https://github.com/python/cpython/blob/master/Lib/test/test_operator.py # Licensed under the Python Software Foundation License Version 2. # Copyright © 2001-2020 Python Software Foundation. All rights reserved. # Copyright © 2000 BeOpen.com. All rights reserved. # Copyright © 1995-2000 Corporation for National Research Initiatives. All rights reserved. # Copyright © 1991-1995 Stichting Mathematisch Centrum. All rights reserved. 
# # stdlib import pickle from typing import Any # 3rd party import pytest from funcy.funcs import rpartial # type: ignore # this package import domdf_python_tools from domdf_python_tools.getters import attrgetter, itemgetter, methodcaller evaluate = rpartial(eval, {"domdf_python_tools": domdf_python_tools}, {"domdf_python_tools": domdf_python_tools}) class TestAttrgetter: def test_attrgetter(self): class A: pass a = A() a.name = "john" # type: ignore b = A() b.name = "graham" # type: ignore f = attrgetter(0, "name") assert f([a, b]) == "john" f = attrgetter(1, "name") assert f([a, b]) == "graham" with pytest.raises(TypeError, match=r"__call__\(\) missing 1 required positional argument: 'obj'"): f() # type: ignore with pytest.raises(TypeError, match=r"__call__\(\) takes 2 positional arguments but 3 were given"): f(a, "cleese") # type: ignore with pytest.raises(TypeError, match=r"__call__\(\) got an unexpected keyword argument 'surname'"): f(a, surname="cleese") # type: ignore f = attrgetter(0, "rank") with pytest.raises(AttributeError, match="'A' object has no attribute 'rank'"): f([a, b]) with pytest.raises(TypeError, match="attribute name must be a string"): attrgetter(0, 2) # type: ignore[arg-type] with pytest.raises(TypeError, match="'idx' must be an integer"): attrgetter("hello", 0) # type: ignore[arg-type] with pytest.raises(TypeError, match=r"__init__\(\) missing 1 required positional argument: 'attr'"): attrgetter(0) # type: ignore[call-arg] f = attrgetter(1, "name") with pytest.raises(IndexError, match="list index out of range"): f([]) class C: def __getattr__(self, name): raise SyntaxError with pytest.raises(SyntaxError, match="None"): attrgetter(0, "foo")([C()]) # recursive gets a = A() a.name = "john" # type: ignore a.child = A() # type: ignore a.child.name = "thomas" # type: ignore f = attrgetter(3, "child.name") assert f([1, 2, 3, a]) == "thomas" with pytest.raises(AttributeError, match="'A' object has no attribute 'child'"): f([1, 2, 3, a.child]) # type: ignore f = attrgetter(1, "child.name") assert f([1, a]) == "thomas" f = attrgetter(2, "child.child.name") with pytest.raises(AttributeError, match="'A' object has no attribute 'child'"): f([1, 2, a]) f = attrgetter(0, "child.") with pytest.raises(AttributeError, match="'A' object has no attribute ''"): f([a]) f = attrgetter(0, ".child") with pytest.raises(AttributeError, match="'A' object has no attribute ''"): f([a]) a.child.child = A() # type: ignore a.child.child.name = "johnson" # type: ignore f = attrgetter(0, "child.child.name") assert f([a]) == "johnson" @pytest.mark.parametrize("proto", range(pickle.HIGHEST_PROTOCOL + 1)) def test_pickle(self, proto: int): class A: pass a = A() a.x = 'X' # type: ignore a.y = 'Y' # type: ignore a.z = 'Z' # type: ignore a.t = A() # type: ignore a.t.u = A() # type: ignore a.t.u.v = 'V' # type: ignore f = attrgetter(0, 'x') f2 = copy(f, proto) assert repr(f2) == repr(f) assert f2([a]) == f([a]) def test_repr(self): assert repr(attrgetter(0, "name")) == "domdf_python_tools.getters.attrgetter(idx=0, attr='name')" assert repr(attrgetter(1, "value")) == "domdf_python_tools.getters.attrgetter(idx=1, attr='value')" evaluate(repr(attrgetter(0, "name"))) evaluate(repr(attrgetter(1, "value"))) class TestItemgetter: def test_itemgetter(self): a = "ABCDE" f = itemgetter(0, 2) assert f([a]) == 'C' f = itemgetter(2, 2) assert f([1, 2, a]) == 'C' with pytest.raises(TypeError, match=r"__call__\(\) missing 1 required positional argument: 'obj'"): f() # type: ignore with pytest.raises(TypeError, 
match=r"__call__\(\) takes 2 positional arguments but 3 were given"): f(a, 3) # type: ignore with pytest.raises(TypeError, match=r"__call__\(\) got an unexpected keyword argument 'size'"): f(a, size=3) # type: ignore f = itemgetter(1, 10) with pytest.raises(IndexError, match="list index out of range"): f([]) with pytest.raises(IndexError, match="list index out of range"): f([1]) with pytest.raises(IndexError, match="string index out of range"): f([(), a]) class C: def __getitem__(self, name): raise SyntaxError with pytest.raises(SyntaxError, match="None"): itemgetter(2, 42)([1, (), C()]) f = itemgetter(0, "name") with pytest.raises(TypeError, match="string( index)? indices must be integers( or slices, not str)?"): f([a]) with pytest.raises( TypeError, match=r"__init__\(\) missing 2 required positional arguments: 'idx' and 'item'", ): itemgetter() # type: ignore with pytest.raises(TypeError, match=r"__init__\(\) missing 1 required positional argument: 'item'"): itemgetter(1) # type: ignore with pytest.raises(TypeError, match=r"__init__\(\) missing 1 required positional argument: 'item'"): itemgetter("abc") # type: ignore with pytest.raises(TypeError, match="'idx' must be an integer"): itemgetter("abc", 2) # type: ignore d = dict(key="val") f = itemgetter(1, "key") assert f([{}, d]) == "val" f = itemgetter(1, "nonkey") with pytest.raises(KeyError, match="nonkey"): f([{}, d]) inventory = [("apple", 3), ("pear", 5), ("banana", 2), ("orange", 1)] getcount = itemgetter(0, 1) assert list(map(getcount, inventory)) == ['p', 'e', 'a', 'r'] assert sorted(inventory, key=getcount) == [("banana", 2), ("pear", 5), ("apple", 3), ("orange", 1)] # interesting indices t = tuple("abcde") assert itemgetter(-1, -1)([1, 2, t]) == 'e' assert itemgetter(1, slice(2, 4))([1, t]) == ('c', 'd') # interesting sequences class T(tuple): """ Tuple subclass """ assert itemgetter(2, 0)([T("abc"), T("def"), T("ghi")]) == 'g' assert itemgetter(2, 0)([range(100, 200), range(200, 300), range(300, 400)]) == 300 @pytest.mark.parametrize("proto", range(pickle.HIGHEST_PROTOCOL + 1)) def test_pickle(self, proto: int): a = "ABCDE" f = itemgetter(0, 2) f2 = copy(f, proto) assert repr(f2) == repr(f) assert f2([a]) == f([a]) def test_repr(self): assert repr(itemgetter(0, 1)) == "domdf_python_tools.getters.itemgetter(idx=0, item=1)" assert repr(itemgetter(1, 2)) == "domdf_python_tools.getters.itemgetter(idx=1, item=2)" evaluate(repr(itemgetter(0, 1))) evaluate(repr(itemgetter(1, 2))) class TestMethodcaller: def test_methodcaller(self): with pytest.raises( TypeError, match=r"__init__\(\) missing 2 required positional arguments: '_idx' and '_name'", ): methodcaller() # type: ignore with pytest.raises(TypeError, match=r"__init__\(\) missing 1 required positional argument: '_name'"): methodcaller(12) # type: ignore with pytest.raises(TypeError, match=r"__init__\(\) missing 1 required positional argument: '_name'"): methodcaller("name") # type: ignore with pytest.raises(TypeError, match="'_idx' must be an integer"): methodcaller("name", 12) # type: ignore with pytest.raises(TypeError, match="method name must be a string"): methodcaller(0, 12) # type: ignore f = methodcaller(1, "foo") with pytest.raises(IndexError, match="list index out of range"): f([]) with pytest.raises(IndexError, match="list index out of range"): f([1]) class A: def foo(self, *args, **kwds): return args[0] + args[1] def bar(self, f=42): # noqa: MAN001,MAN002 return f def baz(*args, **kwds): # noqa: MAN002 return kwds["name"], kwds["self"] a = A() f = methodcaller(2, 
"foo") with pytest.raises(IndexError, match="tuple index out of range"): f(["abc", 123, a]) f = methodcaller(1, "foo", 1, 2) assert f([1, a]) == 3 with pytest.raises(TypeError, match=r"__call__\(\) missing 1 required positional argument: 'obj'"): f() # type: ignore with pytest.raises(TypeError, match=r"__call__\(\) takes 2 positional arguments but 3 were given"): f(a, 3) # type: ignore with pytest.raises(TypeError, match=r"__call__\(\) got an unexpected keyword argument 'spam'"): f(a, spam=3) # type: ignore f = methodcaller(0, "bar") assert f([a]) == 42 with pytest.raises(TypeError, match=r"__call__\(\) takes 2 positional arguments but 3 were given"): f([a], [a]) # type: ignore f = methodcaller(0, "bar", f=5) assert f([a]) == 5 f = methodcaller(0, "baz", name="spam", self="eggs") assert f([a]) == ("spam", "eggs") @pytest.mark.parametrize("proto", range(pickle.HIGHEST_PROTOCOL + 1)) def test_pickle(self, proto: int): class A: def foo(self, *args, **kwds): return args[0] + args[1] def bar(self, f=42): return f def baz(*args, **kwds): return kwds["name"], kwds["self"] a = A() f = methodcaller(0, "bar") f2 = copy(f, proto) assert repr(f2) == repr(f) assert f2([a]) == f([a]) # positional args f = methodcaller(0, "foo", 1, 2) f2 = copy(f, proto) assert repr(f2) == repr(f) assert f2([a]) == f([a]) # keyword args f = methodcaller(0, "bar", f=5) f2 = copy(f, proto) assert repr(f2) == repr(f) assert f2([a]) == f([a]) f = methodcaller(0, "baz", self="eggs", name="spam") f2 = copy(f, proto) # Can't test repr consistently with multiple keyword args assert f2([a]) == f([a]) def test_repr(self): assert repr(methodcaller(0, "lower")) == "domdf_python_tools.getters.methodcaller(0, 'lower')" assert repr(methodcaller(1, "__iter__")) == "domdf_python_tools.getters.methodcaller(1, '__iter__')" assert repr( methodcaller(1, "__iter__", "arg1") ) == "domdf_python_tools.getters.methodcaller(1, '__iter__', 'arg1')" assert repr( methodcaller(1, "__iter__", kw1="kwarg1") ) == "domdf_python_tools.getters.methodcaller(1, '__iter__', kw1='kwarg1')" assert repr( methodcaller(1, "__iter__", "arg1", "arg2", kw1="kwarg1", kw2="kwarg2") ) == "domdf_python_tools.getters.methodcaller(1, '__iter__', 'arg1', 'arg2', kw1='kwarg1', kw2='kwarg2')" evaluate(repr(methodcaller(0, "lower"))) evaluate(repr(methodcaller(1, "__iter__"))) evaluate(repr(methodcaller(1, "__iter__", "arg1"))) evaluate(repr(methodcaller(1, "__iter__", kw1="kwarg1"))) evaluate(repr(methodcaller(1, "__iter__", "arg1", "arg2", kw1="kwarg1", kw2="kwarg2"))) def copy(obj: Any, proto: int): pickled = pickle.dumps(obj, proto) return pickle.loads(pickled) # nosec: B301 domdf_python_tools-3.10.0/tests/test_import_tools.py000066400000000000000000000160011475315453000230170ustar00rootroot00000000000000# stdlib import inspect import platform import re import sys from contextlib import contextmanager # 3rd party import pytest from coincidence.regressions import AdvancedDataRegressionFixture from coincidence.selectors import not_pypy, only_pypy, only_version # this package from domdf_python_tools.import_tools import ( discover, discover_entry_points, discover_entry_points_by_name, iter_submodules ) sys.path.append('.') sys.path.append("tests") # 3rd party import discover_demo_module # type: ignore # noqa: E402 def test_discover(): # Alphabetical order regardless of order in the module. 
assert discover(discover_demo_module) == [ discover_demo_module.foo_in_init, discover_demo_module.submodule_a.bar, discover_demo_module.submodule_a.foo, discover_demo_module.submodule_b.Alice, discover_demo_module.submodule_b.Bob, ] def test_discover_function_only(): # Alphabetical order regardless of order in the module. assert discover( discover_demo_module, match_func=inspect.isfunction ) == [ discover_demo_module.foo_in_init, discover_demo_module.submodule_a.bar, discover_demo_module.submodule_a.foo, ] def test_discover_class_only(): # Alphabetical order regardless of order in the module. assert discover( discover_demo_module, match_func=inspect.isclass ) == [ discover_demo_module.submodule_b.Alice, discover_demo_module.submodule_b.Bob, ] def test_discover_hasattr(): def match_func(obj): return hasattr(obj, "foo") assert discover(discover_demo_module, match_func=match_func) == [] class HasPath: __path__ = "foo" @contextmanager def does_not_raise(): yield if sys.version_info <= (3, 7): haspath_error = does_not_raise() else: haspath_error = pytest.raises(ValueError, match="^path must be None or list of paths to look for modules in$") def raises_attribute_error(obj, **kwargs): return pytest.param( obj, pytest.raises(AttributeError, match=f"^'{type(obj).__name__}' object has no attribute '__name__'$"), **kwargs, ) @pytest.mark.parametrize( "obj, expects", [ raises_attribute_error("abc", id="string"), raises_attribute_error(123, id="int"), raises_attribute_error(12.34, id="float"), raises_attribute_error([1, 2, 3], id="list"), raises_attribute_error((1, 2, 3), id="tuple"), raises_attribute_error({1, 2, 3}, id="set"), raises_attribute_error({'a': 1, 'b': 2, 'c': 3}, id="dictionary"), pytest.param(HasPath, haspath_error, id="HasPath"), ], ) def test_discover_errors(obj, expects): with expects: discover(obj) def test_discover_entry_points(advanced_data_regression: AdvancedDataRegressionFixture): entry_points = discover_entry_points("flake8.extension", lambda f: f.__name__.startswith("break")) advanced_data_regression.check([f.__name__ for f in entry_points]) def test_discover_entry_points_by_name_object_match_func(advanced_data_regression: AdvancedDataRegressionFixture): entry_points = discover_entry_points_by_name( "flake8.extension", object_match_func=lambda f: f.__name__.startswith("break") ) advanced_data_regression.check({k: v.__name__ for k, v in entry_points.items()}) def test_discover_entry_points_by_name_name_match_func(advanced_data_regression: AdvancedDataRegressionFixture): entry_points = discover_entry_points_by_name( "flake8.extension", name_match_func=lambda n: n.startswith("pycodestyle.") ) advanced_data_regression.check({k: v.__name__ for k, v in entry_points.items()}) iter_submodules_versions = pytest.mark.parametrize( "version", [ pytest.param(3.6, marks=only_version(3.6, reason="Output differs on Python 3.6")), pytest.param( 3.7, marks=[ only_version(3.7, reason="Output differs on Python 3.7"), not_pypy("Output differs on PyPy") ] ), pytest.param( "3.7-pypy", marks=[ only_version(3.7, reason="Output differs on Python 3.7"), only_pypy("Output differs on PyPy") ] ), pytest.param( 3.8, marks=[ only_version(3.8, reason="Output differs on Python 3.8"), not_pypy("Output differs on PyPy 3.8") ] ), pytest.param( "3.8_pypy", marks=[ only_version(3.8, reason="Output differs on Python 3.8"), only_pypy("Output differs on PyPy 3.8") ] ), pytest.param( 3.9, marks=[ only_version(3.9, reason="Output differs on Python 3.9"), not_pypy("Output differs on PyPy 3.9") ] ), pytest.param( 
"3.9_pypy", marks=[ only_version(3.9, reason="Output differs on Python 3.9"), only_pypy("Output differs on PyPy 3.9") ] ), pytest.param("3.10", marks=only_version("3.10", reason="Output differs on Python 3.10")), ] ) @iter_submodules_versions @pytest.mark.parametrize( "module", ["collections", "importlib", "domdf_python_tools", "consolekit", "json", "cRQefleMvm", "reprlib"], ) def test_iter_submodules(version, module: str, advanced_data_regression: AdvancedDataRegressionFixture): advanced_data_regression.check(list(iter_submodules(module))) if sys.version_info < (3, 10): # From https://github.com/python/cpython/blob/main/Lib/platform.py#L1319 # License: https://github.com/python/cpython/blob/main/LICENSE ### freedesktop.org os-release standard # https://www.freedesktop.org/software/systemd/man/os-release.html # NAME=value with optional quotes (' or "). The regular expression is less # strict than shell lexer, but that's ok. _os_release_line = re.compile("^(?P[a-zA-Z0-9_]+)=(?P[\"']?)(?P.*)(?P=quote)$") # unescape five special characters mentioned in the standard _os_release_unescape = re.compile(r"\\([\\\$\"\'`])") # /etc takes precedence over /usr/lib _os_release_candidates = ("/etc/os-release", "/usr/lib/os-release") def freedesktop_os_release(): """ Return operation system identification from freedesktop.org os-release """ errno = None for candidate in _os_release_candidates: try: with open(candidate, encoding="utf-8") as f: info = {"ID": "linux"} for line in f: mo = _os_release_line.match(line) if mo is not None: info[mo.group("name")] = _os_release_unescape.sub(r"\1", mo.group("value")) return info except OSError as e: errno = e.errno raise OSError(errno, f"Unable to read files {', '.join(_os_release_candidates)}") else: freedesktop_os_release = platform.freedesktop_os_release on_alt_linux = False if platform.system() == "Linux": try: on_alt_linux = freedesktop_os_release()["ID"] == "altlinux" except OSError: pass @iter_submodules_versions @pytest.mark.parametrize( "platform", [ pytest.param('', marks=pytest.mark.skipif(on_alt_linux, reason="Not for ALT Linux")), pytest.param("altlinux", marks=pytest.mark.skipif(not on_alt_linux, reason="Only for ALT Linux")), ] ) def test_iter_submodules_asyncio( platform, version, advanced_data_regression: AdvancedDataRegressionFixture, ): advanced_data_regression.check(list(iter_submodules("asyncio"))) domdf_python_tools-3.10.0/tests/test_import_tools_/000077500000000000000000000000001475315453000226065ustar00rootroot00000000000000domdf_python_tools-3.10.0/tests/test_import_tools_/test_discover_entry_points.yml000066400000000000000000000000751475315453000310250ustar00rootroot00000000000000- break_after_binary_operator - break_before_binary_operator test_discover_entry_points_by_name_name_match_func.yml000066400000000000000000000041621475315453000356300ustar00rootroot00000000000000domdf_python_tools-3.10.0/tests/test_import_tools_pycodestyle.ambiguous_identifier: ambiguous_identifier pycodestyle.bare_except: bare_except pycodestyle.blank_lines: blank_lines pycodestyle.break_after_binary_operator: break_after_binary_operator pycodestyle.break_before_binary_operator: break_before_binary_operator pycodestyle.comparison_negative: comparison_negative pycodestyle.comparison_to_singleton: comparison_to_singleton pycodestyle.comparison_type: comparison_type pycodestyle.compound_statements: compound_statements pycodestyle.continued_indentation: continued_indentation pycodestyle.explicit_line_join: explicit_line_join pycodestyle.extraneous_whitespace: 
extraneous_whitespace pycodestyle.imports_on_separate_lines: imports_on_separate_lines pycodestyle.indentation: indentation pycodestyle.maximum_doc_length: maximum_doc_length pycodestyle.maximum_line_length: maximum_line_length pycodestyle.missing_whitespace: missing_whitespace pycodestyle.missing_whitespace_after_import_keyword: missing_whitespace_after_import_keyword pycodestyle.missing_whitespace_around_operator: missing_whitespace_around_operator pycodestyle.module_imports_on_top_of_file: module_imports_on_top_of_file pycodestyle.python_3000_async_await_keywords: python_3000_async_await_keywords pycodestyle.python_3000_backticks: python_3000_backticks pycodestyle.python_3000_has_key: python_3000_has_key pycodestyle.python_3000_invalid_escape_sequence: python_3000_invalid_escape_sequence pycodestyle.python_3000_not_equal: python_3000_not_equal pycodestyle.python_3000_raise_comma: python_3000_raise_comma pycodestyle.tabs_obsolete: tabs_obsolete pycodestyle.tabs_or_spaces: tabs_or_spaces pycodestyle.trailing_blank_lines: trailing_blank_lines pycodestyle.trailing_whitespace: trailing_whitespace pycodestyle.whitespace_around_comma: whitespace_around_comma pycodestyle.whitespace_around_keywords: whitespace_around_keywords pycodestyle.whitespace_around_named_parameter_equals: whitespace_around_named_parameter_equals pycodestyle.whitespace_around_operator: whitespace_around_operator pycodestyle.whitespace_before_comment: whitespace_before_comment pycodestyle.whitespace_before_parameters: whitespace_before_parameters test_discover_entry_points_by_name_object_match_func.yml000066400000000000000000000002141475315453000361500ustar00rootroot00000000000000domdf_python_tools-3.10.0/tests/test_import_tools_pycodestyle.break_after_binary_operator: break_after_binary_operator pycodestyle.break_before_binary_operator: break_before_binary_operator domdf_python_tools-3.10.0/tests/test_import_tools_/test_iter_submodules_asyncio__3_10_.yml000066400000000000000000000011521475315453000323410ustar00rootroot00000000000000- asyncio - asyncio.__main__ - asyncio.base_events - asyncio.base_futures - asyncio.base_subprocess - asyncio.base_tasks - asyncio.constants - asyncio.coroutines - asyncio.events - asyncio.exceptions - asyncio.format_helpers - asyncio.futures - asyncio.locks - asyncio.log - asyncio.mixins - asyncio.proactor_events - asyncio.protocols - asyncio.queues - asyncio.runners - asyncio.selector_events - asyncio.sslproto - asyncio.staggered - asyncio.streams - asyncio.subprocess - asyncio.tasks - asyncio.threads - asyncio.transports - asyncio.trsock - asyncio.unix_events - asyncio.windows_events - asyncio.windows_utils domdf_python_tools-3.10.0/tests/test_import_tools_/test_iter_submodules_asyncio__3_6_.yml000066400000000000000000000007651475315453000322770ustar00rootroot00000000000000- asyncio - asyncio.base_events - asyncio.base_futures - asyncio.base_subprocess - asyncio.base_tasks - asyncio.compat - asyncio.constants - asyncio.coroutines - asyncio.events - asyncio.futures - asyncio.locks - asyncio.log - asyncio.proactor_events - asyncio.protocols - asyncio.queues - asyncio.selector_events - asyncio.sslproto - asyncio.streams - asyncio.subprocess - asyncio.tasks - asyncio.test_utils - asyncio.transports - asyncio.unix_events - asyncio.windows_events - asyncio.windows_utils domdf_python_tools-3.10.0/tests/test_import_tools_/test_iter_submodules_asyncio__3_7_.yml000066400000000000000000000007721475315453000322760ustar00rootroot00000000000000- asyncio - asyncio.base_events - asyncio.base_futures - 
asyncio.base_subprocess - asyncio.base_tasks - asyncio.constants - asyncio.coroutines - asyncio.events - asyncio.format_helpers - asyncio.futures - asyncio.locks - asyncio.log - asyncio.proactor_events - asyncio.protocols - asyncio.queues - asyncio.runners - asyncio.selector_events - asyncio.sslproto - asyncio.streams - asyncio.subprocess - asyncio.tasks - asyncio.transports - asyncio.unix_events - asyncio.windows_events - asyncio.windows_utils domdf_python_tools-3.10.0/tests/test_import_tools_/test_iter_submodules_asyncio__3_7_pypy_.yml000066400000000000000000000010401475315453000333440ustar00rootroot00000000000000- asyncio - asyncio.base_events - asyncio.base_futures - asyncio.base_subprocess - asyncio.base_tasks - asyncio.compat - asyncio.constants - asyncio.coroutines - asyncio.events - asyncio.format_helpers - asyncio.futures - asyncio.locks - asyncio.log - asyncio.proactor_events - asyncio.protocols - asyncio.queues - asyncio.runners - asyncio.selector_events - asyncio.sslproto - asyncio.streams - asyncio.subprocess - asyncio.tasks - asyncio.test_utils - asyncio.transports - asyncio.unix_events - asyncio.windows_events - asyncio.windows_utils domdf_python_tools-3.10.0/tests/test_import_tools_/test_iter_submodules_asyncio__3_8_.yml000066400000000000000000000011071475315453000322700ustar00rootroot00000000000000- asyncio - asyncio.__main__ - asyncio.base_events - asyncio.base_futures - asyncio.base_subprocess - asyncio.base_tasks - asyncio.constants - asyncio.coroutines - asyncio.events - asyncio.exceptions - asyncio.format_helpers - asyncio.futures - asyncio.locks - asyncio.log - asyncio.proactor_events - asyncio.protocols - asyncio.queues - asyncio.runners - asyncio.selector_events - asyncio.sslproto - asyncio.staggered - asyncio.streams - asyncio.subprocess - asyncio.tasks - asyncio.transports - asyncio.trsock - asyncio.unix_events - asyncio.windows_events - asyncio.windows_utils domdf_python_tools-3.10.0/tests/test_import_tools_/test_iter_submodules_asyncio__3_8_pypy_.yml000066400000000000000000000011551475315453000333540ustar00rootroot00000000000000- asyncio - asyncio.__main__ - asyncio.base_events - asyncio.base_futures - asyncio.base_subprocess - asyncio.base_tasks - asyncio.compat - asyncio.constants - asyncio.coroutines - asyncio.events - asyncio.exceptions - asyncio.format_helpers - asyncio.futures - asyncio.locks - asyncio.log - asyncio.proactor_events - asyncio.protocols - asyncio.queues - asyncio.runners - asyncio.selector_events - asyncio.sslproto - asyncio.staggered - asyncio.streams - asyncio.subprocess - asyncio.tasks - asyncio.test_utils - asyncio.transports - asyncio.trsock - asyncio.unix_events - asyncio.windows_events - asyncio.windows_utils domdf_python_tools-3.10.0/tests/test_import_tools_/test_iter_submodules_asyncio__3_9_.yml000066400000000000000000000011311475315453000322660ustar00rootroot00000000000000- asyncio - asyncio.__main__ - asyncio.base_events - asyncio.base_futures - asyncio.base_subprocess - asyncio.base_tasks - asyncio.constants - asyncio.coroutines - asyncio.events - asyncio.exceptions - asyncio.format_helpers - asyncio.futures - asyncio.locks - asyncio.log - asyncio.proactor_events - asyncio.protocols - asyncio.queues - asyncio.runners - asyncio.selector_events - asyncio.sslproto - asyncio.staggered - asyncio.streams - asyncio.subprocess - asyncio.tasks - asyncio.threads - asyncio.transports - asyncio.trsock - asyncio.unix_events - asyncio.windows_events - asyncio.windows_utils 
domdf_python_tools-3.10.0/tests/test_import_tools_/test_iter_submodules_asyncio__3_9_pypy_.yml000066400000000000000000000011311475315453000333470ustar00rootroot00000000000000- asyncio - asyncio.__main__ - asyncio.base_events - asyncio.base_futures - asyncio.base_subprocess - asyncio.base_tasks - asyncio.constants - asyncio.coroutines - asyncio.events - asyncio.exceptions - asyncio.format_helpers - asyncio.futures - asyncio.locks - asyncio.log - asyncio.proactor_events - asyncio.protocols - asyncio.queues - asyncio.runners - asyncio.selector_events - asyncio.sslproto - asyncio.staggered - asyncio.streams - asyncio.subprocess - asyncio.tasks - asyncio.threads - asyncio.transports - asyncio.trsock - asyncio.unix_events - asyncio.windows_events - asyncio.windows_utils domdf_python_tools-3.10.0/tests/test_import_tools_/test_iter_submodules_asyncio_altlinux_3_10_.yml000066400000000000000000000010711475315453000341220ustar00rootroot00000000000000- asyncio - asyncio.__main__ - asyncio.base_events - asyncio.base_futures - asyncio.base_subprocess - asyncio.base_tasks - asyncio.constants - asyncio.coroutines - asyncio.events - asyncio.exceptions - asyncio.format_helpers - asyncio.futures - asyncio.locks - asyncio.log - asyncio.mixins - asyncio.proactor_events - asyncio.protocols - asyncio.queues - asyncio.runners - asyncio.selector_events - asyncio.sslproto - asyncio.staggered - asyncio.streams - asyncio.subprocess - asyncio.tasks - asyncio.threads - asyncio.transports - asyncio.trsock - asyncio.unix_events domdf_python_tools-3.10.0/tests/test_import_tools_/test_iter_submodules_asyncio_altlinux_3_6_.yml000066400000000000000000000007041475315453000340510ustar00rootroot00000000000000- asyncio - asyncio.base_events - asyncio.base_futures - asyncio.base_subprocess - asyncio.base_tasks - asyncio.compat - asyncio.constants - asyncio.coroutines - asyncio.events - asyncio.futures - asyncio.locks - asyncio.log - asyncio.proactor_events - asyncio.protocols - asyncio.queues - asyncio.selector_events - asyncio.sslproto - asyncio.streams - asyncio.subprocess - asyncio.tasks - asyncio.test_utils - asyncio.transports - asyncio.unix_events domdf_python_tools-3.10.0/tests/test_import_tools_/test_iter_submodules_asyncio_altlinux_3_7_.yml000066400000000000000000000007111475315453000340500ustar00rootroot00000000000000- asyncio - asyncio.base_events - asyncio.base_futures - asyncio.base_subprocess - asyncio.base_tasks - asyncio.constants - asyncio.coroutines - asyncio.events - asyncio.format_helpers - asyncio.futures - asyncio.locks - asyncio.log - asyncio.proactor_events - asyncio.protocols - asyncio.queues - asyncio.runners - asyncio.selector_events - asyncio.sslproto - asyncio.streams - asyncio.subprocess - asyncio.tasks - asyncio.transports - asyncio.unix_events test_iter_submodules_asyncio_altlinux_3_7_pypy_.yml000066400000000000000000000007571475315453000350640ustar00rootroot00000000000000domdf_python_tools-3.10.0/tests/test_import_tools_- asyncio - asyncio.base_events - asyncio.base_futures - asyncio.base_subprocess - asyncio.base_tasks - asyncio.compat - asyncio.constants - asyncio.coroutines - asyncio.events - asyncio.format_helpers - asyncio.futures - asyncio.locks - asyncio.log - asyncio.proactor_events - asyncio.protocols - asyncio.queues - asyncio.runners - asyncio.selector_events - asyncio.sslproto - asyncio.streams - asyncio.subprocess - asyncio.tasks - asyncio.test_utils - asyncio.transports - asyncio.unix_events 
domdf_python_tools-3.10.0/tests/test_import_tools_/test_iter_submodules_asyncio_altlinux_3_8_.yml000066400000000000000000000010261475315453000340510ustar00rootroot00000000000000- asyncio - asyncio.__main__ - asyncio.base_events - asyncio.base_futures - asyncio.base_subprocess - asyncio.base_tasks - asyncio.constants - asyncio.coroutines - asyncio.events - asyncio.exceptions - asyncio.format_helpers - asyncio.futures - asyncio.locks - asyncio.log - asyncio.proactor_events - asyncio.protocols - asyncio.queues - asyncio.runners - asyncio.selector_events - asyncio.sslproto - asyncio.staggered - asyncio.streams - asyncio.subprocess - asyncio.tasks - asyncio.transports - asyncio.trsock - asyncio.unix_events test_iter_submodules_asyncio_altlinux_3_8_pypy_.yml000066400000000000000000000010741475315453000350560ustar00rootroot00000000000000domdf_python_tools-3.10.0/tests/test_import_tools_- asyncio - asyncio.__main__ - asyncio.base_events - asyncio.base_futures - asyncio.base_subprocess - asyncio.base_tasks - asyncio.compat - asyncio.constants - asyncio.coroutines - asyncio.events - asyncio.exceptions - asyncio.format_helpers - asyncio.futures - asyncio.locks - asyncio.log - asyncio.proactor_events - asyncio.protocols - asyncio.queues - asyncio.runners - asyncio.selector_events - asyncio.sslproto - asyncio.staggered - asyncio.streams - asyncio.subprocess - asyncio.tasks - asyncio.test_utils - asyncio.transports - asyncio.trsock - asyncio.unix_events domdf_python_tools-3.10.0/tests/test_import_tools_/test_iter_submodules_asyncio_altlinux_3_9_.yml000066400000000000000000000010501475315453000340470ustar00rootroot00000000000000- asyncio - asyncio.__main__ - asyncio.base_events - asyncio.base_futures - asyncio.base_subprocess - asyncio.base_tasks - asyncio.constants - asyncio.coroutines - asyncio.events - asyncio.exceptions - asyncio.format_helpers - asyncio.futures - asyncio.locks - asyncio.log - asyncio.proactor_events - asyncio.protocols - asyncio.queues - asyncio.runners - asyncio.selector_events - asyncio.sslproto - asyncio.staggered - asyncio.streams - asyncio.subprocess - asyncio.tasks - asyncio.threads - asyncio.transports - asyncio.trsock - asyncio.unix_events test_iter_submodules_asyncio_altlinux_3_9_pypy_.yml000066400000000000000000000011161475315453000350540ustar00rootroot00000000000000domdf_python_tools-3.10.0/tests/test_import_tools_- asyncio - asyncio.__main__ - asyncio.base_events - asyncio.base_futures - asyncio.base_subprocess - asyncio.base_tasks - asyncio.compat - asyncio.constants - asyncio.coroutines - asyncio.events - asyncio.exceptions - asyncio.format_helpers - asyncio.futures - asyncio.locks - asyncio.log - asyncio.proactor_events - asyncio.protocols - asyncio.queues - asyncio.runners - asyncio.selector_events - asyncio.sslproto - asyncio.staggered - asyncio.streams - asyncio.subprocess - asyncio.tasks - asyncio.test_utils - asyncio.threads - asyncio.transports - asyncio.trsock - asyncio.unix_events domdf_python_tools-3.10.0/tests/test_import_tools_/test_iter_submodules_cRQefleMvm_3_10_.yml000066400000000000000000000000031475315453000325300ustar00rootroot00000000000000[] domdf_python_tools-3.10.0/tests/test_import_tools_/test_iter_submodules_cRQefleMvm_3_6_.yml000066400000000000000000000000031475315453000324550ustar00rootroot00000000000000[] domdf_python_tools-3.10.0/tests/test_import_tools_/test_iter_submodules_cRQefleMvm_3_7_.yml000066400000000000000000000000031475315453000324560ustar00rootroot00000000000000[] 
domdf_python_tools-3.10.0/tests/test_import_tools_/test_iter_submodules_cRQefleMvm_3_7_pypy_.yml000066400000000000000000000000031475315453000335370ustar00rootroot00000000000000[] domdf_python_tools-3.10.0/tests/test_import_tools_/test_iter_submodules_cRQefleMvm_3_8_.yml000066400000000000000000000000031475315453000324570ustar00rootroot00000000000000[] domdf_python_tools-3.10.0/tests/test_import_tools_/test_iter_submodules_cRQefleMvm_3_8_pypy_.yml000066400000000000000000000000031475315453000335400ustar00rootroot00000000000000[] domdf_python_tools-3.10.0/tests/test_import_tools_/test_iter_submodules_cRQefleMvm_3_9_.yml000066400000000000000000000000031475315453000324600ustar00rootroot00000000000000[] domdf_python_tools-3.10.0/tests/test_import_tools_/test_iter_submodules_cRQefleMvm_3_9_pypy_.yml000066400000000000000000000000031475315453000335410ustar00rootroot00000000000000[] domdf_python_tools-3.10.0/tests/test_import_tools_/test_iter_submodules_collections_3_10_.yml000066400000000000000000000000401475315453000330460ustar00rootroot00000000000000- collections - collections.abc domdf_python_tools-3.10.0/tests/test_import_tools_/test_iter_submodules_collections_3_6_.yml000066400000000000000000000000401475315453000327730ustar00rootroot00000000000000- collections - collections.abc domdf_python_tools-3.10.0/tests/test_import_tools_/test_iter_submodules_collections_3_7_.yml000066400000000000000000000000401475315453000327740ustar00rootroot00000000000000- collections - collections.abc domdf_python_tools-3.10.0/tests/test_import_tools_/test_iter_submodules_collections_3_7_pypy_.yml000066400000000000000000000000401475315453000340550ustar00rootroot00000000000000- collections - collections.abc domdf_python_tools-3.10.0/tests/test_import_tools_/test_iter_submodules_collections_3_8_.yml000066400000000000000000000000401475315453000327750ustar00rootroot00000000000000- collections - collections.abc domdf_python_tools-3.10.0/tests/test_import_tools_/test_iter_submodules_collections_3_8_pypy_.yml000066400000000000000000000000401475315453000340560ustar00rootroot00000000000000- collections - collections.abc domdf_python_tools-3.10.0/tests/test_import_tools_/test_iter_submodules_collections_3_9_.yml000066400000000000000000000000401475315453000327760ustar00rootroot00000000000000- collections - collections.abc domdf_python_tools-3.10.0/tests/test_import_tools_/test_iter_submodules_collections_3_9_pypy_.yml000066400000000000000000000000401475315453000340570ustar00rootroot00000000000000- collections - collections.abc domdf_python_tools-3.10.0/tests/test_import_tools_/test_iter_submodules_consolekit_3_10_.yml000066400000000000000000000003521475315453000327100ustar00rootroot00000000000000- consolekit - consolekit._readline - consolekit._types - consolekit.commands - consolekit.input - consolekit.options - consolekit.terminal_colours - consolekit.testing - consolekit.tracebacks - consolekit.utils - consolekit.versions domdf_python_tools-3.10.0/tests/test_import_tools_/test_iter_submodules_consolekit_3_6_.yml000066400000000000000000000003521475315453000326350ustar00rootroot00000000000000- consolekit - consolekit._readline - consolekit._types - consolekit.commands - consolekit.input - consolekit.options - consolekit.terminal_colours - consolekit.testing - consolekit.tracebacks - consolekit.utils - consolekit.versions domdf_python_tools-3.10.0/tests/test_import_tools_/test_iter_submodules_consolekit_3_7_.yml000066400000000000000000000003521475315453000326360ustar00rootroot00000000000000- consolekit - consolekit._readline - 
consolekit._types - consolekit.commands - consolekit.input - consolekit.options - consolekit.terminal_colours - consolekit.testing - consolekit.tracebacks - consolekit.utils - consolekit.versions domdf_python_tools-3.10.0/tests/test_import_tools_/test_iter_submodules_consolekit_3_7_pypy_.yml000066400000000000000000000003521475315453000337170ustar00rootroot00000000000000- consolekit - consolekit._readline - consolekit._types - consolekit.commands - consolekit.input - consolekit.options - consolekit.terminal_colours - consolekit.testing - consolekit.tracebacks - consolekit.utils - consolekit.versions domdf_python_tools-3.10.0/tests/test_import_tools_/test_iter_submodules_consolekit_3_8_.yml000066400000000000000000000003521475315453000326370ustar00rootroot00000000000000- consolekit - consolekit._readline - consolekit._types - consolekit.commands - consolekit.input - consolekit.options - consolekit.terminal_colours - consolekit.testing - consolekit.tracebacks - consolekit.utils - consolekit.versions domdf_python_tools-3.10.0/tests/test_import_tools_/test_iter_submodules_consolekit_3_8_pypy_.yml000066400000000000000000000003521475315453000337200ustar00rootroot00000000000000- consolekit - consolekit._readline - consolekit._types - consolekit.commands - consolekit.input - consolekit.options - consolekit.terminal_colours - consolekit.testing - consolekit.tracebacks - consolekit.utils - consolekit.versions domdf_python_tools-3.10.0/tests/test_import_tools_/test_iter_submodules_consolekit_3_9_.yml000066400000000000000000000003521475315453000326400ustar00rootroot00000000000000- consolekit - consolekit._readline - consolekit._types - consolekit.commands - consolekit.input - consolekit.options - consolekit.terminal_colours - consolekit.testing - consolekit.tracebacks - consolekit.utils - consolekit.versions domdf_python_tools-3.10.0/tests/test_import_tools_/test_iter_submodules_consolekit_3_9_pypy_.yml000066400000000000000000000003521475315453000337210ustar00rootroot00000000000000- consolekit - consolekit._readline - consolekit._types - consolekit.commands - consolekit.input - consolekit.options - consolekit.terminal_colours - consolekit.testing - consolekit.tracebacks - consolekit.utils - consolekit.versions domdf_python_tools-3.10.0/tests/test_import_tools_/test_iter_submodules_domdf_python_tools_3_10_.yml000066400000000000000000000014761475315453000344600ustar00rootroot00000000000000- domdf_python_tools - domdf_python_tools._is_match - domdf_python_tools.bases - domdf_python_tools.compat - domdf_python_tools.compat.importlib_metadata - domdf_python_tools.compat.importlib_resources - domdf_python_tools.dates - domdf_python_tools.delegators - domdf_python_tools.doctools - domdf_python_tools.getters - domdf_python_tools.import_tools - domdf_python_tools.iterative - domdf_python_tools.pagesizes - domdf_python_tools.pagesizes.classes - domdf_python_tools.pagesizes.sizes - domdf_python_tools.pagesizes.units - domdf_python_tools.pagesizes.utils - domdf_python_tools.paths - domdf_python_tools.pretty_print - domdf_python_tools.secrets - domdf_python_tools.stringlist - domdf_python_tools.terminal - domdf_python_tools.typing - domdf_python_tools.utils - domdf_python_tools.versions - domdf_python_tools.words domdf_python_tools-3.10.0/tests/test_import_tools_/test_iter_submodules_domdf_python_tools_3_6_.yml000066400000000000000000000014761475315453000344050ustar00rootroot00000000000000- domdf_python_tools - domdf_python_tools._is_match - domdf_python_tools.bases - domdf_python_tools.compat - 
domdf_python_tools.compat.importlib_metadata - domdf_python_tools.compat.importlib_resources - domdf_python_tools.dates - domdf_python_tools.delegators - domdf_python_tools.doctools - domdf_python_tools.getters - domdf_python_tools.import_tools - domdf_python_tools.iterative - domdf_python_tools.pagesizes - domdf_python_tools.pagesizes.classes - domdf_python_tools.pagesizes.sizes - domdf_python_tools.pagesizes.units - domdf_python_tools.pagesizes.utils - domdf_python_tools.paths - domdf_python_tools.pretty_print - domdf_python_tools.secrets - domdf_python_tools.stringlist - domdf_python_tools.terminal - domdf_python_tools.typing - domdf_python_tools.utils - domdf_python_tools.versions - domdf_python_tools.words domdf_python_tools-3.10.0/tests/test_import_tools_/test_iter_submodules_domdf_python_tools_3_7_.yml000066400000000000000000000014761475315453000344060ustar00rootroot00000000000000- domdf_python_tools - domdf_python_tools._is_match - domdf_python_tools.bases - domdf_python_tools.compat - domdf_python_tools.compat.importlib_metadata - domdf_python_tools.compat.importlib_resources - domdf_python_tools.dates - domdf_python_tools.delegators - domdf_python_tools.doctools - domdf_python_tools.getters - domdf_python_tools.import_tools - domdf_python_tools.iterative - domdf_python_tools.pagesizes - domdf_python_tools.pagesizes.classes - domdf_python_tools.pagesizes.sizes - domdf_python_tools.pagesizes.units - domdf_python_tools.pagesizes.utils - domdf_python_tools.paths - domdf_python_tools.pretty_print - domdf_python_tools.secrets - domdf_python_tools.stringlist - domdf_python_tools.terminal - domdf_python_tools.typing - domdf_python_tools.utils - domdf_python_tools.versions - domdf_python_tools.words test_iter_submodules_domdf_python_tools_3_7_pypy_.yml000066400000000000000000000014761475315453000354100ustar00rootroot00000000000000domdf_python_tools-3.10.0/tests/test_import_tools_- domdf_python_tools - domdf_python_tools._is_match - domdf_python_tools.bases - domdf_python_tools.compat - domdf_python_tools.compat.importlib_metadata - domdf_python_tools.compat.importlib_resources - domdf_python_tools.dates - domdf_python_tools.delegators - domdf_python_tools.doctools - domdf_python_tools.getters - domdf_python_tools.import_tools - domdf_python_tools.iterative - domdf_python_tools.pagesizes - domdf_python_tools.pagesizes.classes - domdf_python_tools.pagesizes.sizes - domdf_python_tools.pagesizes.units - domdf_python_tools.pagesizes.utils - domdf_python_tools.paths - domdf_python_tools.pretty_print - domdf_python_tools.secrets - domdf_python_tools.stringlist - domdf_python_tools.terminal - domdf_python_tools.typing - domdf_python_tools.utils - domdf_python_tools.versions - domdf_python_tools.words domdf_python_tools-3.10.0/tests/test_import_tools_/test_iter_submodules_domdf_python_tools_3_8_.yml000066400000000000000000000014761475315453000344070ustar00rootroot00000000000000- domdf_python_tools - domdf_python_tools._is_match - domdf_python_tools.bases - domdf_python_tools.compat - domdf_python_tools.compat.importlib_metadata - domdf_python_tools.compat.importlib_resources - domdf_python_tools.dates - domdf_python_tools.delegators - domdf_python_tools.doctools - domdf_python_tools.getters - domdf_python_tools.import_tools - domdf_python_tools.iterative - domdf_python_tools.pagesizes - domdf_python_tools.pagesizes.classes - domdf_python_tools.pagesizes.sizes - domdf_python_tools.pagesizes.units - domdf_python_tools.pagesizes.utils - domdf_python_tools.paths - domdf_python_tools.pretty_print - 
domdf_python_tools.secrets - domdf_python_tools.stringlist - domdf_python_tools.terminal - domdf_python_tools.typing - domdf_python_tools.utils - domdf_python_tools.versions - domdf_python_tools.words test_iter_submodules_domdf_python_tools_3_8_pypy_.yml000066400000000000000000000014761475315453000354110ustar00rootroot00000000000000domdf_python_tools-3.10.0/tests/test_import_tools_- domdf_python_tools - domdf_python_tools._is_match - domdf_python_tools.bases - domdf_python_tools.compat - domdf_python_tools.compat.importlib_metadata - domdf_python_tools.compat.importlib_resources - domdf_python_tools.dates - domdf_python_tools.delegators - domdf_python_tools.doctools - domdf_python_tools.getters - domdf_python_tools.import_tools - domdf_python_tools.iterative - domdf_python_tools.pagesizes - domdf_python_tools.pagesizes.classes - domdf_python_tools.pagesizes.sizes - domdf_python_tools.pagesizes.units - domdf_python_tools.pagesizes.utils - domdf_python_tools.paths - domdf_python_tools.pretty_print - domdf_python_tools.secrets - domdf_python_tools.stringlist - domdf_python_tools.terminal - domdf_python_tools.typing - domdf_python_tools.utils - domdf_python_tools.versions - domdf_python_tools.words domdf_python_tools-3.10.0/tests/test_import_tools_/test_iter_submodules_domdf_python_tools_3_9_.yml000066400000000000000000000014761475315453000344100ustar00rootroot00000000000000- domdf_python_tools - domdf_python_tools._is_match - domdf_python_tools.bases - domdf_python_tools.compat - domdf_python_tools.compat.importlib_metadata - domdf_python_tools.compat.importlib_resources - domdf_python_tools.dates - domdf_python_tools.delegators - domdf_python_tools.doctools - domdf_python_tools.getters - domdf_python_tools.import_tools - domdf_python_tools.iterative - domdf_python_tools.pagesizes - domdf_python_tools.pagesizes.classes - domdf_python_tools.pagesizes.sizes - domdf_python_tools.pagesizes.units - domdf_python_tools.pagesizes.utils - domdf_python_tools.paths - domdf_python_tools.pretty_print - domdf_python_tools.secrets - domdf_python_tools.stringlist - domdf_python_tools.terminal - domdf_python_tools.typing - domdf_python_tools.utils - domdf_python_tools.versions - domdf_python_tools.words test_iter_submodules_domdf_python_tools_3_9_pypy_.yml000066400000000000000000000014761475315453000354120ustar00rootroot00000000000000domdf_python_tools-3.10.0/tests/test_import_tools_- domdf_python_tools - domdf_python_tools._is_match - domdf_python_tools.bases - domdf_python_tools.compat - domdf_python_tools.compat.importlib_metadata - domdf_python_tools.compat.importlib_resources - domdf_python_tools.dates - domdf_python_tools.delegators - domdf_python_tools.doctools - domdf_python_tools.getters - domdf_python_tools.import_tools - domdf_python_tools.iterative - domdf_python_tools.pagesizes - domdf_python_tools.pagesizes.classes - domdf_python_tools.pagesizes.sizes - domdf_python_tools.pagesizes.units - domdf_python_tools.pagesizes.utils - domdf_python_tools.paths - domdf_python_tools.pretty_print - domdf_python_tools.secrets - domdf_python_tools.stringlist - domdf_python_tools.terminal - domdf_python_tools.typing - domdf_python_tools.utils - domdf_python_tools.versions - domdf_python_tools.words domdf_python_tools-3.10.0/tests/test_import_tools_/test_iter_submodules_importlib_3_10_.yml000066400000000000000000000006531475315453000325430ustar00rootroot00000000000000- importlib - importlib._abc - importlib._adapters - importlib._bootstrap - importlib._bootstrap_external - importlib._common - importlib.abc - 
importlib.machinery - importlib.metadata - importlib.metadata._adapters - importlib.metadata._collections - importlib.metadata._functools - importlib.metadata._itertools - importlib.metadata._meta - importlib.metadata._text - importlib.readers - importlib.resources - importlib.util domdf_python_tools-3.10.0/tests/test_import_tools_/test_iter_submodules_importlib_3_6_.yml000066400000000000000000000001721475315453000324640ustar00rootroot00000000000000- importlib - importlib._bootstrap - importlib._bootstrap_external - importlib.abc - importlib.machinery - importlib.util domdf_python_tools-3.10.0/tests/test_import_tools_/test_iter_submodules_importlib_3_7_.yml000066400000000000000000000002201475315453000324570ustar00rootroot00000000000000- importlib - importlib._bootstrap - importlib._bootstrap_external - importlib.abc - importlib.machinery - importlib.resources - importlib.util domdf_python_tools-3.10.0/tests/test_import_tools_/test_iter_submodules_importlib_3_7_pypy_.yml000066400000000000000000000002201475315453000335400ustar00rootroot00000000000000- importlib - importlib._bootstrap - importlib._bootstrap_external - importlib.abc - importlib.machinery - importlib.resources - importlib.util domdf_python_tools-3.10.0/tests/test_import_tools_/test_iter_submodules_importlib_3_8_.yml000066400000000000000000000002451475315453000324670ustar00rootroot00000000000000- importlib - importlib._bootstrap - importlib._bootstrap_external - importlib.abc - importlib.machinery - importlib.metadata - importlib.resources - importlib.util domdf_python_tools-3.10.0/tests/test_import_tools_/test_iter_submodules_importlib_3_8_pypy_.yml000066400000000000000000000002451475315453000335500ustar00rootroot00000000000000- importlib - importlib._bootstrap - importlib._bootstrap_external - importlib.abc - importlib.machinery - importlib.metadata - importlib.resources - importlib.util domdf_python_tools-3.10.0/tests/test_import_tools_/test_iter_submodules_importlib_3_9_.yml000066400000000000000000000002711475315453000324670ustar00rootroot00000000000000- importlib - importlib._bootstrap - importlib._bootstrap_external - importlib._common - importlib.abc - importlib.machinery - importlib.metadata - importlib.resources - importlib.util domdf_python_tools-3.10.0/tests/test_import_tools_/test_iter_submodules_importlib_3_9_pypy_.yml000066400000000000000000000002711475315453000335500ustar00rootroot00000000000000- importlib - importlib._bootstrap - importlib._bootstrap_external - importlib._common - importlib.abc - importlib.machinery - importlib.metadata - importlib.resources - importlib.util domdf_python_tools-3.10.0/tests/test_import_tools_/test_iter_submodules_json_3_10_.yml000066400000000000000000000001001475315453000314760ustar00rootroot00000000000000- json - json.decoder - json.encoder - json.scanner - json.tool domdf_python_tools-3.10.0/tests/test_import_tools_/test_iter_submodules_json_3_6_.yml000066400000000000000000000001001475315453000314230ustar00rootroot00000000000000- json - json.decoder - json.encoder - json.scanner - json.tool domdf_python_tools-3.10.0/tests/test_import_tools_/test_iter_submodules_json_3_7_.yml000066400000000000000000000001001475315453000314240ustar00rootroot00000000000000- json - json.decoder - json.encoder - json.scanner - json.tool domdf_python_tools-3.10.0/tests/test_import_tools_/test_iter_submodules_json_3_7_pypy_.yml000066400000000000000000000001001475315453000325050ustar00rootroot00000000000000- json - json.decoder - json.encoder - json.scanner - json.tool 
domdf_python_tools-3.10.0/tests/test_import_tools_/test_iter_submodules_json_3_8_.yml000066400000000000000000000001001475315453000314250ustar00rootroot00000000000000- json - json.decoder - json.encoder - json.scanner - json.tool domdf_python_tools-3.10.0/tests/test_import_tools_/test_iter_submodules_json_3_8_pypy_.yml000066400000000000000000000001001475315453000325060ustar00rootroot00000000000000- json - json.decoder - json.encoder - json.scanner - json.tool domdf_python_tools-3.10.0/tests/test_import_tools_/test_iter_submodules_json_3_9_.yml000066400000000000000000000001001475315453000314260ustar00rootroot00000000000000- json - json.decoder - json.encoder - json.scanner - json.tool domdf_python_tools-3.10.0/tests/test_import_tools_/test_iter_submodules_json_3_9_pypy_.yml000066400000000000000000000001001475315453000325070ustar00rootroot00000000000000- json - json.decoder - json.encoder - json.scanner - json.tool domdf_python_tools-3.10.0/tests/test_import_tools_/test_iter_submodules_reprlib_3_10_.yml000066400000000000000000000000121475315453000321660ustar00rootroot00000000000000- reprlib domdf_python_tools-3.10.0/tests/test_import_tools_/test_iter_submodules_reprlib_3_6_.yml000066400000000000000000000000121475315453000321130ustar00rootroot00000000000000- reprlib domdf_python_tools-3.10.0/tests/test_import_tools_/test_iter_submodules_reprlib_3_7_.yml000066400000000000000000000000121475315453000321140ustar00rootroot00000000000000- reprlib domdf_python_tools-3.10.0/tests/test_import_tools_/test_iter_submodules_reprlib_3_7_pypy_.yml000066400000000000000000000000121475315453000331750ustar00rootroot00000000000000- reprlib domdf_python_tools-3.10.0/tests/test_import_tools_/test_iter_submodules_reprlib_3_8_.yml000066400000000000000000000000121475315453000321150ustar00rootroot00000000000000- reprlib domdf_python_tools-3.10.0/tests/test_import_tools_/test_iter_submodules_reprlib_3_8_pypy_.yml000066400000000000000000000000121475315453000331760ustar00rootroot00000000000000- reprlib domdf_python_tools-3.10.0/tests/test_import_tools_/test_iter_submodules_reprlib_3_9_.yml000066400000000000000000000000121475315453000321160ustar00rootroot00000000000000- reprlib domdf_python_tools-3.10.0/tests/test_import_tools_/test_iter_submodules_reprlib_3_9_pypy_.yml000066400000000000000000000000121475315453000331770ustar00rootroot00000000000000- reprlib domdf_python_tools-3.10.0/tests/test_iterative.py000066400000000000000000000363301475315453000222700ustar00rootroot00000000000000""" iterative ~~~~~~~~~~~~~~~ Test functions in iterative.py """ # test_count, test_count_with_stride and pickletest # adapted from https://github.com/python/cpython/blob/master/Lib/test/test_itertools.py # Licensed under the Python Software Foundation License Version 2. # Copyright © 2001-2021 Python Software Foundation. All rights reserved. # Copyright © 2000 BeOpen.com. All rights reserved. # Copyright © 1995-2000 Corporation for National Research Initiatives. All rights reserved. # Copyright © 1991-1995 Stichting Mathematisch Centrum. All rights reserved. 
# # stdlib import pickle import sys from itertools import islice from random import shuffle from types import GeneratorType from typing import Any, Iterable, List, Optional, Sequence, Tuple, TypeVar # 3rd party import pytest from coincidence.regressions import ( AdvancedDataRegressionFixture, AdvancedFileRegressionFixture, check_file_regression ) # this package from domdf_python_tools.iterative import ( Len, chunks, count, double_chain, extend, extend_with, extend_with_none, flatten, groupfloats, make_tree, natmax, natmin, permutations, ranges_from_iterable, split_len ) from domdf_python_tools.utils import trim_precision def test_chunks(): assert isinstance(chunks(list(range(100)), 5), GeneratorType) assert list(chunks(list(range(100)), 5))[0] == [0, 1, 2, 3, 4] assert list(chunks(['a', 'b', 'c'], 1)) == [['a'], ['b'], ['c']] def test_permutations(): data = ["egg and bacon", "egg sausage and bacon", "egg and spam", "egg bacon and spam"] assert permutations(data, 1) == [(x, ) for x in data] assert permutations(data, 2) == [ ("egg and bacon", "egg sausage and bacon"), ("egg and bacon", "egg and spam"), ("egg and bacon", "egg bacon and spam"), ("egg sausage and bacon", "egg and spam"), ("egg sausage and bacon", "egg bacon and spam"), ("egg and spam", "egg bacon and spam"), ] assert permutations(data, 3) == [ ("egg and bacon", "egg sausage and bacon", "egg and spam"), ("egg and bacon", "egg sausage and bacon", "egg bacon and spam"), ("egg and bacon", "egg and spam", "egg sausage and bacon"), ("egg and bacon", "egg and spam", "egg bacon and spam"), ("egg and bacon", "egg bacon and spam", "egg sausage and bacon"), ("egg and bacon", "egg bacon and spam", "egg and spam"), ("egg sausage and bacon", "egg and bacon", "egg and spam"), ("egg sausage and bacon", "egg and bacon", "egg bacon and spam"), ("egg sausage and bacon", "egg and spam", "egg bacon and spam"), ("egg sausage and bacon", "egg bacon and spam", "egg and spam"), ("egg and spam", "egg and bacon", "egg bacon and spam"), ("egg and spam", "egg sausage and bacon", "egg bacon and spam"), ] assert permutations(data, 4) == [ ("egg and bacon", "egg sausage and bacon", "egg and spam", "egg bacon and spam"), ("egg and bacon", "egg sausage and bacon", "egg bacon and spam", "egg and spam"), ("egg and bacon", "egg and spam", "egg sausage and bacon", "egg bacon and spam"), ("egg and bacon", "egg and spam", "egg bacon and spam", "egg sausage and bacon"), ("egg and bacon", "egg bacon and spam", "egg sausage and bacon", "egg and spam"), ("egg and bacon", "egg bacon and spam", "egg and spam", "egg sausage and bacon"), ("egg sausage and bacon", "egg and bacon", "egg and spam", "egg bacon and spam"), ("egg sausage and bacon", "egg and bacon", "egg bacon and spam", "egg and spam"), ("egg sausage and bacon", "egg and spam", "egg and bacon", "egg bacon and spam"), ("egg sausage and bacon", "egg bacon and spam", "egg and bacon", "egg and spam"), ("egg and spam", "egg and bacon", "egg sausage and bacon", "egg bacon and spam"), ("egg and spam", "egg sausage and bacon", "egg and bacon", "egg bacon and spam"), ] assert permutations(data, 5) == [] assert permutations(data, 6) == [] assert permutations(data, 10) == [] assert permutations(data, 30) == [] assert permutations(data, 100) == [] with pytest.raises(ValueError, match="'n' cannot be 0"): permutations(data, 0) def test_split_len(): assert split_len("Spam Spam Spam Spam Spam Spam Spam Spam ", 5) == ["Spam "] * 8 def test_len(capsys): assert list(Len("Hello")) == [0, 1, 2, 3, 4] assert list(Len("Hello World")) 
== [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10] for val in Len("Hello World"): print(val) captured = capsys.readouterr() assert captured.out.splitlines() == ['0', '1', '2', '3', '4', '5', '6', '7', '8', '9', "10"] assert Len("Hello") == range(5) @pytest.mark.parametrize( "value, expects", [ ([[(1, 2), (3, 4)], [(5, 6), (7, 8)]], [1, 2, 3, 4, 5, 6, 7, 8]), ([[(1, 2), (3, 4)], ((5, 6), (7, 8))], [1, 2, 3, 4, 5, 6, 7, 8]), ([((1, 2), (3, 4)), [(5, 6), (7, 8)]], [1, 2, 3, 4, 5, 6, 7, 8]), ([((1, 2), (3, 4)), ((5, 6), (7, 8))], [1, 2, 3, 4, 5, 6, 7, 8]), ((((1, 2), (3, 4)), ((5, 6), (7, 8))), [1, 2, 3, 4, 5, 6, 7, 8]), ((("12", "34"), ("56", "78")), ['1', '2', '3', '4', '5', '6', '7', '8']), ] ) def test_double_chain(value, expects): assert list(double_chain(value)) == expects def test_make_tree(advanced_file_regression: AdvancedFileRegressionFixture): check_file_regression( '\n'.join( make_tree([ "apeye>=0.3.0", [ "appdirs>=1.4.4", "cachecontrol[filecache]>=0.12.6", [ "requests", [ "chardet<4,>=3.0.2", "idna<3,>=2.5", "urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1", "certifi>=2017.4.17", ], "msgpack>=0.5.2", ], ], "domdf_python_tools==2.2.0", ]) ), advanced_file_regression ) @pytest.mark.parametrize( "data", [ ["abc", "def", ["ghi", "jkl", ["mno", "pqr"]]], ["abc", "def", ["ghi", "jkl", "mno", "pqr"]], ["abc", "def", "ghi", "jkl", ["mno", "pqr"]], ["abc", "def", "ghi", "jkl", "mno", "pqr"], ] ) def test_flatten(data, advanced_data_regression: AdvancedDataRegressionFixture): advanced_data_regression.check(list(flatten(data))) @pytest.mark.parametrize( "data", [ pytest.param([1, 3, 5, 7, 9], id="integers"), pytest.param([1.2, 3.4, 5.6, 7.8, 9.0], id="floats"), pytest.param(['1', '3', '5', '7', '9'], id="numerical_strings"), pytest.param(["1.2", "3.4", "5.6", "7.8", "9.0"], id="float strings"), pytest.param(["0.9", "0.12.4", '1', "2.5"], id="versions"), ] ) def test_natmin(data): orig_data = data[:] for _ in range(5): shuffle(data) assert natmin(data) == orig_data[0] @pytest.mark.parametrize( "data", [ pytest.param([1, 3, 5, 7, 9], id="integers"), pytest.param([1.2, 3.4, 5.6, 7.8, 9.0], id="floats"), pytest.param(['1', '3', '5', '7', '9'], id="numerical_strings"), pytest.param(["1.2", "3.4", "5.6", "7.8", "9.0"], id="float strings"), pytest.param(["0.9", "0.12.4", '1', "2.5"], id="versions"), ] ) def test_natmax(data): orig_data = data[:] for _ in range(5): shuffle(data) assert natmax(data) == orig_data[-1] def test_groupfloats(): expects: List[Tuple[float, ...]] = [(170.0, 170.05, 170.1, 170.15), (171.05, 171.1, 171.15, 171.2)] assert list(groupfloats([170.0, 170.05, 170.1, 170.15, 171.05, 171.1, 171.15, 171.2], step=0.05)) == expects expects = [(170.0, 170.05, 170.1, 170.15), (171.05, 171.1, 171.15, 171.2)] values = [170.0, 170.05, 170.10000000000002, 170.15, 171.05, 171.10000000000002, 171.15, 171.2] values = list(map(lambda v: trim_precision(v, 4), values)) assert list(groupfloats(values, step=0.05)) == expects expects = [(1, 2, 3, 4, 5), (7, 8, 9, 10)] assert list(groupfloats([1, 2, 3, 4, 5, 7, 8, 9, 10])) == expects def test_ranges_from_iterable(): expects = [(170.0, 170.15), (171.05, 171.2)] assert list( ranges_from_iterable([170.0, 170.05, 170.1, 170.15, 171.05, 171.1, 171.15, 171.2], step=0.05) ) == expects expects = [(1, 5), (7, 10)] assert list(ranges_from_iterable([1, 2, 3, 4, 5, 7, 8, 9, 10])) == expects def _extend_param(sequence: str, expects: Any): return pytest.param(sequence, expects, id=sequence) @pytest.mark.parametrize( "sequence, expects", [ _extend_param('a', "aaaa"), _extend_param("ab", 
"abab"), _extend_param("abc", "abca"), _extend_param("abcd", "abcd"), _extend_param("abcde", "abcde"), pytest.param(('a', 'b', 'c', 'd', 'e'), "abcde", id="tuple"), pytest.param(['a', 'b', 'c', 'd', 'e'], "abcde", id="list"), ] ) def test_extend(sequence: Sequence[str], expects: str): assert ''.join(extend(sequence, 4)) == expects @pytest.mark.parametrize( "sequence, expects", [ _extend_param('a', "azzz"), _extend_param("ab", "abzz"), _extend_param("abc", "abcz"), _extend_param("abcd", "abcd"), _extend_param("abcde", "abcde"), pytest.param(('a', 'b', 'c', 'd', 'e'), "abcde", id="tuple"), pytest.param(['a', 'b', 'c', 'd', 'e'], "abcde", id="list"), ] ) def test_extend_with(sequence: Sequence[str], expects: str): assert ''.join(extend_with(sequence, 4, 'z')) == expects def test_extend_with_none(): expects = ('a', 'b', 'c', 'd', 'e', 'f', 'g', None, None, None) assert tuple(extend_with("abcdefg", 10, None)) == expects expects = ('a', 'b', 'c', 'd', 'e', 'f', 'g', None, None, None) assert tuple(extend_with_none("abcdefg", 10)) == expects def test_extend_with_int(): expects = ('a', 'b', 'c', 'd', 'e', 'f', 'g', 0, 0, 0) assert tuple(extend_with("abcdefg", 10, 0)) == expects def lzip(*args): return list(zip(*args)) _T = TypeVar("_T") def take(n: Optional[int], seq: Iterable[_T]) -> List[_T]: """ Convenience function for partially consuming a long of infinite iterable """ return list(islice(seq, n)) def test_count(): assert lzip("abc", count()) == [('a', 0), ('b', 1), ('c', 2)] assert lzip("abc", count(3)) == [('a', 3), ('b', 4), ('c', 5)] assert take(2, lzip("abc", count(3))) == [('a', 3), ('b', 4)] assert take(2, zip("abc", count(-1))) == [('a', -1), ('b', 0)] assert take(2, zip("abc", count(-3))) == [('a', -3), ('b', -2)] with pytest.raises(TypeError, match=r"count\(\) takes from 0 to 2 positional arguments but 3 were given"): count(2, 3, 4) # type: ignore[call-arg] with pytest.raises(TypeError, match="a number is required"): count('a') # type: ignore[type-var] assert take(10, count(sys.maxsize - 5)) == list(range(sys.maxsize - 5, sys.maxsize + 5)) assert take(10, count(-sys.maxsize - 5)) == list(range(-sys.maxsize - 5, -sys.maxsize + 5)) assert take(3, count(3.25)) == [3.25, 4.25, 5.25] assert take(3, count(3.25 - 4j)) == [3.25 - 4j, 4.25 - 4j, 5.25 - 4j] BIGINT = 1 << 1000 assert take(3, count(BIGINT)) == [BIGINT, BIGINT + 1, BIGINT + 2] c = count(3) assert repr(c) == "count(3)" next(c) assert repr(c) == "count(4)" c = count(-9) assert repr(c) == "count(-9)" next(c) assert next(c) == -8 assert repr(count(10.25)) == "count(10.25)" assert repr(count(10.0)) == "count(10.0)" assert type(next(count(10.0))) == float # pylint: disable=unidiomatic-typecheck for i in (-sys.maxsize - 5, -sys.maxsize + 5, -10, -1, 0, 10, sys.maxsize - 5, sys.maxsize + 5): # Test repr r1 = repr(count(i)) r2 = "count(%r)".__mod__(i) assert r1 == r2 # # check copy, deepcopy, pickle # for value in -3, 3, sys.maxsize - 5, sys.maxsize + 5: # c = count(value) # assert next(copy.copy(c)) == value # assert next(copy.deepcopy(c)) == value # for proto in range(pickle.HIGHEST_PROTOCOL + 1): # pickletest(proto, count(value)) # check proper internal error handling for large "step' sizes count(1, sys.maxsize + 5) sys.exc_info() def test_count_with_stride(): assert lzip("abc", count(2, 3)) == [('a', 2), ('b', 5), ('c', 8)] assert lzip("abc", count(start=2, step=3)) == [('a', 2), ('b', 5), ('c', 8)] assert lzip("abc", count(step=-1)) == [('a', 0), ('b', -1), ('c', -2)] with pytest.raises(TypeError, match="a number is required"): 
count('a', 'b') # type: ignore[type-var] with pytest.raises(TypeError, match="a number is required"): count(5, 'b') # type: ignore[type-var] assert lzip("abc", count(2, 0)) == [('a', 2), ('b', 2), ('c', 2)] assert lzip("abc", count(2, 1)) == [('a', 2), ('b', 3), ('c', 4)] assert lzip("abc", count(2, 3)) == [('a', 2), ('b', 5), ('c', 8)] assert take(20, count(sys.maxsize - 15, 3)) == take(20, range(sys.maxsize - 15, sys.maxsize + 100, 3)) assert take(20, count(-sys.maxsize - 15, 3)) == take(20, range(-sys.maxsize - 15, -sys.maxsize + 100, 3)) assert take(3, count(10, sys.maxsize + 5)) == list(range(10, 10 + 3 * (sys.maxsize + 5), sys.maxsize + 5)) assert take(3, count(2, 1.25)) == [2, 3.25, 4.5] assert take(3, count(2, 3.25 - 4j)) == [2, 5.25 - 4j, 8.5 - 8j] BIGINT = 1 << 1000 assert take(3, count(step=BIGINT)) == [0, BIGINT, 2 * BIGINT] assert repr(take(3, count(10, 2.5))) == repr([10, 12.5, 15.0]) c = count(3, 5) assert repr(c) == "count(3, 5)" next(c) assert repr(c) == "count(8, 5)" c = count(-9, 0) assert repr(c) == "count(-9, 0)" next(c) assert repr(c) == "count(-9, 0)" c = count(-9, -3) assert repr(c) == "count(-9, -3)" next(c) assert repr(c) == "count(-12, -3)" assert repr(c) == "count(-12, -3)" assert repr(count(10.5, 1.25)) == "count(10.5, 1.25)" assert repr(count(10.5, 1)) == "count(10.5)" # suppress step=1 when it's an int assert repr(count(10.5, 1.00)) == "count(10.5, 1.0)" # do show float values like 1.0 assert repr(count(10, 1.00)) == "count(10, 1.0)" c = count(10, 1.0) assert type(next(c)) == int # pylint: disable=unidiomatic-typecheck assert type(next(c)) == float # pylint: disable=unidiomatic-typecheck for i in (-sys.maxsize - 5, -sys.maxsize + 5, -10, -1, 0, 10, sys.maxsize - 5, sys.maxsize + 5): for j in (-sys.maxsize - 5, -sys.maxsize + 5, -10, -1, 0, 1, 10, sys.maxsize - 5, sys.maxsize + 5): # Test repr r1 = repr(count(i, j)) if j == 1: r2 = ("count(%r)" % i) else: r2 = (f'count({i!r}, {j!r})') assert r1 == r2 # for proto in range(pickle.HIGHEST_PROTOCOL + 1): # pickletest(proto, count(i, j)) def pickletest(protocol: int, it, stop: int = 4, take: int = 1, compare=None): """ Test that an iterator is the same after pickling, also when part-consumed """ def expand(it, i=0): # Recursively expand iterables, within sensible bounds if i > 10: raise RuntimeError("infinite recursion encountered") if isinstance(it, str): return it try: l = list(islice(it, stop)) except TypeError: return it # can't expand it return [expand(e, i + 1) for e in l] # Test the initial copy against the original dump = pickle.dumps(it, protocol) # nosec: B301 i2 = pickle.loads(dump) # nosec: B301 assert type(it) is type(i2) # pylint: disable=unidiomatic-typecheck a, b = expand(it), expand(i2) assert a == b if compare: c = expand(compare) assert a == c # Take from the copy, and create another copy and compare them. 
i3 = pickle.loads(dump) # nosec: B301 took = 0 try: for i in range(take): next(i3) took += 1 except StopIteration: pass # in case there is less data than 'take' dump = pickle.dumps(i3, protocol) # nosec: B301 i4 = pickle.loads(dump) # nosec: B301 a, b = expand(i3), expand(i4) assert a == b if compare: c = expand(compare[took:]) assert a == c def test_subclassing_count(): CountType = type(count(1)) with pytest.raises( TypeError, match="type 'domdf_python_tools.iterative.count' is not an acceptable base type", ): class MyCount(CountType): # type: ignore[valid-type,misc] pass domdf_python_tools-3.10.0/tests/test_iterative_/000077500000000000000000000000001475315453000220505ustar00rootroot00000000000000domdf_python_tools-3.10.0/tests/test_iterative_/test_flatten_data0_.yml000066400000000000000000000000441475315453000264750ustar00rootroot00000000000000- abc - def - ghi - jkl - mno - pqr domdf_python_tools-3.10.0/tests/test_iterative_/test_flatten_data1_.yml000066400000000000000000000000441475315453000264760ustar00rootroot00000000000000- abc - def - ghi - jkl - mno - pqr domdf_python_tools-3.10.0/tests/test_iterative_/test_flatten_data2_.yml000066400000000000000000000000441475315453000264770ustar00rootroot00000000000000- abc - def - ghi - jkl - mno - pqr domdf_python_tools-3.10.0/tests/test_iterative_/test_flatten_data3_.yml000066400000000000000000000000441475315453000265000ustar00rootroot00000000000000- abc - def - ghi - jkl - mno - pqr domdf_python_tools-3.10.0/tests/test_iterative_/test_make_tree.txt000066400000000000000000000006141475315453000256050ustar00rootroot00000000000000├── apeye>=0.3.0 │ ├── appdirs>=1.4.4 │ └── cachecontrol[filecache]>=0.12.6 │ ├── requests │ │ ├── chardet<4,>=3.0.2 │ │ ├── idna<3,>=2.5 │ │ ├── urllib3!=1.25.0,!=1.25.1,<1.26,>=1.21.1 │ │ └── certifi>=2017.4.17 │ └── msgpack>=0.5.2 └── domdf_python_tools==2.2.0 domdf_python_tools-3.10.0/tests/test_namedlist.py000066400000000000000000000111641475315453000222520ustar00rootroot00000000000000# From https://raw.githubusercontent.com/python/cpython/master/Lib/test/test_userlist.py # Licensed under the Python Software Foundation License Version 2. # Copyright © 2001-2020 Python Software Foundation. All rights reserved. # Copyright © 2000 BeOpen.com. All rights reserved. # Copyright © 1995-2000 Corporation for National Research Initiatives. All rights reserved. # Copyright © 1991-1995 Stichting Mathematisch Centrum. All rights reserved. # Check every path through every method of UserList # stdlib import sys from typing import Callable, Type, Union # this package from domdf_python_tools.bases import NamedList, UserList, namedlist from domdf_python_tools.utils import printr, printt from tests.test_userlist import TestList class TestNamedList(TestList): type2test: Type[NamedList] = NamedList def test_add_specials(self): u = NamedList("spam") u2 = u + "eggs" assert u2 == list("spameggs") def test_radd_specials(self): u = NamedList("eggs") u2 = "spam" + u assert u2 == list("spameggs") u2 = u.__radd__(NamedList("spam")) assert u2 == list("spameggs") def test_repr(self): a0 = self.type2test([]) a1 = self.type2test([0, 1, 2]) assert str(a0) == "NamedList[]" assert repr(a0) == "[]" assert str(a1) == "NamedList[0, 1, 2]" assert repr(a1) == "[0, 1, 2]" a1.append(a1) a1.append(3) assert str(a1) == "NamedList[0, 1, 2, [0, 1, 2, [...], 3], 3]" assert repr(a1) == "[0, 1, 2, [...], 3]" class ShoppingList(NamedList): pass class TestShoppingList(TestNamedList): """ Test a subclass of NamedList. 
""" type2test: Type[ShoppingList] = ShoppingList def test_repr(self): a0 = self.type2test([]) a1 = self.type2test([0, 1, 2]) assert str(a0) == "ShoppingList[]" assert repr(a0) == "[]" assert str(a1) == "ShoppingList[0, 1, 2]" assert repr(a1) == "[0, 1, 2]" a1.append(a1) a1.append(3) assert str(a1) == "ShoppingList[0, 1, 2, [0, 1, 2, [...], 3], 3]" assert repr(a1) == "[0, 1, 2, [...], 3]" class NamedListTest: shopping_list: Union[NamedList[str], Callable] repr_out = "['egg and bacon', 'egg sausage and bacon', 'egg and spam', 'egg bacon and spam']" str_out = "ShoppingList['egg and bacon', 'egg sausage and bacon', 'egg and spam', 'egg bacon and spam']" cls_str: str def test_(self, capsys): assert isinstance(self.shopping_list, UserList) assert self.shopping_list[0] == "egg and bacon" printt(self.shopping_list) printr(self.shopping_list) print(self.shopping_list) captured = capsys.readouterr() stdout = captured.out.split('\n') assert stdout[0] == self.cls_str assert str(type(self.shopping_list)) == self.cls_str assert stdout[1] == self.repr_out assert stdout[2] == self.str_out assert repr(self.shopping_list) == self.repr_out assert str(self.shopping_list) == self.str_out class TestNamedListFunction(NamedListTest): if sys.version_info[:2] == (3, 6): cls_str = "domdf_python_tools.bases.namedlist..cls" else: cls_str = ".cls'>" repr_out = "['egg and bacon', 'egg sausage and bacon', 'egg and spam', 'egg bacon and spam']" str_out = "ShoppingList['egg and bacon', 'egg sausage and bacon', 'egg and spam', 'egg bacon and spam']" mylist = namedlist() assert isinstance(mylist(), UserList) ShoppingList = namedlist("ShoppingList") shopping_list = ShoppingList(["egg and bacon", "egg sausage and bacon", "egg and spam", "egg bacon and spam"]) class TestNamedlistSubclassFunction: class ShoppingList(namedlist()): # type: ignore pass shopping_list = ShoppingList(["egg and bacon", "egg sausage and bacon", "egg and spam", "egg bacon and spam"]) if sys.version_info[:2] == (3, 6): cls_str = "tests.test_bases.test_namedlist_subclass_function..ShoppingList" else: cls_str = ".ShoppingList'>" repr_out = "['egg and bacon', 'egg sausage and bacon', 'egg and spam', 'egg bacon and spam']" str_out = "ShoppingList['egg and bacon', 'egg sausage and bacon', 'egg and spam', 'egg bacon and spam']" class TestNamedlistSubclassClass: class ShoppingList(NamedList): pass shopping_list = ShoppingList(["egg and bacon", "egg sausage and bacon", "egg and spam", "egg bacon and spam"]) if sys.version_info[:2] == (3, 6): cls_str = "tests.test_bases.test_namedlist_subclass_class..ShoppingList" else: cls_str = ".ShoppingList'>" repr_out = "['egg and bacon', 'egg sausage and bacon', 'egg and spam', 'egg bacon and spam']" str_out = "ShoppingList['egg and bacon', 'egg sausage and bacon', 'egg and spam', 'egg bacon and spam']" domdf_python_tools-3.10.0/tests/test_pagesizes/000077500000000000000000000000001475315453000217075ustar00rootroot00000000000000domdf_python_tools-3.10.0/tests/test_pagesizes/__init__.py000066400000000000000000000000001475315453000240060ustar00rootroot00000000000000domdf_python_tools-3.10.0/tests/test_pagesizes/test_pagesizes.py000066400000000000000000000150431475315453000253150ustar00rootroot00000000000000""" test_pagesizes ~~~~~~~~~~~~~~~ Test functions in pagesizes.py """ # stdlib from math import isclose from typing import List, Tuple, Type # 3rd party import pytest # this package from domdf_python_tools.pagesizes import ( A0, A1, A2, A3, A4, A5, A6, BaseSize, PageSize, Size_cm, Size_inch, Size_mm, Size_pica, Size_um, 
Unit, cm, convert_from, inch, mm, parse_measurement, pc, pt, um ) from domdf_python_tools.pagesizes.utils import _measurement_re @pytest.mark.parametrize( "obj, expects", [ ( Size_inch(12, 34), "Size_inch(width=, height=)", ), ( Size_cm(12, 34), "Size_cm(width=, height=)", ), ( Size_mm(12, 34), "Size_mm(width=, height=)", ), ( Size_um(12, 34), "Size_um(width=, height=)", ), ( Size_pica(12, 34), "Size_pica(width=, height=)", ), ], ) def test_repr(obj: Unit, expects: str): assert repr(obj) == expects @pytest.mark.parametrize( "obj, expects", [ (Size_mm(12, 34), "Size_mm(width=12, height=34)"), (Size_cm(12, 34), "Size_cm(width=12, height=34)"), (Size_um(12, 34), "Size_um(width=12, height=34)"), (Size_pica(12, 34), "Size_pica(width=12, height=34)"), ], ) def test_str(obj: Unit, expects: str): assert str(obj) == expects @pytest.mark.parametrize("size", [A6, A5, A4, A3, A2, A1, A0]) def test_orientation(size: BaseSize): assert size.is_portrait() assert size.portrait().is_portrait() assert size.landscape().portrait().is_portrait() assert size.landscape().portrait() == size assert size.landscape().is_landscape() def test_base_size(): assert BaseSize(10, 5) == (10, 5) assert BaseSize(10, 5).landscape() == (10, 5) assert BaseSize(10, 5).portrait() == (5, 10) def test_is_square(): assert BaseSize(10, 10).is_square() assert BaseSize(5, 5).is_square() assert Size_mm(5, 5).is_square() assert Size_um(5, 5).is_square() assert Size_inch(5, 5).is_square() @pytest.mark.parametrize("unit", [pt, inch, cm, mm, um, pc]) def test_convert_size(unit: Unit): size = PageSize(12, 34, unit) unit_str = unit.name if unit_str == "µm": unit_str = "um" assert isclose(getattr(size, unit_str)[0], 12, abs_tol=1e-8) assert isclose(getattr(size, unit_str)[1], 34, abs_tol=1e-8) # # @pytest.mark.parametrize( # "size, expected", # [ # (sizes.A0, (841, 1189)), # (sizes.A1, (594, 841)), # (sizes.A2, (420, 594)), # (sizes.A3, (297, 420)), # (sizes.A4, (210, 297)), # (sizes.A5, (148, 210)), # (sizes.A6, (105, 148)), # (sizes.A7, (74, 105)), # (sizes.A8, (52, 74)), # (sizes.A9, (37, 52)), # (sizes.A10, (26, 37)), # ] # ) # def test_sizes(size, expected): # assert size.mm == expected # # # # TODO: tests for Unit # @pytest.mark.parametrize( "value, unit, expects", [ (1, pt, 1), (1, inch, 72), (1, cm, 28.3464566929), (1, mm, 2.83464566929), (1, um, 0.00283464566929), (1, pc, 12), (5, pt, 1 * 5), (5, inch, 72 * 5), (5, cm, 28.3464566929 * 5), (5, mm, 2.83464566929 * 5), (5, um, 0.00283464566929 * 5), (5, pc, 12 * 5), ([1], pt, (1, )), ([1], inch, (72, )), ([1], cm, (28.3464566929, )), ([1], mm, (2.83464566929, )), ([1], um, (0.00283464566929, )), ([1], pc, (12, )), ([5], pt, (1 * 5, )), ([5], inch, (72 * 5, )), ([5], cm, (28.3464566929 * 5, )), ([5], mm, (2.83464566929 * 5, )), ([5], um, (0.00283464566929 * 5, )), ([5], pc, (12 * 5, )), ([1, 5], pt, (1, 1 * 5)), ([1, 5], inch, (72, 72 * 5)), ([1, 5], cm, (28.3464566929, 28.3464566929 * 5)), ([1, 5], mm, (2.83464566929, 2.83464566929 * 5)), ([1, 5], um, (0.00283464566929, 0.00283464566929 * 5)), ([1, 5], pc, (12, 12 * 5)), pytest.param(2, 5, 10, id="not isinstance(from_, Unit)"), ], ) def test_convert_from(value: List[int], unit: Unit, expects: Tuple[float, ...]): assert convert_from(value, unit) == expects @pytest.mark.parametrize( "size, expected, class_", [ ((12, 34), PageSize(12, 34), PageSize), ((12, 34), Size_mm(12, 34), Size_mm), ], ) def test_from_size(size: Tuple[int, int], expected: Unit, class_: Type[BaseSize]): print(class_.from_size(size)) assert class_.from_size(size) == 
expected @pytest.mark.parametrize( "string, expects", [ ("12.34mm", [("12.34", "mm")]), ("12.34 mm", [("12.34", "mm")]), (".34 mm", [(".34", "mm")]), ("12.34in", [("12.34", "in")]), ("12.34 in", [("12.34", "in")]), (".34 in", [(".34", "in")]), ('12.34"', [("12.34", '"')]), ('12.34 "', [("12.34", '"')]), ('.34 "', [(".34", '"')]), ('12.34mm .34"', [("12.34", "mm"), (".34", '"')]), ("12", [("12", '')]), ('', []), ("10μm", [("10", "μm")]), ], ) def test_measurement_re(string: str, expects: Unit): assert _measurement_re.findall(string) == expects def test_parse_measurement_errors(): with pytest.raises(ValueError, match="Too many measurements"): parse_measurement('12.34mm .34"') with pytest.raises(ValueError, match="Unable to parse measurement"): parse_measurement('') with pytest.raises(ValueError, match="Unable to parse measurement"): parse_measurement("bananas") with pytest.raises(ValueError, match="Unable to parse measurement"): parse_measurement('') with pytest.raises(ValueError, match="Unable to parse measurement"): parse_measurement("12") with pytest.raises(ValueError, match="Unable to parse measurement"): parse_measurement("mm") with pytest.raises(ValueError, match="Unknown unit"): parse_measurement("12'") @pytest.mark.parametrize( "string, expects", [ ("12mm", mm(12)), ("12 mm", mm(12)), ("12.34 mm", mm(12.34)), ("12 um", um(12)), ("12um", um(12)), ("12 μm", um(12)), ("12μm", um(12)), ("12 µm", um(12)), ("12µm", um(12)), ("12 in", inch(12)), ("12 inch", inch(12)), ('12"', inch(12)), ("12 cm", cm(12)), ("12cm", cm(12)), ("12 pc", pc(12)), ("12pc", pc(12)), ("12 pica", pc(12)), ("12pica", pc(12)), ("12 pt", pt(12)), ("12pt", pt(12)), ("12mm", 12 * mm), ("12 mm", 12 * mm), ("12.34 mm", 12.34 * mm), ("5inch", 5 * inch), ("5in", 5 * inch), ], ) def test_parse_measurement(string: str, expects: Unit): assert parse_measurement(string) == expects domdf_python_tools-3.10.0/tests/test_pagesizes/test_units.py000066400000000000000000000144741475315453000244740ustar00rootroot00000000000000# stdlib import re from operator import floordiv, truediv from typing import List, Union # 3rd party import pytest # this package from domdf_python_tools.pagesizes.units import Unit, Unitcm, UnitInch, Unitmm, Unitpc, Unitpt, Unitum units_of_12: List[Unit] = [Unit(12), UnitInch(12), Unitcm(12), Unitmm(12), Unitpc(12), Unitpt(12), Unitum(12)] zero2thousand: List[int] = list(range(0, 1000, 10)) one2thousand: List[int] = list(range(1, 1000, 10)) units_zero2thousand: List[Unit] = [Unit(x) for x in zero2thousand] units_ints_zero2thousand: List[Union[Unit, int]] = [*units_zero2thousand, *zero2thousand] @pytest.mark.parametrize("unit", units_of_12) def test_repr(unit: Unit): assert re.match(r" (3, 11), reason="No longer valid on Python 3.12+") def test_instantiate_wrong_platform(): if os.name == "nt": with pytest.raises(NotImplementedError, match="cannot instantiate .* on your system"): paths.PosixPathPlus() else: with pytest.raises(NotImplementedError, match="cannot instantiate .* on your system"): paths.WindowsPathPlus() def test_copytree(tmp_pathplus): srcdir = tmp_pathplus / "src" srcdir.mkdir() (srcdir / "root.txt").touch() (srcdir / 'a').mkdir() (srcdir / 'a' / "a.txt").touch() (srcdir / 'b').mkdir() (srcdir / 'b' / "b.txt").touch() (srcdir / 'c').mkdir() (srcdir / 'c' / "c.txt").touch() assert (srcdir / "root.txt").exists() assert (srcdir / "root.txt").is_file() assert (srcdir / 'a').exists() assert (srcdir / 'a').is_dir() assert (srcdir / 'a' / "a.txt").exists() assert (srcdir / 'a' / "a.txt").is_file() assert 
(srcdir / 'b').exists() assert (srcdir / 'b').is_dir() assert (srcdir / 'b' / "b.txt").exists() assert (srcdir / 'b' / "b.txt").is_file() assert (srcdir / 'c').exists() assert (srcdir / 'c').is_dir() assert (srcdir / 'c' / "c.txt").exists() assert (srcdir / 'c' / "c.txt").is_file() destdir = tmp_pathplus / "dest" destdir.mkdir() copytree(srcdir, destdir) assert set(os.listdir(srcdir)) == set(os.listdir(destdir)) assert (destdir / "root.txt").exists() assert (destdir / "root.txt").is_file() assert (destdir / 'a').exists() assert (destdir / 'a').is_dir() assert (destdir / 'a' / "a.txt").exists() assert (destdir / 'a' / "a.txt").is_file() assert (destdir / 'b').exists() assert (destdir / 'b').is_dir() assert (destdir / 'b' / "b.txt").exists() assert (destdir / 'b' / "b.txt").is_file() assert (destdir / 'c').exists() assert (destdir / 'c').is_dir() assert (destdir / 'c' / "c.txt").exists() assert (destdir / 'c' / "c.txt").is_file() def test_copytree_exists(tmp_pathplus): srcdir = tmp_pathplus / "src" srcdir.mkdir() (srcdir / "root.txt").touch() (srcdir / 'a').mkdir() (srcdir / 'a' / "a.txt").touch() (srcdir / 'b').mkdir() (srcdir / 'b' / "b.txt").touch() (srcdir / 'c').mkdir() (srcdir / 'c' / "c.txt").touch() assert (srcdir / "root.txt").exists() assert (srcdir / "root.txt").is_file() assert (srcdir / 'a').exists() assert (srcdir / 'a').is_dir() assert (srcdir / 'a' / "a.txt").exists() assert (srcdir / 'a' / "a.txt").is_file() assert (srcdir / 'b').exists() assert (srcdir / 'b').is_dir() assert (srcdir / 'b' / "b.txt").exists() assert (srcdir / 'b' / "b.txt").is_file() assert (srcdir / 'c').exists() assert (srcdir / 'c').is_dir() assert (srcdir / 'c' / "c.txt").exists() assert (srcdir / 'c' / "c.txt").is_file() destdir = tmp_pathplus / "dest" destdir.mkdir() copytree(srcdir, destdir) assert set(os.listdir(srcdir)) == set(os.listdir(destdir)) assert (destdir / "root.txt").exists() assert (destdir / "root.txt").is_file() assert (destdir / 'a').exists() assert (destdir / 'a').is_dir() assert (destdir / 'a' / "a.txt").exists() assert (destdir / 'a' / "a.txt").is_file() assert (destdir / 'b').exists() assert (destdir / 'b').is_dir() assert (destdir / 'b' / "b.txt").exists() assert (destdir / 'b' / "b.txt").is_file() assert (destdir / 'c').exists() assert (destdir / 'c').is_dir() assert (destdir / 'c' / "c.txt").exists() assert (destdir / 'c' / "c.txt").is_file() @pytest.mark.xfail( condition=(sys.version_info < (3, 6, 9) and platform.python_implementation() == "PyPy"), reason="Fails with unrelated error on PyPy 7.1.1 / 3.6.1", ) def test_copytree_exists_stdlib(tmp_pathplus): srcdir = tmp_pathplus / "src" srcdir.mkdir() (srcdir / "root.txt").touch() (srcdir / 'a').mkdir() (srcdir / 'a' / "a.txt").touch() (srcdir / 'b').mkdir() (srcdir / 'b' / "b.txt").touch() (srcdir / 'c').mkdir() (srcdir / 'c' / "c.txt").touch() assert (srcdir / "root.txt").exists() assert (srcdir / "root.txt").is_file() assert (srcdir / 'a').exists() assert (srcdir / 'a').is_dir() assert (srcdir / 'a' / "a.txt").exists() assert (srcdir / 'a' / "a.txt").is_file() assert (srcdir / 'b').exists() assert (srcdir / 'b').is_dir() assert (srcdir / 'b' / "b.txt").exists() assert (srcdir / 'b' / "b.txt").is_file() assert (srcdir / 'c').exists() assert (srcdir / 'c').is_dir() assert (srcdir / 'c' / "c.txt").exists() assert (srcdir / 'c' / "c.txt").is_file() destdir = tmp_pathplus / "dest" destdir.mkdir() with pytest.raises(FileExistsError, match=r".*[\\/]dest"): shutil.copytree(srcdir, destdir) def test_write_lines(tmp_pathplus): tmp_file = 
tmp_pathplus / "test.txt" contents = [ "this ", "is", 'a', "list", "of", "words", "to", "write\t\t\t", "to", "the", "file", ] tmp_file.write_lines(contents) content = tmp_file.read_text() assert content == "this\nis\na\nlist\nof\nwords\nto\nwrite\nto\nthe\nfile\n" def test_write_lines_trailing_whitespace(tmp_pathplus: PathPlus): tmp_file = tmp_pathplus / "test.txt" contents = [ "this ", "is", 'a', "list", "of", "words", "to", "write\t\t\t", "to", "the", "file", ] tmp_file.write_lines(contents, trailing_whitespace=True) content = tmp_file.read_text() assert content == "this \nis\na\nlist\nof\nwords\nto\nwrite\t\t\t\nto\nthe\nfile\n" def test_read_lines(tmp_pathplus: PathPlus): tmp_file = tmp_pathplus / "test.txt" contents = "this\nis\na\nlist\nof\nwords\nto\nwrite\nto\nthe\nfile\n" tmp_file.write_text(contents) expected = [ "this", "is", 'a', "list", "of", "words", "to", "write", "to", "the", "file", '', ] assert tmp_file.read_lines() == expected def test_dump_json(tmpdir): tmpdir_p = PathPlus(tmpdir) tmp_file = tmpdir_p / "test.txt" tmp_file.dump_json({"key": "value", "int": 1234, "float": 12.34}) assert tmp_file.read_text() == '{"key": "value", "int": 1234, "float": 12.34}\n' tmp_file.dump_json({"key": "value", "int": 1234, "float": 12.34}, indent=2) assert tmp_file.read_text() == dedent("""\ { "key": "value", "int": 1234, "float": 12.34 } """) def test_dump_json_gzip(tmpdir): tmpdir_p = PathPlus(tmpdir) tmp_file = tmpdir_p / "test.txt" tmp_file.dump_json({"key": "value", "int": 1234, "float": 12.34}, compress=True) assert tmp_file.load_json(decompress=True) == {"key": "value", "int": 1234, "float": 12.34} tmp_file.dump_json({"key": "value", "int": 1234, "float": 12.34}, indent=2, compress=True) assert tmp_file.load_json(decompress=True) == {"key": "value", "int": 1234, "float": 12.34} def test_load_json(tmpdir): tmpdir_p = PathPlus(tmpdir) tmp_file = tmpdir_p / "test.txt" tmp_file.write_text('{"key": "value", "int": 1234, "float": 12.34}') assert tmp_file.load_json() == {"key": "value", "int": 1234, "float": 12.34} tmp_file.write_text(dedent("""\ { "key": "value", "int": 1234, "float": 12.34 }""")) assert tmp_file.load_json() == {"key": "value", "int": 1234, "float": 12.34} def test_in_directory(tmp_pathplus: PathPlus): cwd = os.getcwd() with in_directory(tmp_pathplus): assert str(os.getcwd()) == str(tmp_pathplus) assert os.getcwd() == cwd tmpdir = tmp_pathplus / "tmp" tmpdir.maybe_make() with in_directory(tmpdir): assert str(os.getcwd()) == str(tmpdir) assert os.getcwd() == cwd @pytest.mark.parametrize( "location, expected", [ ("foo.yml", ''), ("foo/foo.yml", "foo"), ("foo/bar/foo.yml", "foo/bar"), ("foo/bar/baz/foo.yml", "foo/bar/baz"), ] ) def test_traverse_to_file(tmp_pathplus: PathPlus, location: str, expected: str): (tmp_pathplus / location).parent.maybe_make(parents=True) (tmp_pathplus / location).touch() assert traverse_to_file(tmp_pathplus / "foo" / "bar" / "baz", "foo.yml") == tmp_pathplus / expected # TODO: height def test_traverse_to_file_errors(tmp_pathplus: PathPlus): (tmp_pathplus / "foo/bar/baz").parent.maybe_make(parents=True) if os.sep == '/': with pytest.raises(FileNotFoundError, match="'foo.yml' not found in .*/foo/bar/baz"): traverse_to_file(tmp_pathplus / "foo" / "bar" / "baz", "foo.yml") elif os.sep == '\\': with pytest.raises(FileNotFoundError, match=r"'foo.yml' not found in .*\\foo\\bar\\baz"): traverse_to_file(tmp_pathplus / "foo" / "bar" / "baz", "foo.yml") else: raise NotImplementedError with pytest.raises(TypeError, match="traverse_to_file expected 2 or more 
arguments, got 1"): traverse_to_file(tmp_pathplus) def test_iterchildren(advanced_data_regression: AdvancedDataRegressionFixture): repo_path = PathPlus(__file__).parent.parent assert repo_path.is_dir() children = list((repo_path / "domdf_python_tools").iterchildren()) assert children advanced_data_regression.check(sorted(p.relative_to(repo_path).as_posix() for p in children)) def test_iterchildren_exclusions(): repo_path = PathPlus(__file__).parent.parent assert repo_path.is_dir() if (repo_path / "build").is_dir(): shutil.rmtree(repo_path / "build") children = list(repo_path.iterchildren()) assert children for directory in children: directory = directory.relative_to(repo_path) # print(directory) assert directory.parts[0] not in paths.unwanted_dirs @pytest.mark.parametrize("absolute", [True, False]) def test_iterchildren_match(advanced_data_regression: AdvancedDataRegressionFixture, absolute: bool): repo_path = PathPlus(__file__).parent.parent with in_directory(repo_path.parent): assert repo_path.is_dir() if not absolute: repo_path = repo_path.relative_to(repo_path.parent) if (repo_path / "build").is_dir(): shutil.rmtree(repo_path / "build") children = list(repo_path.iterchildren(match="**/*.py")) assert children child_paths = sorted(p.relative_to(repo_path).as_posix() for p in children) for exclude_filename in { ".coverage", "pathtype_demo.py", "dist", "htmlcov", "conda", ".idea", "mutdef.py" }: if exclude_filename in child_paths: child_paths.remove(exclude_filename) advanced_data_regression.check(child_paths, basename="test_iterchildren_match") def test_iterchildren_no_exclusions(tmp_pathplus: PathPlus): (tmp_pathplus / ".git").mkdir() (tmp_pathplus / "venv").mkdir() (tmp_pathplus / ".venv").mkdir() (tmp_pathplus / ".tox").mkdir() (tmp_pathplus / ".tox4").mkdir() (tmp_pathplus / ".mypy_cache").mkdir() (tmp_pathplus / ".pytest_cache").mkdir() (tmp_pathplus / "normal_dir").mkdir() children = sorted(p.relative_to(tmp_pathplus) for p in tmp_pathplus.iterchildren(None)) assert children == [ PathPlus(".git"), PathPlus(".mypy_cache"), PathPlus(".pytest_cache"), PathPlus(".tox"), PathPlus(".tox4"), PathPlus(".venv"), PathPlus("normal_dir"), PathPlus("venv"), ] children = sorted(p.relative_to(tmp_pathplus) for p in tmp_pathplus.iterchildren(())) assert children == [ PathPlus(".git"), PathPlus(".mypy_cache"), PathPlus(".pytest_cache"), PathPlus(".tox"), PathPlus(".tox4"), PathPlus(".venv"), PathPlus("normal_dir"), PathPlus("venv"), ] children = sorted(p.relative_to(tmp_pathplus) for p in tmp_pathplus.iterchildren((".git", ".tox"))) assert children == [ PathPlus(".mypy_cache"), PathPlus(".pytest_cache"), PathPlus(".tox4"), PathPlus(".venv"), PathPlus("normal_dir"), PathPlus("venv"), ] children = sorted(p.relative_to(tmp_pathplus) for p in tmp_pathplus.iterchildren()) assert children == [ PathPlus("normal_dir"), ] @pytest.mark.parametrize( "pattern, filename, match", [ ("domdf_python_tools/**/", "domdf_python_tools", True), ("domdf_python_tools/**/", "domdf_python_tools/testing/selectors.c", True), ("domdf_python_tools/**/*.py", "domdf_python_tools/testing/selectors.c", False), ("domdf_python_tools/**/*.py", "domdf_python_tools/foo/bar/baz.py", True), ("domdf_python_tools/**/*.py", "domdf_python_tools/words.py", True), ("domdf_python_tools/*.py", "domdf_python_tools/words.py", True), ("domdf_python_tools/**/*.py", "domdf_python_tools/testing/selectors.py", True), ("domdf_python_tools/**/*.py", "demo.py", False), ("domdf_python_tools/*.py", "demo.py", False), ("domdf_python_tools/[!abc].py", 
"domdf_python_tools/d.py", True), ("domdf_python_tools/[!abc].py", "domdf_python_tools/a.py", False), ("domdf_python_tools/[abc].py", "domdf_python_tools/d.py", False), ("domdf_python_tools/[abc].py", "domdf_python_tools/a.py", True), ("domdf_python_tools/?.py", "domdf_python_tools/a.py", True), ("domdf_python_tools/?.py", "domdf_python_tools/Z.py", True), ("domdf_python_tools/?.py", "domdf_python_tools/abc.py", False), ("domdf_python_tools/Law*", "domdf_python_tools/Law", True), ("domdf_python_tools/Law*", "domdf_python_tools/Laws", True), ("domdf_python_tools/Law*", "domdf_python_tools/Lawyer", True), ("domdf_python_tools/Law*", "domdf_python_tools/La", False), ("domdf_python_tools/Law*", "domdf_python_tools/aw", False), ("domdf_python_tools/Law*", "domdf_python_tools/GrokLaw", False), ("domdf_python_tools/*Law*", "domdf_python_tools/Law", True), ("domdf_python_tools/*Law*", "domdf_python_tools/Laws", True), ("domdf_python_tools/*Law*", "domdf_python_tools/Lawyer", True), ("domdf_python_tools/*Law*", "domdf_python_tools/La", False), ("domdf_python_tools/*Law*", "domdf_python_tools/aw", False), ("domdf_python_tools/*Law*", "domdf_python_tools/GrokLaw", True), ("domdf_python_tools/?at", "domdf_python_tools/Cat", True), ("domdf_python_tools/?at", "domdf_python_tools/cat", True), ("domdf_python_tools/?at", "domdf_python_tools/Bat", True), ("domdf_python_tools/?at", "domdf_python_tools/at", False), ("domdf_python_tools/[A-Z]at", "domdf_python_tools/at", False), ("domdf_python_tools/[A-Z]at", "domdf_python_tools/cat", False), ("domdf_python_tools/[A-Z]at", "domdf_python_tools/Cat", True), ("domdf_python_tools/Letter[!3-5]", "domdf_python_tools/Letter1", True), ("domdf_python_tools/Letter[!3-5]", "domdf_python_tools/Letter6", True), ( "/home/domdf/Python/01 GitHub Repos/03 Libraries/domdf_python_tools/**/*.py", "/home/domdf/Python/01 GitHub Repos/03 Libraries/domdf_python_tools/domdf_python_tools/pagesizes/units.py", True ), ("domdf_python_tools/**/*.py", "domdf_python_tools/domdf_python_tools/pagesizes/units.py", True), ("**/*.py", ".pre-commit-config.yaml", False), ("**/*.yaml", ".pre-commit-config.yaml", True), ("./**/*.py", ".pre-commit-config.yaml", False), ("./**/*.yaml", ".pre-commit-config.yaml", True), ("foo/**/**/bar.py", "foo/bar.py", True), ("foo/**/**/bar.py", "foo/baz/bar.py", True), ("foo/**/**/bar.py", "foo/baz/baz/bar.py", True), ("foo/**/**", "foo/", True), ("foo/**/**", "foo/bar.py", True), ("foo/**/**", "foo/baz/bar.py", True), ("foo/**/**", "foo/baz/baz/bar.py", True), ("**/.tox", "foo/bar/.tox", True), ("**/.tox", "foo/bar/.tox/build", False), ("**/.tox/*", "foo/bar/.tox/build", True), ("**/.tox/**", "foo/bar/.tox/build", True), ("**/.tox/**", "foo/bar/.tox/build/baz", True), ] ) def test_matchglob(pattern: str, filename: str, match: bool): assert matchglob(filename, pattern) is match pypy_no_symlink = pytest.mark.skipif( condition=PYPY and platform.system() == "Windows", reason="symlink() is not implemented for PyPy on Windows", ) @pypy_no_symlink def test_abspath(tmp_pathplus: PathPlus): assert (tmp_pathplus / "foo" / "bar" / "baz" / "..").abspath() == tmp_pathplus / "foo" / "bar" file = tmp_pathplus / "foo" / "bar.py" file.parent.mkdir(parents=True) file.write_text("I'm the original") link = tmp_pathplus / "baz.py" os.symlink(file, link) assert link.read_text() == "I'm the original" assert link.is_symlink() assert link.resolve() == file assert link.abspath() == link file.unlink() file.parent.rmdir() assert isinstance((tmp_pathplus / "foo" / "bar" / "baz" / 
"..").abspath(), PathPlus) @pypy_no_symlink def test_abspath_dotted(tmp_pathplus: PathPlus): file = tmp_pathplus / "baz.py" file.write_text("I'm the original") link = tmp_pathplus / "bar" / "foo.py" link.parent.mkdir(parents=True) os.symlink(os.path.join(link.parent, "..", "baz.py"), link) assert link.read_text() == "I'm the original" assert link.is_symlink() assert link.resolve() == file assert link.abspath() == link def test_temporarypathplus(): with TemporaryPathPlus() as tmpdir: assert isinstance(tmpdir, PathPlus) assert tmpdir.exists() assert tmpdir.is_dir() t = TemporaryPathPlus() assert isinstance(t.name, PathPlus) assert t.name.exists() assert t.name.is_dir() t.cleanup() def test_sort_paths(): paths = ["foo.txt", "bar.toml", "bar.py", "baz.yaml", "baz.YAML", "fizz/buzz.c", "fizz/buzz.h"] expected = [ PathPlus("fizz/buzz.c"), PathPlus("fizz/buzz.h"), PathPlus("bar.py"), PathPlus("bar.toml"), PathPlus("baz.YAML"), PathPlus("baz.yaml"), PathPlus("foo.txt"), ] assert sort_paths(*paths) == expected if platform.system() == "Windows": _from_uri_paths = [ "c:/", "c:/users/domdf/☃.txt", "c:/a/b.c", "c:/a/b%#c", "c:/a/bé", "//some/share/", "//some/share/a/b.c", "//some/share/a/b%#cé" ] else: _from_uri_paths = ['/', "/home/domdf/☃.txt", "/a/b.c", "/a/b%#c"] @pytest.mark.parametrize("path", _from_uri_paths) @pytest.mark.parametrize("left_type", [pathlib.PurePath, pathlib.Path, PathPlus]) def test_pathplus_from_uri(path: str, left_type: Type): assert PathPlus.from_uri(left_type(path).as_uri()).as_posix() == path def test_write_text_line_endings(tmp_pathplus: PathPlus): the_file = (tmp_pathplus / "foo.md") the_file.write_text("Hello\nWorld") assert the_file.read_bytes() == b"Hello\nWorld" with the_file.open('w') as fp: fp.write("Hello\nWorld") assert the_file.read_bytes() == b"Hello\nWorld" with the_file.open('w', newline="\r\n") as fp: fp.write("Hello\nWorld") assert the_file.read_bytes() == b"Hello\r\nWorld" # The following from https://github.com/python/cpython/pull/22420/files # Check that `\n` character change nothing the_file.write_text('abcde\r\nfghlk\n\rmnopq', newline='\n') assert the_file.read_bytes() == b'abcde\r\nfghlk\n\rmnopq' # Check that `\r` character replaces `\n` the_file.write_text('abcde\r\nfghlk\n\rmnopq', newline='\r') assert the_file.read_bytes() == b'abcde\r\rfghlk\r\rmnopq' # Check that `\r\n` character replaces `\n` the_file.write_text('abcde\r\nfghlk\n\rmnopq', newline='\r\n') assert the_file.read_bytes() == b'abcde\r\r\nfghlk\r\n\rmnopq' # Check that no argument passed will change `\n` to `os.linesep` the_file.write_text('abcde\nfghlk\n\rmnopq') assert the_file.read_bytes() == b'abcde\nfghlk\n\rmnopq' @pytest.fixture() def move_example_file(tmp_pathplus) -> PathPlus: src_file = tmp_pathplus / "tmpdir/foo" src_file.parent.maybe_make(parents=True) src_file.write_bytes(b"spam") return src_file class TestMove: def test_move_file(self, move_example_file: PathPlus): # Move a file to another location on the same filesystem. contents = move_example_file.read_bytes() with TemporaryPathPlus() as dst_dir: dst = dst_dir / move_example_file.name assert move_example_file.move(dst) == dst assert contents == dst.read_bytes() assert not move_example_file.exists() def test_move_file_to_dir(self, move_example_file: PathPlus): # Move a file inside an existing dir on the same filesystem. 
contents = move_example_file.read_bytes() with TemporaryPathPlus() as dst_dir: dst = dst_dir / move_example_file.name assert move_example_file.move(dst_dir) == dst assert contents == dst.read_bytes() assert not move_example_file.exists() def test_move_dir(self, move_example_file: PathPlus): # Move a dir to another location on the same filesystem. src_dir = move_example_file.parent with TemporaryPathPlus() as tmpdir: dst_dir = tmpdir / "target" contents = sorted(os.listdir(src_dir)) assert src_dir.move(dst_dir) == dst_dir assert contents == sorted(os.listdir(dst_dir)) assert not os.path.exists(src_dir) def test_move_dir_to_dir(self, move_example_file: PathPlus): # Move a dir inside an existing dir on the same filesystem. src_dir = move_example_file.parent with TemporaryPathPlus() as dst_dir: assert src_dir.move(dst_dir) == dst_dir / "tmpdir" assert sorted(os.listdir(dst_dir)) == ["tmpdir"] assert sorted(os.listdir(dst_dir / "tmpdir")) == ["foo"] assert not os.path.exists(src_dir) def test_existing_file_inside_dest_dir(self, move_example_file: PathPlus): # A file with the same name inside the destination dir already exists. with TemporaryPathPlus() as dst_dir: (dst_dir / "foo").touch() with pytest.raises(shutil.Error): move_example_file.move(dst_dir) def test_dont_move_dir_in_itself(self, move_example_file: PathPlus): # Moving a dir inside itself raises an Error. dst = os.path.join(move_example_file.parent, "bar") with pytest.raises(shutil.Error): move_example_file.parent.move(dst) def test_stream(tmp_pathplus: PathPlus, advanced_data_regression: AdvancedDataRegressionFixture): the_file = tmp_pathplus / "file.dat" the_file.write_text("The quick brown fox jumps over the lazy dog" * 100) advanced_data_regression.check(list(map(bytes.decode, the_file.stream(chunk_size=10)))) domdf_python_tools-3.10.0/tests/test_paths_/000077500000000000000000000000001475315453000211735ustar00rootroot00000000000000domdf_python_tools-3.10.0/tests/test_paths_/test_iterchildren.yml000066400000000000000000000022371475315453000254350ustar00rootroot00000000000000- domdf_python_tools/__init__.py - domdf_python_tools/_is_match.py - domdf_python_tools/bases.py - domdf_python_tools/compat - domdf_python_tools/compat/__init__.py - domdf_python_tools/compat/importlib_metadata.py - domdf_python_tools/compat/importlib_metadata.pyi - domdf_python_tools/compat/importlib_resources.py - domdf_python_tools/compat/importlib_resources.pyi - domdf_python_tools/dates.py - domdf_python_tools/delegators.py - domdf_python_tools/doctools.py - domdf_python_tools/getters.py - domdf_python_tools/google-10000-english-no-swears.txt - domdf_python_tools/import_tools.py - domdf_python_tools/iterative.py - domdf_python_tools/pagesizes - domdf_python_tools/pagesizes/__init__.py - domdf_python_tools/pagesizes/classes.py - domdf_python_tools/pagesizes/sizes.py - domdf_python_tools/pagesizes/units.py - domdf_python_tools/pagesizes/utils.py - domdf_python_tools/paths.py - domdf_python_tools/pretty_print.py - domdf_python_tools/py.typed - domdf_python_tools/secrets.py - domdf_python_tools/stringlist.py - domdf_python_tools/terminal.py - domdf_python_tools/typing.py - domdf_python_tools/utils.py - domdf_python_tools/versions.py - domdf_python_tools/words.py domdf_python_tools-3.10.0/tests/test_paths_/test_iterchildren_match.yml000066400000000000000000000036471475315453000266170ustar00rootroot00000000000000- .github/milestones.py - __pkginfo__.py - doc-source/api/count_demo.py - doc-source/conf.py - doc-source/latex_unicode.py - 
domdf_python_tools/__init__.py - domdf_python_tools/_is_match.py - domdf_python_tools/bases.py - domdf_python_tools/compat/__init__.py - domdf_python_tools/compat/importlib_metadata.py - domdf_python_tools/compat/importlib_resources.py - domdf_python_tools/dates.py - domdf_python_tools/delegators.py - domdf_python_tools/doctools.py - domdf_python_tools/getters.py - domdf_python_tools/import_tools.py - domdf_python_tools/iterative.py - domdf_python_tools/pagesizes/__init__.py - domdf_python_tools/pagesizes/classes.py - domdf_python_tools/pagesizes/sizes.py - domdf_python_tools/pagesizes/units.py - domdf_python_tools/pagesizes/utils.py - domdf_python_tools/paths.py - domdf_python_tools/pretty_print.py - domdf_python_tools/secrets.py - domdf_python_tools/stringlist.py - domdf_python_tools/terminal.py - domdf_python_tools/typing.py - domdf_python_tools/utils.py - domdf_python_tools/versions.py - domdf_python_tools/words.py - tests/__init__.py - tests/conftest.py - tests/discover_demo_module/__init__.py - tests/discover_demo_module/submodule_a.py - tests/discover_demo_module/submodule_b.py - tests/list_tests.py - tests/mypy_test.py - tests/seq_tests.py - tests/test_bases.py - tests/test_compat.py - tests/test_dates.py - tests/test_delegators.py - tests/test_dir_comparator.py - tests/test_docstrings.py - tests/test_doctools.py - tests/test_getters.py - tests/test_import_tools.py - tests/test_iterative.py - tests/test_namedlist.py - tests/test_pagesizes/__init__.py - tests/test_pagesizes/test_pagesizes.py - tests/test_pagesizes/test_units.py - tests/test_paths.py - tests/test_paths_stdlib.py - tests/test_pretty_print.py - tests/test_secrets.py - tests/test_stringlist.py - tests/test_terminal.py - tests/test_typing.py - tests/test_userlist.py - tests/test_utils.py - tests/test_versions.py - tests/test_words.py domdf_python_tools-3.10.0/tests/test_paths_/test_stream.yml000066400000000000000000000133561475315453000242600ustar00rootroot00000000000000- 'The quick ' - 'brown fox ' - jumps over - ' the lazy ' - dogThe qui - ck brown f - ox jumps o - ver the la - 'zy dogThe ' - quick brow - n fox jump - s over the - ' lazy dogT' - he quick b - rown fox j - 'umps over ' - the lazy d - ogThe quic - k brown fo - x jumps ov - er the laz - y dogThe q - uick brown - ' fox jumps' - ' over the ' - lazy dogTh - e quick br - own fox ju - mps over t - he lazy do - gThe quick - ' brown fox' - ' jumps ove' - r the lazy - ' dogThe qu' - 'ick brown ' - 'fox jumps ' - over the l - azy dogThe - ' quick bro' - wn fox jum - ps over th - e lazy dog - 'The quick ' - 'brown fox ' - jumps over - ' the lazy ' - dogThe qui - ck brown f - ox jumps o - ver the la - 'zy dogThe ' - quick brow - n fox jump - s over the - ' lazy dogT' - he quick b - rown fox j - 'umps over ' - the lazy d - ogThe quic - k brown fo - x jumps ov - er the laz - y dogThe q - uick brown - ' fox jumps' - ' over the ' - lazy dogTh - e quick br - own fox ju - mps over t - he lazy do - gThe quick - ' brown fox' - ' jumps ove' - r the lazy - ' dogThe qu' - 'ick brown ' - 'fox jumps ' - over the l - azy dogThe - ' quick bro' - wn fox jum - ps over th - e lazy dog - 'The quick ' - 'brown fox ' - jumps over - ' the lazy ' - dogThe qui - ck brown f - ox jumps o - ver the la - 'zy dogThe ' - quick brow - n fox jump - s over the - ' lazy dogT' - he quick b - rown fox j - 'umps over ' - the lazy d - ogThe quic - k brown fo - x jumps ov - er the laz - y dogThe q - uick brown - ' fox jumps' - ' over the ' - lazy dogTh - e quick br - own fox ju - mps over t - he lazy do - 
gThe quick - ' brown fox' - ' jumps ove' - r the lazy - ' dogThe qu' - 'ick brown ' - 'fox jumps ' - over the l - azy dogThe - ' quick bro' - wn fox jum - ps over th - e lazy dog - 'The quick ' - 'brown fox ' - jumps over - ' the lazy ' - dogThe qui - ck brown f - ox jumps o - ver the la - 'zy dogThe ' - quick brow - n fox jump - s over the - ' lazy dogT' - he quick b - rown fox j - 'umps over ' - the lazy d - ogThe quic - k brown fo - x jumps ov - er the laz - y dogThe q - uick brown - ' fox jumps' - ' over the ' - lazy dogTh - e quick br - own fox ju - mps over t - he lazy do - gThe quick - ' brown fox' - ' jumps ove' - r the lazy - ' dogThe qu' - 'ick brown ' - 'fox jumps ' - over the l - azy dogThe - ' quick bro' - wn fox jum - ps over th - e lazy dog - 'The quick ' - 'brown fox ' - jumps over - ' the lazy ' - dogThe qui - ck brown f - ox jumps o - ver the la - 'zy dogThe ' - quick brow - n fox jump - s over the - ' lazy dogT' - he quick b - rown fox j - 'umps over ' - the lazy d - ogThe quic - k brown fo - x jumps ov - er the laz - y dogThe q - uick brown - ' fox jumps' - ' over the ' - lazy dogTh - e quick br - own fox ju - mps over t - he lazy do - gThe quick - ' brown fox' - ' jumps ove' - r the lazy - ' dogThe qu' - 'ick brown ' - 'fox jumps ' - over the l - azy dogThe - ' quick bro' - wn fox jum - ps over th - e lazy dog - 'The quick ' - 'brown fox ' - jumps over - ' the lazy ' - dogThe qui - ck brown f - ox jumps o - ver the la - 'zy dogThe ' - quick brow - n fox jump - s over the - ' lazy dogT' - he quick b - rown fox j - 'umps over ' - the lazy d - ogThe quic - k brown fo - x jumps ov - er the laz - y dogThe q - uick brown - ' fox jumps' - ' over the ' - lazy dogTh - e quick br - own fox ju - mps over t - he lazy do - gThe quick - ' brown fox' - ' jumps ove' - r the lazy - ' dogThe qu' - 'ick brown ' - 'fox jumps ' - over the l - azy dogThe - ' quick bro' - wn fox jum - ps over th - e lazy dog - 'The quick ' - 'brown fox ' - jumps over - ' the lazy ' - dogThe qui - ck brown f - ox jumps o - ver the la - 'zy dogThe ' - quick brow - n fox jump - s over the - ' lazy dogT' - he quick b - rown fox j - 'umps over ' - the lazy d - ogThe quic - k brown fo - x jumps ov - er the laz - y dogThe q - uick brown - ' fox jumps' - ' over the ' - lazy dogTh - e quick br - own fox ju - mps over t - he lazy do - gThe quick - ' brown fox' - ' jumps ove' - r the lazy - ' dogThe qu' - 'ick brown ' - 'fox jumps ' - over the l - azy dogThe - ' quick bro' - wn fox jum - ps over th - e lazy dog - 'The quick ' - 'brown fox ' - jumps over - ' the lazy ' - dogThe qui - ck brown f - ox jumps o - ver the la - 'zy dogThe ' - quick brow - n fox jump - s over the - ' lazy dogT' - he quick b - rown fox j - 'umps over ' - the lazy d - ogThe quic - k brown fo - x jumps ov - er the laz - y dogThe q - uick brown - ' fox jumps' - ' over the ' - lazy dogTh - e quick br - own fox ju - mps over t - he lazy do - gThe quick - ' brown fox' - ' jumps ove' - r the lazy - ' dogThe qu' - 'ick brown ' - 'fox jumps ' - over the l - azy dogThe - ' quick bro' - wn fox jum - ps over th - e lazy dog - 'The quick ' - 'brown fox ' - jumps over - ' the lazy ' - dogThe qui - ck brown f - ox jumps o - ver the la - 'zy dogThe ' - quick brow - n fox jump - s over the - ' lazy dogT' - he quick b - rown fox j - 'umps over ' - the lazy d - ogThe quic - k brown fo - x jumps ov - er the laz - y dogThe q - uick brown - ' fox jumps' - ' over the ' - lazy dogTh - e quick br - own fox ju - mps over t - he lazy do - gThe quick - ' brown fox' - ' 
jumps ove' - r the lazy - ' dogThe qu' - 'ick brown ' - 'fox jumps ' - over the l - azy dogThe - ' quick bro' - wn fox jum - ps over th - e lazy dog - 'The quick ' - 'brown fox ' - jumps over - ' the lazy ' - dogThe qui - ck brown f - ox jumps o - ver the la - 'zy dogThe ' - quick brow - n fox jump - s over the - ' lazy dogT' - he quick b - rown fox j - 'umps over ' - the lazy d - ogThe quic - k brown fo - x jumps ov - er the laz - y dogThe q - uick brown - ' fox jumps' - ' over the ' - lazy dogTh - e quick br - own fox ju - mps over t - he lazy do - gThe quick - ' brown fox' - ' jumps ove' - r the lazy - ' dogThe qu' - 'ick brown ' - 'fox jumps ' - over the l - azy dogThe - ' quick bro' - wn fox jum - ps over th - e lazy dog domdf_python_tools-3.10.0/tests/test_paths_stdlib.py000066400000000000000000000424071475315453000227560ustar00rootroot00000000000000# Adapted from https://github.com/python/cpython/blob/master/Lib/test/test_pathlib.py # Licensed under the Python Software Foundation License Version 2. # Copyright © 2001-2020 Python Software Foundation. All rights reserved. # Copyright © 2000 BeOpen.com. All rights reserved. # Copyright © 1995-2000 Corporation for National Research Initiatives. All rights reserved. # Copyright © 1991-1995 Stichting Mathematisch Centrum. All rights reserved. # # stdlib import errno import os import pathlib import pickle import shutil import socket import stat import sys from typing import Iterator, Set from unittest import mock # 3rd party import pytest # this package from domdf_python_tools.compat import PYPY from domdf_python_tools.paths import PathPlus, PosixPathPlus, WindowsPathPlus try: # stdlib import grp import pwd except ImportError: grp = pwd = None # type: ignore if sys.version_info[:2] >= (3, 10): # stdlib from test.support.os_helper import TESTFN, can_symlink else: # stdlib from test.support import TESTFN, can_symlink # type: ignore @pytest.fixture() def _umask_0(): old_mask = os.umask(0) try: yield finally: os.umask(old_mask) only_nt = pytest.mark.skipif(condition=os.name != "nt", reason="test requires a Windows-compatible system") only_posix = pytest.mark.skipif(condition=os.name == "nt", reason="test requires a POSIX-compatible system") @pytest.fixture() def BASE(tmp_pathplus: PathPlus) -> Iterator[PathPlus]: top_dir = tmp_pathplus tmp_pathplus = top_dir / "a/b/c/d" tmp_pathplus.maybe_make(parents=True) join = lambda *x: os.path.join(tmp_pathplus, *x) if os.name == "nt": # Workaround for http://bugs.python.org/issue13772. def dirlink(src, dest): os.symlink(src, dest, target_is_directory=True) else: def dirlink(src, dest): os.symlink(src, dest) os.mkdir(join("dirA")) os.mkdir(join("dirB")) os.mkdir(join("dirC")) os.mkdir(join("dirC", "dirD")) os.mkdir(join("dirE")) with open(join("fileA"), "wb") as f: f.write(b"this is file A\n") with open(join("dirB", "fileB"), "wb") as f: f.write(b"this is file B\n") with open(join("dirC", "fileC"), "wb") as f: f.write(b"this is file C\n") with open(join("dirC", "dirD", "fileD"), "wb") as f: f.write(b"this is file D\n") os.chmod(join("dirE"), 0) if not PYPY and sys.platform != "win32": # Relative symlinks. os.symlink("fileA", join("linkA")) os.symlink("non-existing", join("brokenLink")) dirlink("dirB", join("linkB")) dirlink(os.path.join("..", "dirB"), join("dirA", "linkC")) # This one goes upwards, creating a loop. 
dirlink(os.path.join("..", "dirB"), join("dirB", "linkD")) yield tmp_pathplus os.chmod(join("dirE"), 0o777) shutil.rmtree(top_dir) def assertEqualNormCase(path_a, path_b): assert (os.path.normcase(path_a) == os.path.normcase(path_b)) if os.name == "nt": # Workaround for http://bugs.python.org/issue13772. def dirlink(src, dest): os.symlink(src, dest, target_is_directory=True) else: def dirlink(src, dest): os.symlink(src, dest) def test_stat(BASE: PathPlus): p = PathPlus(BASE) / "fileA" st = p.stat() assert (p.stat() == st) # Change file mode by flipping write bit. p.chmod(st.st_mode ^ 0o222) try: assert (p.stat() != st) finally: p.chmod(st.st_mode) @pytest.mark.skipif(not hasattr(socket, "AF_UNIX"), reason="Unix sockets required") def test_is_socket_true(BASE: PathPlus): P = PathPlus(BASE, "mysock") sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) try: try: sock.bind(str(P)) except OSError as e: if isinstance(e, PermissionError) or "AF_UNIX path too long" in str(e): pytest.skip("cannot bind Unix socket: " + str(e)) assert (P.is_socket()) assert not (P.is_fifo()) assert not (P.is_file()) finally: sock.close() def test_cwd(): p = PathPlus.cwd() q = PathPlus(os.getcwd()) assert (p == q) assertEqualNormCase(str(p), str(q)) assert (type(p) is type(q)) assert (p.is_absolute()) def test_read_write_text(BASE: PathPlus): p = PathPlus(BASE) (p / "fileA").write_text("äbcdefg", encoding="latin-1") assert ((p / "fileA").read_text(encoding="utf-8", errors="ignore") == "bcdefg") # Check that trying to write bytes does not truncate the file. with pytest.raises(TypeError): (p / "fileA").write_text(b"somebytes") # type: ignore assert ((p / "fileA").read_text(encoding="latin-1") == "äbcdefg") def test_with(BASE: PathPlus): p = PathPlus(BASE) it = p.iterdir() it2 = p.iterdir() next(it2) with p: pass # Using a path as a context manager is a no-op, thus the following # operations should still succeed after the context manage exits. next(it) next(it2) p.exists() p.resolve() p.absolute() with p: pass def test_chmod(BASE: PathPlus): p = PathPlus(BASE) / "fileA" mode = p.stat().st_mode # Clear writable bit. new_mode = mode & ~0o222 p.chmod(new_mode) assert (p.stat().st_mode == new_mode) # Set writable bit. 
new_mode = mode | 0o222 p.chmod(new_mode) assert (p.stat().st_mode == new_mode) def test_lstat_nosymlink(BASE: PathPlus): p = PathPlus(BASE) / "fileA" st = p.stat() assert (st == p.lstat()) def test_owner(BASE: PathPlus): pwd = pytest.importorskip("pwd", reason="the pwd module is needed for this test") if sys.platform == "win32": return p = PathPlus(BASE) / "fileA" uid = p.stat().st_uid try: name = pwd.getpwuid(uid).pw_name except KeyError: pytest.skip(f"user {uid:d} doesn't have an entry in the system database") assert (name == p.owner()) def test_group(BASE: PathPlus): grp = pytest.importorskip("grp", reason="the grp module is needed for this test") if sys.platform == "win32": return p = PathPlus(BASE) / "fileA" gid = p.stat().st_gid try: name = grp.getgrgid(gid).gr_name except KeyError: pytest.skip(f"group {gid:d} doesn't have an entry in the system database") assert (name == p.group()) def test_unlink(BASE: PathPlus): p = PathPlus(BASE) / "fileA" p.unlink() with pytest.raises(FileNotFoundError): p.stat() with pytest.raises(FileNotFoundError): p.unlink() def test_unlink_missing_ok(BASE: PathPlus): p = PathPlus(BASE) / "fileAAA" with pytest.raises(FileNotFoundError): p.unlink() p.unlink(missing_ok=True) def test_rmdir(BASE: PathPlus): p = PathPlus(BASE) / "dirA" for q in p.iterdir(): q.unlink() p.rmdir() with pytest.raises(FileNotFoundError): p.stat() with pytest.raises(FileNotFoundError): p.unlink() @pytest.mark.skipif(hasattr(os, "link"), reason="os.link() is present") def test_link_to_not_implemented(BASE: PathPlus): P = PathPlus(BASE) / TESTFN p = P / "fileA" # linking to another path. q = P / "dirA" / "fileAA" with pytest.raises(NotImplementedError): p.link_to(q) def test_rename(BASE, tmp_pathplus: PathPlus): P = PathPlus(BASE) p = P / "fileA" size = p.stat().st_size # Renaming to another path. q = P / "dirA" / "fileAA" if sys.version_info < (3, 9): # pragma: no cover (>=py39) p.replace(q) else: # pragma: no cover (=py39) q.replace(r) else: # pragma: no cover ( (3, 11): cm = mock.patch("os.mkdir", my_mkdir) else: cm = mock.patch("pathlib._normal_accessor.mkdir", my_mkdir) with cm: p12.mkdir(parents=True, exist_ok=False) except FileExistsError: assert (str(p12) in concurrently_created) else: assert (str(p12) not in concurrently_created) assert (p.exists()) @pytest.mark.skipif( PYPY and sys.platform == "win32", reason="symlink() is not implemented for PyPy on Windows", ) def test_symlink_to(BASE: PathPlus): P = PathPlus(BASE) target = P / "fileA" # Symlinking a path target. link = P / "dirA" / "linkAA" link.symlink_to(target) assert link.stat() == target.stat() assert link.lstat() != target.stat() # Symlinking a str target. link = P / "dirA" / "linkAAA" link.symlink_to(str(target)) assert link.stat() == target.stat() assert link.lstat() != target.stat() assert not link.is_dir() # Symlinking to a directory. 
target = P / "dirB" link = P / "dirA" / "linkAAAA" link.symlink_to(target, target_is_directory=True) assert link.stat() == target.stat() assert link.lstat() != target.stat() assert (link.is_dir()) assert (list(link.iterdir())) def test_is_dir(BASE: PathPlus): P = PathPlus(BASE) assert ((P / "dirA").is_dir()) assert not ((P / "fileA").is_dir()) assert not ((P / "non-existing").is_dir()) assert not ((P / "fileA" / "bah").is_dir()) if not PYPY and sys.platform != "win32": assert not ((P / "linkA").is_dir()) assert ((P / "linkB").is_dir()) assert not (P / "brokenLink").is_dir() def test_is_file(BASE: PathPlus): P = PathPlus(BASE) assert ((P / "fileA").is_file()) assert not ((P / "dirA").is_file()) assert not ((P / "non-existing").is_file()) assert not ((P / "fileA" / "bah").is_file()) if not PYPY and sys.platform != "win32": assert ((P / "linkA").is_file()) assert not ((P / "linkB").is_file()) assert not ((P / "brokenLink").is_file()) @only_posix def test_is_mount(BASE: PathPlus): P = PathPlus(BASE) R = PathPlus('/') # TODO: Work out Windows. assert not ((P / "fileA").is_mount()) assert not ((P / "dirA").is_mount()) assert not ((P / "non-existing").is_mount()) assert not ((P / "fileA" / "bah").is_mount()) assert (R.is_mount()) if can_symlink(): assert not ((P / "linkA").is_mount()) def test_is_symlink(BASE: PathPlus): P = PathPlus(BASE) assert not ((P / "fileA").is_symlink()) assert not ((P / "dirA").is_symlink()) assert not ((P / "non-existing").is_symlink()) assert not ((P / "fileA" / "bah").is_symlink()) if not PYPY and sys.platform != "win32": assert ((P / "linkA").is_symlink()) assert ((P / "linkB").is_symlink()) assert ((P / "brokenLink").is_symlink()) def test_is_fifo_false(BASE: PathPlus): P = PathPlus(BASE) assert not ((P / "fileA").is_fifo()) assert not ((P / "dirA").is_fifo()) assert not ((P / "non-existing").is_fifo()) assert not ((P / "fileA" / "bah").is_fifo()) def test_is_socket_false(BASE: PathPlus): P = PathPlus(BASE) assert not (P / "fileA").is_socket() assert not (P / "dirA").is_socket() assert not (P / "non-existing").is_socket() assert not (P / "fileA" / "bah").is_socket() def test_is_block_device_false(tmp_pathplus: PathPlus): P = tmp_pathplus.resolve() / TESTFN assert not (P / "fileA").is_block_device() assert not (P / "dirA").is_block_device() assert not (P / "non-existing").is_block_device() assert not (P / "fileA" / "bah").is_block_device() def test_is_char_device_false(tmp_pathplus: PathPlus): P = tmp_pathplus.resolve() / TESTFN assert not (P / "fileA").is_char_device() assert not (P / "dirA").is_char_device() assert not (P / "non-existing").is_char_device() assert not (P / "fileA" / "bah").is_char_device() def test_is_char_device_true(): # Under Unix, /dev/null should generally be a char device. 
P = PathPlus("/dev/null") if not P.exists(): pytest.skip("/dev/null required") assert P.is_char_device() assert not P.is_block_device() assert not P.is_file() def test_pickling_common(BASE: PathPlus): p = PathPlus(BASE, "fileA") for proto in range(0, pickle.HIGHEST_PROTOCOL + 1): dumped = pickle.dumps(p, proto) pp = pickle.loads(dumped) assert pp.stat() == p.stat() def test_concrete_class(): p = PathPlus('a') if os.name == "nt": assert type(p) is WindowsPathPlus # pylint: disable=unidiomatic-typecheck else: assert type(p) is PosixPathPlus # pylint: disable=unidiomatic-typecheck def test_unsupported_flavour(): if os.name == "nt": with pytest.raises(NotImplementedError): pathlib.PosixPath() else: with pytest.raises(NotImplementedError): pathlib.WindowsPath() def test_glob_empty_pattern(tmp_pathplus: PathPlus): p = tmp_pathplus with pytest.raises(ValueError, match="Unacceptable pattern"): list(p.glob('')) @pytest.mark.usefixtures("_umask_0") @only_posix def test_open_mode(BASE: PathPlus): p = PathPlus(BASE) with (p / "new_file").open("wb"): pass st = os.stat(os.path.join(BASE, "new_file")) assert stat.S_IMODE(st.st_mode) == 0o666 os.umask(0o022) with (p / "other_new_file").open("wb"): pass st = os.stat(os.path.join(BASE, "other_new_file")) assert stat.S_IMODE(st.st_mode) == 0o644 @only_posix def test_touch_mode(BASE: PathPlus): old_mask = os.umask(0) try: p = PathPlus(BASE) (p / "new_file").touch() st = os.stat(os.path.join(BASE, "new_file")) assert stat.S_IMODE(st.st_mode) == 0o666 os.umask(0o022) (p / "other_new_file").touch() st = os.stat(os.path.join(BASE, "other_new_file")) assert stat.S_IMODE(st.st_mode) == 0o644 (p / "masked_new_file").touch(mode=0o750) st = os.stat(os.path.join(BASE, "masked_new_file")) assert stat.S_IMODE(st.st_mode) == 0o750 finally: os.umask(old_mask) domdf_python_tools-3.10.0/tests/test_pretty_print.py000066400000000000000000000644611475315453000230450ustar00rootroot00000000000000# Based on CPython. # Licensed under the Python Software Foundation License Version 2. # Copyright © 2001-2020 Python Software Foundation. All rights reserved. # Copyright © 2000 BeOpen.com. All rights reserved. # Copyright © 1995-2000 Corporation for National Research Initiatives. All rights reserved. # Copyright © 1991-1995 Stichting Mathematisch Centrum. All rights reserved. 
# # stdlib import collections import io import itertools import random import types from textwrap import dedent from typing import no_type_check # 3rd party import pytest from coincidence.regressions import AdvancedFileRegressionFixture # this package from domdf_python_tools.pretty_print import FancyPrinter, simple_repr from domdf_python_tools.stringlist import StringList # list, tuple and dict subclasses that do or don't overwrite __repr__ class list2(list): pass class list3(list): def __repr__(self): return list.__repr__(self) class list_custom_repr(list): def __repr__(self): return '*' * len(list.__repr__(self)) class tuple2(tuple): __slots__ = () class tuple3(tuple): __slots__ = () def __repr__(self): return tuple.__repr__(self) class tuple_custom_repr(tuple): __slots__ = () def __repr__(self): return '*' * len(tuple.__repr__(self)) class set2(set): pass class set3(set): def __repr__(self): return set.__repr__(self) class set_custom_repr(set): def __repr__(self): return '*' * len(set.__repr__(self)) class frozenset2(frozenset): pass class frozenset3(frozenset): def __repr__(self): return frozenset.__repr__(self) class frozenset_custom_repr(frozenset): def __repr__(self): return '*' * len(frozenset.__repr__(self)) class dict2(dict): pass class dict3(dict): def __repr__(self): return dict.__repr__(self) class dict_custom_repr(dict): def __repr__(self): return '*' * len(dict.__repr__(self)) class Unorderable: def __repr__(self): return str(id(self)) # Class Orderable is orderable with any type class Orderable: def __init__(self, hash): # noqa: A002 # pylint: disable=redefined-builtin self._hash = hash def __lt__(self, other): return False def __gt__(self, other): return self != other def __le__(self, other): return self == other def __ge__(self, other): return True def __eq__(self, other): return self is other def __ne__(self, other): return self is not other def __hash__(self): return self._hash fruit = [ "apple", "orange", "pear", "lemon", "grape", "strawberry", "banana", "plum", "tomato", "cherry", "blackcurrant", ] class TestFancyPrinter: def test_list(self): assert FancyPrinter().pformat([1, 2, 3, 4, 5, 6, 7, 8, 9, 10]) == "[1, 2, 3, 4, 5, 6, 7, 8, 9, 10]" assert FancyPrinter().pformat(fruit) == dedent( """\ [ 'apple', 'orange', 'pear', 'lemon', 'grape', 'strawberry', 'banana', 'plum', 'tomato', 'cherry', 'blackcurrant', ]""" ) @no_type_check def test_init(self): FancyPrinter() FancyPrinter(indent=4, width=40, depth=5, stream=io.StringIO(), compact=True) FancyPrinter(4, 40, 5, io.StringIO()) with pytest.raises(TypeError): FancyPrinter(4, 40, 5, io.StringIO(), True) with pytest.raises(ValueError): FancyPrinter(indent=-1) with pytest.raises(ValueError): FancyPrinter(depth=0) with pytest.raises(ValueError): FancyPrinter(depth=-1) with pytest.raises(ValueError): FancyPrinter(width=0) @pytest.mark.parametrize( "safe", [ 2, 2.0, 2j, "abc", [3], (2, 2), {3: 3}, b"def", bytearray(b"ghi"), True, False, None, ..., list(range(100)), list(range(200)), ] ) def test_basic(self, safe): # Verify .isrecursive() and .isreadable() w/o recursion pp = FancyPrinter() # PrettyPrinter methods assert not pp.isrecursive(safe), f"expected not isrecursive for {safe!r}" assert pp.isreadable(safe), f"expected isreadable for {safe!r}" @no_type_check def test_knotted(self): a = list(range(100)) b = list(range(200)) a[-12] = b # Verify .isrecursive() and .isreadable() w/ recursion # Tie a knot. b[67] = a # Messy dict. 
d = {} d[0] = d[1] = d[2] = d pp = FancyPrinter() for icky in a, b, d, (d, d): assert pp.isrecursive(icky), "expected isrecursive" assert not pp.isreadable(icky), "expected not isreadable" # Break the cycles. d.clear() del a[:] del b[:] for safe in a, b, d, (d, d): # module-level convenience functions # PrettyPrinter methods assert not pp.isrecursive(safe), f"expected not isrecursive for {safe!r}" assert pp.isreadable(safe), f"expected isreadable for {safe!r}" # # def test_unreadable(self): # # Not recursive but not readable anyway # pp = FancyPrinter() # for unreadable in type(3), pprint, pprint.isrecursive: # # PrettyPrinter methods # assert not pp.isrecursive(unreadable), "expected not isrecursive for %r" % (unreadable,) # assert not pp.isreadable(unreadable), "expected not isreadable for %r" % (unreadable,) # # def test_same_as_repr(self): # # Simple objects, small containers and classes that override __repr__ # # to directly call super's __repr__. # # For those the result should be the same as repr(). # # Ahem. The docs don't say anything about that -- this appears to # # be testing an implementation quirk. Starting in Python 2.5, it's # # not true for dicts: pprint always sorts dicts by key now; before, # # it sorted a dict display if and only if the display required # # multiple lines. For that reason, dicts with more than one element # # aren't tested here. # for simple in (0, 0, 0 + 0j, 0.0, "", b"", bytearray(), # (), tuple2(), tuple3(), # [], list2(), list3(), # set(), set2(), set3(), # frozenset(), frozenset2(), frozenset3(), # {}, dict2(), dict3(), # self.assertTrue, pprint, # -6, -6, -6 - 6j, -1.5, "x", b"x", bytearray(b"x"), # (3,), [3], {3: 6}, # (1, 2), [3, 4], {5: 6}, # tuple2((1, 2)), tuple3((1, 2)), tuple3(range(100)), # [3, 4], list2([3, 4]), list3([3, 4]), list3(range(100)), # set({7}), set2({7}), set3({7}), # frozenset({8}), frozenset2({8}), frozenset3({8}), # dict2({5: 6}), dict3({5: 6}), # range(10, -11, -1), # True, False, None, ..., # ): # native = repr(simple) # self.assertEqual(FancyPrinter().pformat(simple), native) # self.assertEqual(FancyPrinter(width=1, indent=0).pformat(simple) # .replace('\n', ' '), native) # # def test_container_repr_override_called(self): # N = 1000 # # Ensure that __repr__ override is called for subclasses of containers # # for cont in (list_custom_repr(), # list_custom_repr([1, 2, 3]), # list_custom_repr(range(N)), # tuple_custom_repr(), # tuple_custom_repr([1, 2, 3]), # tuple_custom_repr(range(N)), # set_custom_repr(), # set_custom_repr([1, 2, 3]), # set_custom_repr(range(N)), # frozenset_custom_repr(), # frozenset_custom_repr([1, 2, 3]), # frozenset_custom_repr(range(N)), # dict_custom_repr(), # dict_custom_repr({5: 6}), # dict_custom_repr(zip(range(N), range(N))), # ): # native = repr(cont) # expected = '*' * len(native) # self.assertEqual(FancyPrinter().pformat(cont), expected) # self.assertEqual(FancyPrinter(width=1, indent=0).pformat(cont), expected) @no_type_check def test_basic_line_wrap(self): # verify basic line-wrapping operation o = { "RPM_cal": 0, "RPM_cal2": 48059, "Speed_cal": 0, "controldesk_runtime_us": 0, "main_code_runtime_us": 0, "read_io_runtime_us": 0, "write_io_runtime_us": 43690 } exp = """\ { 'RPM_cal': 0, 'RPM_cal2': 48059, 'Speed_cal': 0, 'controldesk_runtime_us': 0, 'main_code_runtime_us': 0, 'read_io_runtime_us': 0, 'write_io_runtime_us': 43690, }""" for t in [dict, dict2]: assert FancyPrinter().pformat(t(o)) == exp o = range(100) exp = "[\n %s,\n ]" % ",\n ".join(map(str, o)) for t in [list, list2]: 
assert FancyPrinter().pformat(t(o)) == exp o = tuple(range(100)) exp = "(\n %s,\n )" % ",\n ".join(map(str, o)) for t in [tuple, tuple2]: assert FancyPrinter().pformat(t(o)) == exp # indent parameter o = range(100) exp = "[\n %s,\n ]" % ",\n ".join(map(str, o)) for t in [list, list2]: assert FancyPrinter(indent=4).pformat(t(o)) == exp def test_nested_indentations(self): o1 = list(range(10)) o2 = dict(first=1, second=2, third=3) o = [o1, o2] expected = """\ [ [0, 1, 2, 3, 4, 5, 6, 7, 8, 9], {'first': 1, 'second': 2, 'third': 3}, ]""" assert FancyPrinter(indent=4, width=42).pformat(o) == expected expected = """\ [ [0, 1, 2, 3, 4, 5, 6, 7, 8, 9], { 'first': 1, 'second': 2, 'third': 3, }, ]""" assert FancyPrinter(indent=4, width=41).pformat(o) == expected def test_width(self): expected = """\ [ [ [ [ [ [1, 2, 3], '1 2', ], ], ], ], { 1: [1, 2, 3], 2: [12, 34], }, 'abc def ghi', ('ab cd ef',), set2({1, 23}), [ [ [ [ [1, 2, 3], '1 2', ], ], ], ], ]""" eval_ = eval o = eval_(expected) assert FancyPrinter(width=15).pformat(o) == expected assert FancyPrinter(width=16).pformat(o) == expected assert FancyPrinter(width=25).pformat(o) == expected assert FancyPrinter(width=14).pformat( o ) == """\ [ [ [ [ [ [ 1, 2, 3, ], '1 ' '2', ], ], ], ], { 1: [ 1, 2, 3, ], 2: [ 12, 34, ], }, 'abc def ' 'ghi', ( 'ab cd ' 'ef',), set2({ 1, 23, }), [ [ [ [ [ 1, 2, 3, ], '1 ' '2', ], ], ], ], ]""" def test_sorted_dict(self): # Starting in Python 2.5, pprint sorts dict displays by key regardless # of how small the dictionary may be. # Before the change, on 32-bit Windows pformat() gave order # 'a', 'c', 'b' here, so this test failed. d = {'a': 1, 'b': 1, 'c': 1} assert FancyPrinter().pformat(d) == "{'a': 1, 'b': 1, 'c': 1}" assert FancyPrinter().pformat([d, d]) == "[{'a': 1, 'b': 1, 'c': 1}, {'a': 1, 'b': 1, 'c': 1}]" # The next one is kind of goofy. The sorted order depends on the # alphabetic order of type names: "int" < "str" < "tuple". Before # Python 2.5, this was in the test_same_as_repr() test. It's worth # keeping around for now because it's one of few tests of pprint # against a crazy mix of types. 
assert FancyPrinter().pformat({ "xy\tab\n": (3, ), 5: [[]], (): {}, }) == r"{5: [[]], 'xy\tab\n': (3,), (): {}}" def test_ordered_dict(self, advanced_file_regression: AdvancedFileRegressionFixture): d: collections.OrderedDict = collections.OrderedDict() assert FancyPrinter(width=1).pformat(d) == "OrderedDict()" d = collections.OrderedDict([]) assert FancyPrinter(width=1).pformat(d) == "OrderedDict()" words = "the quick brown fox jumped over a lazy dog".split() d = collections.OrderedDict(zip(words, itertools.count())) advanced_file_regression.check(FancyPrinter().pformat(d)) def test_mapping_proxy(self): words = "the quick brown fox jumped over a lazy dog".split() d = dict(zip(words, itertools.count())) m = types.MappingProxyType(d) assert FancyPrinter().pformat( m ) == """\ mappingproxy({ 'the': 0, 'quick': 1, 'brown': 2, 'fox': 3, 'jumped': 4, 'over': 5, 'a': 6, 'lazy': 7, 'dog': 8, })""" d = collections.OrderedDict(zip(words, itertools.count())) m = types.MappingProxyType(d) assert FancyPrinter().pformat( m ) == """\ mappingproxy(OrderedDict([ ('the', 0), ('quick', 1), ('brown', 2), ('fox', 3), ('jumped', 4), ('over', 5), ('a', 6), ('lazy', 7), ('dog', 8), ]))""" def test_empty_simple_namespace(self): ns = types.SimpleNamespace() formatted = FancyPrinter().pformat(ns) assert formatted == "namespace()" def test_small_simple_namespace(self): ns = types.SimpleNamespace(a=1, b=2) formatted = FancyPrinter().pformat(ns) assert formatted == "namespace(a=1, b=2)" def test_subclassing(self, advanced_file_regression: AdvancedFileRegressionFixture): o = {"names with spaces": "should be presented using repr()", "others.should.not.be": "like.this"} advanced_file_regression.check(DottedPrettyPrinter().pformat(o)) @pytest.mark.parametrize( "value, width", [ pytest.param(set(range(7)), 20, id="case_1"), pytest.param(set2(range(7)), 20, id="case_2"), pytest.param(set3(range(7)), 20, id="case_3"), ] ) def test_set_reprs(self, value, width, advanced_file_regression: AdvancedFileRegressionFixture): assert FancyPrinter().pformat(set()) == "set()" assert FancyPrinter().pformat(set(range(3))) == "{0, 1, 2}" advanced_file_regression.check(FancyPrinter(width=width).pformat(value)) @pytest.mark.parametrize( "value, width", [ pytest.param(frozenset(range(7)), 20, id="case_1"), pytest.param(frozenset2(range(7)), 20, id="case_2"), pytest.param(frozenset3(range(7)), 20, id="case_3"), ] ) def test_frozenset_reprs(self, value, width, advanced_file_regression: AdvancedFileRegressionFixture): assert FancyPrinter().pformat(frozenset()) == "frozenset()" assert FancyPrinter().pformat(frozenset(range(3))) == "frozenset({0, 1, 2})" advanced_file_regression.check(FancyPrinter(width=width).pformat(value)) def test_depth(self): nested_tuple = (1, (2, (3, (4, (5, 6))))) nested_dict = {1: {2: {3: {4: {5: {6: 6}}}}}} nested_list = [1, [2, [3, [4, [5, [6, []]]]]]] assert FancyPrinter().pformat(nested_tuple) == repr(nested_tuple) assert FancyPrinter().pformat(nested_dict) == repr(nested_dict) assert FancyPrinter().pformat(nested_list) == repr(nested_list) lv1_tuple = "(1, (...))" lv1_dict = "{1: {...}}" lv1_list = "[1, [...]]" assert FancyPrinter(depth=1).pformat(nested_tuple) == lv1_tuple assert FancyPrinter(depth=1).pformat(nested_dict) == lv1_dict assert FancyPrinter(depth=1).pformat(nested_list) == lv1_list def test_sort_unorderable_values(self): # Issue 3976: sorted pprints fail for unorderable values. 
n = 20 keys = [Unorderable() for i in range(n)] random.shuffle(keys) skeys = sorted(keys, key=id) clean = lambda s: s.replace(' ', '').replace('\n', '') assert clean(FancyPrinter().pformat(set(keys))) == '{' + ','.join(map(repr, skeys)) + ",}" assert clean(FancyPrinter().pformat(frozenset(keys))) == "frozenset({" + ','.join(map(repr, skeys)) + ",})" assert clean(FancyPrinter().pformat(dict.fromkeys(keys)) ) == '{' + ','.join("%r:None" % k for k in keys) + ",}" # Issue 10017: TypeError on user-defined types as dict keys. assert FancyPrinter().pformat({Unorderable: 0, 1: 0}) == "{1: 0, " + repr(Unorderable) + ": 0}" # Issue 14998: TypeError on tuples with NoneTypes as dict keys. keys = [(1, ), (None, )] # type: ignore assert FancyPrinter().pformat(dict.fromkeys(keys, 0)) == "{%r: 0, %r: 0}" % tuple(sorted(keys, key=id)) def test_sort_orderable_and_unorderable_values(self): # Issue 22721: sorted pprints is not stable a = Unorderable() b = Orderable(hash(a)) # should have the same hash value # self-test assert a < b assert str(type(b)) < str(type(a)) assert sorted([b, a]) == [a, b] # type: ignore assert sorted([a, b]) == [a, b] # type: ignore # set assert FancyPrinter(width=1).pformat({b, a}) == f"{{\n {a!r},\n {b!r},\n }}" assert FancyPrinter(width=1).pformat({a, b}) == f"{{\n {a!r},\n {b!r},\n }}" # dict assert FancyPrinter(width=1).pformat(dict.fromkeys([b, a])) == f"{{\n {b!r}: None,\n {a!r}: None,\n }}" assert FancyPrinter(width=1).pformat(dict.fromkeys([a, b])) == f"{{\n {a!r}: None,\n {b!r}: None,\n }}" def test_str_wrap(self): # pprint tries to wrap strings intelligently fox = "the quick brown fox jumped over a lazy dog" assert FancyPrinter(width=19 ).pformat(fox) == """\ ('the quick brown ' 'fox jumped over ' 'a lazy dog')""" assert FancyPrinter(width=25).pformat({'a': 1, 'b': fox, 'c': 2}) == """\ { 'a': 1, 'b': 'the quick brown ' 'fox jumped over ' 'a lazy dog', 'c': 2, }""" # With some special characters # - \n always triggers a new line in the pprint # - \t and \n are escaped # - non-ASCII is allowed # - an apostrophe doesn't disrupt the pprint special = "Portons dix bons \"whiskys\"\nà l'avocat goujat\t qui fumait au zoo" assert FancyPrinter(width=68).pformat(special) == repr(special) assert FancyPrinter(width=31).pformat( special ) == """\ ('Portons dix bons "whiskys"\\n' "à l'avocat goujat\\t qui " 'fumait au zoo')""" assert FancyPrinter(width=20).pformat( special ) == """\ ('Portons dix bons ' '"whiskys"\\n' "à l'avocat " 'goujat\\t qui ' 'fumait au zoo')""" assert FancyPrinter(width=35).pformat([[[[[special]]]]]) == """\ [ [ [ [ [ 'Portons dix bons "whiskys"\\n' "à l'avocat goujat\\t qui " 'fumait au zoo', ], ], ], ], ]""" assert FancyPrinter(width=25).pformat([[[[[special]]]]]) == """\ [ [ [ [ [ 'Portons dix bons ' '"whiskys"\\n' "à l'avocat " 'goujat\\t qui ' 'fumait au zoo', ], ], ], ], ]""" assert FancyPrinter(width=23).pformat([[[[[special]]]]]) == """\ [ [ [ [ [ 'Portons dix ' 'bons "whiskys"\\n' "à l'avocat " 'goujat\\t qui ' 'fumait au ' 'zoo', ], ], ], ], ]""" # An unwrappable string is formatted as its repr unwrappable = 'x' * 100 assert FancyPrinter(width=80).pformat(unwrappable) == repr(unwrappable) assert FancyPrinter().pformat('') == "''" # Check that the pprint is a usable repr special *= 10 eval_ = eval for width in range(3, 40): assert eval_(FancyPrinter(width=width).pformat(special)) == special assert eval_(FancyPrinter(width=width).pformat([special] * 2)) == [special] * 2 def test_compact(self): o = ([list(range(i * i)) for i in range(5)] + [list(range(i)) 
for i in range(6)]) expected = """\ [[], [0], [0, 1, 2, 3], [0, 1, 2, 3, 4, 5, 6, 7, 8], [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15], [], [0], [0, 1], [0, 1, 2], [0, 1, 2, 3], [0, 1, 2, 3, 4]]""" assert FancyPrinter(width=47, compact=True).pformat(o, ) == expected def test_compact_width(self): levels = 20 number = 10 o = [0] * number for i in range(levels - 1): o = [o] # type: ignore for w in range(levels * 2 + 1, levels + 3 * number - 1): lines = FancyPrinter(width=w, compact=True).pformat(o, ).splitlines() maxwidth = max(map(len, lines)) assert maxwidth <= w maxwidth > w - 3 # pylint: disable=pointless-statement def test_bytes_wrap(self): assert FancyPrinter(width=1).pformat(b'') == "b''" assert FancyPrinter(width=1).pformat(b"abcd") == "b'abcd'" letters = b"abcdefghijklmnopqrstuvwxyz" assert FancyPrinter(width=29).pformat(letters) == repr(letters) assert FancyPrinter(width=19).pformat(letters) == """\ (b'abcdefghijkl' b'mnopqrstuvwxyz')""" assert FancyPrinter(width=18).pformat(letters) == """\ (b'abcdefghijkl' b'mnopqrstuvwx' b'yz')""" assert FancyPrinter(width=16).pformat(letters) == """\ (b'abcdefghijkl' b'mnopqrstuvwx' b'yz')""" special = bytes(range(16)) assert FancyPrinter(width=61).pformat(special) == repr(special) assert FancyPrinter(width=48).pformat( special ) == """\ (b'\\x00\\x01\\x02\\x03\\x04\\x05\\x06\\x07\\x08\\t\\n\\x0b' b'\\x0c\\r\\x0e\\x0f')""" assert FancyPrinter(width=32).pformat( special ) == """\ (b'\\x00\\x01\\x02\\x03' b'\\x04\\x05\\x06\\x07\\x08\\t\\n\\x0b' b'\\x0c\\r\\x0e\\x0f')""" assert FancyPrinter(width=1).pformat( special ) == """\ (b'\\x00\\x01\\x02\\x03' b'\\x04\\x05\\x06\\x07' b'\\x08\\t\\n\\x0b' b'\\x0c\\r\\x0e\\x0f')""" assert FancyPrinter(width=21).pformat({'a': 1, 'b': letters, 'c': 2} == """\ { 'a': 1, 'b': b'abcdefghijkl' b'mnopqrstuvwx' b'yz', 'c': 2, }""") assert FancyPrinter(width=20).pformat({'a': 1, 'b': letters, 'c': 2}) == """\ { 'a': 1, 'b': b'abcdefgh' b'ijklmnop' b'qrstuvwxyz', 'c': 2, }""" assert FancyPrinter(width=25).pformat([[[[[[letters]]]]]]) == """\ [ [ [ [ [ [ b'abcdefghijklmnop' b'qrstuvwxyz', ], ], ], ], ], ]""" assert FancyPrinter(width=41).pformat([[[[[[special]]]]]]) == """\ [ [ [ [ [ [ b'\\x00\\x01\\x02\\x03\\x04\\x05\\x06\\x07' b'\\x08\\t\\n\\x0b\\x0c\\r\\x0e\\x0f', ], ], ], ], ], ]""" # Check that the pprint is a usable repr eval_ = eval for width in range(1, 64): assert eval_(FancyPrinter(width=width).pformat(special)) == special assert eval_(FancyPrinter(width=width).pformat([special] * 2)) == [special] * 2 @pytest.mark.parametrize( "value, width", [ pytest.param(bytearray(), 1, id="case_1"), pytest.param(bytearray(b"abcdefghijklmnopqrstuvwxyz"), 40, id="case_2"), pytest.param(bytearray(b"abcdefghijklmnopqrstuvwxyz"), 28, id="case_3"), pytest.param(bytearray(b"abcdefghijklmnopqrstuvwxyz"), 27, id="case_4"), pytest.param(bytearray(b"abcdefghijklmnopqrstuvwxyz"), 25, id="case_5"), pytest.param(bytearray(range(16)), 72, id="case_6"), pytest.param(bytearray(range(16)), 57, id="case_7"), pytest.param(bytearray(range(16)), 41, id="case_8"), pytest.param(bytearray(range(16)), 1, id="case_9"), pytest.param( {'a': 1, 'b': bytearray(b"abcdefghijklmnopqrstuvwxyz"), 'c': 2}, 31, id="case_10", ), pytest.param([[[[[bytearray(b"abcdefghijklmnopqrstuvwxyz")]]]]], 37, id="case_11"), pytest.param([[[[[bytearray(range(16))]]]]], 50, id="case_12"), ] ) def test_bytearray_wrap(self, value, width, advanced_file_regression: AdvancedFileRegressionFixture): advanced_file_regression.check(FancyPrinter(width=width).pformat(value)) def 
test_default_dict(self, advanced_file_regression: AdvancedFileRegressionFixture): d: collections.defaultdict = collections.defaultdict(int) assert FancyPrinter(width=1).pformat(d) == "defaultdict(, {})" words = "the quick brown fox jumped over a lazy dog".split() d = collections.defaultdict(int, zip(words, itertools.count())) advanced_file_regression.check(FancyPrinter().pformat(d)) def test_counter(self, advanced_file_regression: AdvancedFileRegressionFixture): d: collections.Counter = collections.Counter() assert FancyPrinter(width=1).pformat(d) == "Counter()" d = collections.Counter("senselessness") advanced_file_regression.check(FancyPrinter(width=40).pformat(d)) def test_chainmap(self, advanced_file_regression: AdvancedFileRegressionFixture): d: collections.ChainMap = collections.ChainMap() assert FancyPrinter(width=1).pformat(d) == "ChainMap({})" words = "the quick brown fox jumped over a lazy dog".split() items = list(zip(words, itertools.count())) d = collections.ChainMap(dict(items)) advanced_file_regression.check(FancyPrinter().pformat(d)) def test_chainmap_nested(self, advanced_file_regression: AdvancedFileRegressionFixture): words = "the quick brown fox jumped over a lazy dog".split() items = list(zip(words, itertools.count())) d = collections.ChainMap(dict(items), collections.OrderedDict(items)) advanced_file_regression.check(FancyPrinter().pformat(d)) def test_deque(self): d: collections.deque = collections.deque() assert FancyPrinter(width=1).pformat(d) == "deque([])" d = collections.deque(maxlen=7) assert FancyPrinter(width=1).pformat(d) == "deque([], maxlen=7)" words = "the quick brown fox jumped over a lazy dog".split() d = collections.deque(zip(words, itertools.count())) assert FancyPrinter().pformat( d ) == """\ deque([('the', 0), ('quick', 1), ('brown', 2), ('fox', 3), ('jumped', 4), ('over', 5), ('a', 6), ('lazy', 7), ('dog', 8)])""" d = collections.deque(zip(words, itertools.count()), maxlen=7) assert FancyPrinter().pformat( d ) == """\ deque([('brown', 2), ('fox', 3), ('jumped', 4), ('over', 5), ('a', 6), ('lazy', 7), ('dog', 8)], maxlen=7)""" def test_user_dict(self, advanced_file_regression: AdvancedFileRegressionFixture): d: collections.UserDict = collections.UserDict() assert FancyPrinter(width=1).pformat(d) == "{}" words = "the quick brown fox jumped over a lazy dog".split() d = collections.UserDict(zip(words, itertools.count())) advanced_file_regression.check(FancyPrinter().pformat(d)) def test_user_list(self, advanced_file_regression: AdvancedFileRegressionFixture): d: collections.UserList = collections.UserList() assert FancyPrinter(width=1).pformat(d) == "[]" words = "the quick brown fox jumped over a lazy dog".split() d = collections.UserList(zip(words, itertools.count())) advanced_file_regression.check(FancyPrinter().pformat(d)) @pytest.mark.parametrize( "value, width, expects", [ (collections.UserString(''), 1, "''"), ( collections.UserString("the quick brown fox jumped over a lazy dog"), 20, str(StringList([ "('the quick brown '", " 'fox jumped over '", " 'a lazy dog')", ])) ), ({1: collections.UserString("the quick brown fox jumped over a lazy dog")}, 20, str( StringList([ '{', " 1: 'the quick '", " 'brown fox '", " 'jumped over a '", " 'lazy dog',", " }" ]) )), ] ) def test_user_string(self, value, width, expects): assert FancyPrinter(width=width).pformat(value) == expects class DottedPrettyPrinter(FancyPrinter): def format(self, object, context, maxlevels, level): # noqa: A002,A003 # pylint: disable=redefined-builtin if isinstance(object, str): if ' 
' in object: return repr(object), 1, 0 else: return object, 0, 0 else: return FancyPrinter.format(self, object, context, maxlevels, level) def test_simple_repr(advanced_file_regression: AdvancedFileRegressionFixture): @simple_repr('a', 'b', 'c', 'd', width=10) class F: a = "apple" b = "banana" c = "cherry" d = list(range(100)) advanced_file_regression.check(repr(F())) domdf_python_tools-3.10.0/tests/test_pretty_print_/000077500000000000000000000000001475315453000226175ustar00rootroot00000000000000domdf_python_tools-3.10.0/tests/test_pretty_print_/test_bytearray_wrap_case_10_.txt000066400000000000000000000001571475315453000311070ustar00rootroot00000000000000{ 'a': 1, 'b': bytearray(b'abcdefghijkl' b'mnopqrstuvwx' b'yz'), 'c': 2, } domdf_python_tools-3.10.0/tests/test_pretty_print_/test_bytearray_wrap_case_11_.txt000066400000000000000000000001631475315453000311050ustar00rootroot00000000000000[ [ [ [ [ bytearray(b'abcdefghijklmnop' b'qrstuvwxyz'), ], ], ], ], ] domdf_python_tools-3.10.0/tests/test_pretty_print_/test_bytearray_wrap_case_12_.txt000066400000000000000000000002231475315453000311030ustar00rootroot00000000000000[ [ [ [ [ bytearray(b'\x00\x01\x02\x03\x04\x05\x06\x07' b'\x08\t\n\x0b\x0c\r\x0e\x0f'), ], ], ], ], ] domdf_python_tools-3.10.0/tests/test_pretty_print_/test_bytearray_wrap_case_1_.txt000066400000000000000000000000171475315453000310220ustar00rootroot00000000000000bytearray(b'') domdf_python_tools-3.10.0/tests/test_pretty_print_/test_bytearray_wrap_case_2_.txt000066400000000000000000000000511475315453000310210ustar00rootroot00000000000000bytearray(b'abcdefghijklmnopqrstuvwxyz') domdf_python_tools-3.10.0/tests/test_pretty_print_/test_bytearray_wrap_case_3_.txt000066400000000000000000000000671475315453000310310ustar00rootroot00000000000000bytearray(b'abcdefghijkl' b'mnopqrstuvwxyz') domdf_python_tools-3.10.0/tests/test_pretty_print_/test_bytearray_wrap_case_4_.txt000066400000000000000000000001051475315453000310230ustar00rootroot00000000000000bytearray(b'abcdefghijkl' b'mnopqrstuvwx' b'yz') domdf_python_tools-3.10.0/tests/test_pretty_print_/test_bytearray_wrap_case_5_.txt000066400000000000000000000001051475315453000310240ustar00rootroot00000000000000bytearray(b'abcdefghijkl' b'mnopqrstuvwx' b'yz') domdf_python_tools-3.10.0/tests/test_pretty_print_/test_bytearray_wrap_case_6_.txt000066400000000000000000000001111475315453000310220ustar00rootroot00000000000000bytearray(b'\x00\x01\x02\x03\x04\x05\x06\x07\x08\t\n\x0b\x0c\r\x0e\x0f') domdf_python_tools-3.10.0/tests/test_pretty_print_/test_bytearray_wrap_case_7_.txt000066400000000000000000000001271475315453000310320ustar00rootroot00000000000000bytearray(b'\x00\x01\x02\x03\x04\x05\x06\x07\x08\t\n\x0b' b'\x0c\r\x0e\x0f') domdf_python_tools-3.10.0/tests/test_pretty_print_/test_bytearray_wrap_case_8_.txt000066400000000000000000000001451475315453000310330ustar00rootroot00000000000000bytearray(b'\x00\x01\x02\x03' b'\x04\x05\x06\x07\x08\t\n\x0b' b'\x0c\r\x0e\x0f') domdf_python_tools-3.10.0/tests/test_pretty_print_/test_bytearray_wrap_case_9_.txt000066400000000000000000000001631475315453000310340ustar00rootroot00000000000000bytearray(b'\x00\x01\x02\x03' b'\x04\x05\x06\x07' b'\x08\t\n\x0b' b'\x0c\r\x0e\x0f') domdf_python_tools-3.10.0/tests/test_pretty_print_/test_chainmap.txt000066400000000000000000000003231475315453000261750ustar00rootroot00000000000000ChainMap({ 'the': 0, 'quick': 1, 'brown': 2, 'fox': 3, 'jumped': 4, 'over': 5, 'a': 6, 'lazy': 7, 'dog': 8, }) 
domdf_python_tools-3.10.0/tests/test_pretty_print_/test_chainmap_nested.txt000066400000000000000000000010751475315453000275440ustar00rootroot00000000000000ChainMap({ 'the': 0, 'quick': 1, 'brown': 2, 'fox': 3, 'jumped': 4, 'over': 5, 'a': 6, 'lazy': 7, 'dog': 8, }, OrderedDict([ ('the', 0), ('quick', 1), ('brown', 2), ('fox', 3), ('jumped', 4), ('over', 5), ('a', 6), ('lazy', 7), ('dog', 8), ])) domdf_python_tools-3.10.0/tests/test_pretty_print_/test_counter.txt000066400000000000000000000001051475315453000260720ustar00rootroot00000000000000Counter({'s': 6, 'e': 4, 'n': 2, 'l': 1}) domdf_python_tools-3.10.0/tests/test_pretty_print_/test_default_dict.txt000066400000000000000000000004171475315453000270500ustar00rootroot00000000000000defaultdict(, { 'the': 0, 'quick': 1, 'brown': 2, 'fox': 3, 'jumped': 4, 'over': 5, 'a': 6, 'lazy': 7, 'dog': 8, }) domdf_python_tools-3.10.0/tests/test_pretty_print_/test_frozenset_reprs_case_1_.txt000066400000000000000000000001741475315453000312250ustar00rootroot00000000000000frozenset({ 0, 1, 2, 3, 4, 5, 6, }) domdf_python_tools-3.10.0/tests/test_pretty_print_/test_frozenset_reprs_case_2_.txt000066400000000000000000000002051475315453000312210ustar00rootroot00000000000000frozenset2({ 0, 1, 2, 3, 4, 5, 6, }) domdf_python_tools-3.10.0/tests/test_pretty_print_/test_frozenset_reprs_case_3_.txt000066400000000000000000000000421475315453000312210ustar00rootroot00000000000000frozenset3({0, 1, 2, 3, 4, 5, 6}) domdf_python_tools-3.10.0/tests/test_pretty_print_/test_ordered_dict.txt000066400000000000000000000004061475315453000270460ustar00rootroot00000000000000OrderedDict([ ('the', 0), ('quick', 1), ('brown', 2), ('fox', 3), ('jumped', 4), ('over', 5), ('a', 6), ('lazy', 7), ('dog', 8), ]) domdf_python_tools-3.10.0/tests/test_pretty_print_/test_set_reprs_case_1_.txt000066400000000000000000000000411475315453000277720ustar00rootroot00000000000000{ 0, 1, 2, 3, 4, 5, 6, } domdf_python_tools-3.10.0/tests/test_pretty_print_/test_set_reprs_case_2_.txt000066400000000000000000000001171475315453000277770ustar00rootroot00000000000000set2({ 0, 1, 2, 3, 4, 5, 6, }) domdf_python_tools-3.10.0/tests/test_pretty_print_/test_set_reprs_case_3_.txt000066400000000000000000000000341475315453000277760ustar00rootroot00000000000000set3({0, 1, 2, 3, 4, 5, 6}) domdf_python_tools-3.10.0/tests/test_pretty_print_/test_simple_repr.txt000066400000000000000000000016621475315453000267450ustar00rootroot00000000000000F( a='apple', b='banana', c='cherry', d=[ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, ] ) domdf_python_tools-3.10.0/tests/test_pretty_print_/test_subclassing.txt000066400000000000000000000001411475315453000267300ustar00rootroot00000000000000{ 'names with spaces': 'should be presented using repr()', others.should.not.be: like.this, } domdf_python_tools-3.10.0/tests/test_pretty_print_/test_user_dict.txt000066400000000000000000000001571475315453000264030ustar00rootroot00000000000000{ 'the': 0, 'quick': 1, 'brown': 2, 'fox': 3, 'jumped': 4, 'over': 5, 'a': 6, 'lazy': 7, 'dog': 8, } domdf_python_tools-3.10.0/tests/test_pretty_print_/test_user_list.txt000066400000000000000000000002011475315453000264210ustar00rootroot00000000000000[ ('the', 0), ('quick', 1), 
('brown', 2), ('fox', 3), ('jumped', 4), ('over', 5), ('a', 6), ('lazy', 7), ('dog', 8), ] domdf_python_tools-3.10.0/tests/test_secrets.py000066400000000000000000000016071475315453000217430ustar00rootroot00000000000000# 3rd party import pytest # this package from domdf_python_tools.secrets import Secret from domdf_python_tools.words import get_words_list @pytest.mark.parametrize("value", get_words_list()) def test_secret(value): the_secret = Secret(value) assert isinstance(the_secret, str) assert isinstance(the_secret.value, str) assert the_secret.value == value assert the_secret == value assert str(the_secret) == "" assert repr(the_secret) == "''" assert str([the_secret]) == "['']" assert str((the_secret, )) == "('',)" assert str({the_secret}) == "{''}" assert str({"token": the_secret}) == "{'token': ''}" assert repr([the_secret]) == "['']" assert repr((the_secret, )) == "('',)" assert repr({the_secret}) == "{''}" assert repr({"token": the_secret}) == "{'token': ''}" assert hash(the_secret) == hash(value) domdf_python_tools-3.10.0/tests/test_stringlist.py000066400000000000000000000377101475315453000225010ustar00rootroot00000000000000# stdlib import pickle import textwrap from textwrap import dedent from typing import no_type_check # 3rd party import pytest # this package from domdf_python_tools.stringlist import DelimitedList, Indent, StringList, joinlines, splitlines class TestStringList: def test_creation(self): assert not StringList() assert not StringList([]) assert not StringList(()) assert StringList([1]) == ['1'] assert StringList(['1']) == ['1'] assert StringList('1') == ['1'] assert StringList("1\n") == ['1', ''] with pytest.raises(TypeError, match="'int' object is not iterable"): StringList(1) # type: ignore def test_append(self): sl = StringList() sl.append('') assert sl == [''] sl.append('') assert sl == ['', ''] sl.append("hello") assert sl == ['', '', "hello"] sl.append("world\n\n\n") assert sl == ['', '', "hello", "world", '', '', ''] sl.append("1234") assert sl == ['', '', "hello", "world", '', '', '', "1234"] def test_insert(self): sl = StringList(['', '', "hello", "world", '', '', '', "1234"]) sl.insert(0, "foo") assert sl == ["foo", '', '', "hello", "world", '', '', '', "1234"] sl.insert(1, "bar") assert sl == ["foo", "bar", '', '', "hello", "world", '', '', '', "1234"] sl.insert(0, "1234") assert sl == ["1234", "foo", "bar", '', '', "hello", "world", '', '', '', "1234"] sl.insert(11, "baz") assert sl == ["1234", "foo", "bar", '', '', "hello", "world", '', '', '', "1234", "baz"] sl.insert(3, "\na line\n") assert sl == ["1234", "foo", "bar", '', "a line", '', '', '', "hello", "world", '', '', '', "1234", "baz"] sl.insert(100, "end") assert sl == [ "1234", "foo", "bar", '', "a line", '', '', '', "hello", "world", '', '', '', "1234", "baz", "end" ] def test_setitem(self): sl = StringList(['', '', "hello", "world", '', '', '', "1234"]) sl[0] = "foo" assert sl == ["foo", '', "hello", "world", '', '', '', "1234"] sl[1] = "bar" assert sl == ["foo", "bar", "hello", "world", '', '', '', "1234"] sl[2] = "\nhello\nworld\n" assert sl == ["foo", "bar", '', "hello", "world", '', "world", '', '', '', "1234"] sl[3:4] = "\nfoo\nbar\n", "baz" assert sl == ["foo", "bar", '', '', "foo", "bar", '', "baz", '', "world", '', '', '', "1234"] sl[3:5] = iter(["foo", "bar", "baz"]) assert sl == ["foo", "bar", '', "foo", "bar", "baz", '', "baz", '', "world", '', '', '', "1234"] def test_blankline(self): sl = StringList(['', '', "hello", "world", '', '', '', "1234"]) sl.blankline() assert sl == ['', 
'', "hello", "world", '', '', '', "1234", ''] sl.blankline() assert sl == ['', '', "hello", "world", '', '', '', "1234", '', ''] sl.blankline(ensure_single=True) assert sl == ['', '', "hello", "world", '', '', '', "1234", ''] sl.blankline(ensure_single=True) assert sl == ['', '', "hello", "world", '', '', '', "1234", ''] sl.append('\t') sl.blankline(ensure_single=True) assert sl == ['', '', "hello", "world", '', '', '', "1234", ''] sl.append(" ") sl.blankline(ensure_single=True) assert sl == ['', '', "hello", "world", '', '', '', "1234", ''] sl.append(" ") sl.blankline(ensure_single=True) sl.blankline() assert sl == ['', '', "hello", "world", '', '', '', "1234", '', ''] def test_slicing(self): sl = StringList(['', '', "hello", "world", '', '', '', "1234"]) assert sl[:-3] == ['', '', "hello", "world", ''] assert sl[-3:] == ['', '', "1234"] def test_start_of_line_indents(self): assert StringList("Hello\n World") == ["Hello", " World"] assert StringList("Hello\n World", convert_indents=True) == ["Hello", "\tWorld"] def test_negative_getitem(self): sl = StringList(['', '', "hello", "world", '', '', "abc", "1234"]) assert sl[-1] == "1234" sl[-1] += "5678" assert sl == ['', '', "hello", "world", '', '', "abc", "12345678"] assert sl[-2] == "abc" sl[-2] += "def" assert sl == ['', '', "hello", "world", '', '', "abcdef", "12345678"] def test_indent_size(self): sl = StringList(['', '', "hello", "world", '', '', '', "1234"]) assert sl.indent_size == 0 sl.indent_size = 7 assert sl.indent_size == 7 sl.set_indent_size() assert sl.indent_size == 0 sl.set_indent_size(2) assert sl.indent_size == 2 sl.indent_size += 1 assert sl.indent_size == 3 sl.indent_size -= 2 assert sl.indent_size == 1 def test_indent_type(self): sl = StringList(['', '', "hello", "world", '', '', '', "1234"]) assert sl.indent_type == '\t' with pytest.raises(ValueError, match="'type' cannot an empty string."): sl.indent_type = '' assert sl.indent_type == '\t' sl.indent_type = ' ' assert sl.indent_type == ' ' sl.set_indent_type('\t') assert sl.indent_type == '\t' sl.set_indent_type(' ') assert sl.indent_type == ' ' with pytest.raises(ValueError, match="'type' cannot an empty string."): sl.set_indent_type('') assert sl.indent_type == ' ' sl.set_indent_type() assert sl.indent_type == '\t' def test_indent(self): sl = StringList() sl.set_indent_size(1) sl.append("Indented") assert sl == ["\tIndented"] sl.set_indent_type(" ") sl.append("Indented") assert sl == ["\tIndented", " Indented"] expected_list = [ "class Foo:", '', "\tdef bar(self, listicle: List[Item]):", "\t\t...", '', "\tdef __repr__(self) -> str:", '\t\treturn "Foo()"', '', ] expected_string = dedent( """\ class Foo: def bar(self, listicle: List[Item]): ... 
def __repr__(self) -> str: return "Foo()" """ ) sl = StringList() sl.append("class Foo:") sl.blankline(True) sl.set_indent_size(1) sl.append("def bar(self, listicle: List[Item]):") sl.indent_size += 1 sl.append("...") sl.indent_size -= 1 sl.blankline(True) sl.append("def __repr__(self) -> str:") sl.indent_size += 1 sl.append('return "Foo()"') sl.indent_size -= 1 sl.blankline(True) sl.set_indent_size(0) assert sl == expected_list assert str(sl) == expected_string assert sl == expected_string sl = StringList() sl.append("class Foo:") sl.blankline(True) with sl.with_indent('\t', 1): sl.append("def bar(self, listicle: List[Item]):") with sl.with_indent('\t', 2): sl.append("...") sl.blankline(True) sl.append("def __repr__(self) -> str:") with sl.with_indent('\t', 2): sl.append('return "Foo()"') sl.blankline(True) assert sl.indent_size == 0 assert sl == expected_list assert str(sl) == expected_string assert sl == expected_string sl = StringList() sl.append("class Foo:") sl.blankline(True) with sl.with_indent_size(1): sl.append("def bar(self, listicle: List[Item]):") with sl.with_indent_size(2): sl.append("...") sl.blankline(True) sl.append("def __repr__(self) -> str:") with sl.with_indent_size(2): sl.append('return "Foo()"') sl.blankline(True) assert sl.indent_size == 0 assert sl == expected_list assert str(sl) == expected_string assert sl == expected_string sl = StringList() sl.append("class Foo:") sl.set_indent(Indent(0, " ")) sl.blankline(True) with sl.with_indent_size(1): sl.append("def bar(self, listicle: List[Item]):") with sl.with_indent_size(2): sl.append("...") sl.blankline(True) sl.append("def __repr__(self) -> str:") with sl.with_indent_size(2): sl.append('return "Foo()"') sl.blankline(True) assert sl.indent_size == 0 assert sl == [x.expandtabs(4) for x in expected_list] assert str(sl) == expected_string.expandtabs(4) assert sl == expected_string.expandtabs(4) sl = StringList() sl.append("class Foo:") sl.set_indent(" ", 0) sl.blankline(True) with sl.with_indent_size(1): sl.append("def bar(self, listicle: List[Item]):") with sl.with_indent_size(2): sl.append("...") sl.blankline(True) sl.append("def __repr__(self) -> str:") with sl.with_indent_size(2): sl.append('return "Foo()"') sl.blankline(True) assert sl.indent_size == 0 assert sl == [x.expandtabs(4) for x in expected_list] assert str(sl) == expected_string.expandtabs(4) assert sl == expected_string.expandtabs(4) sl = StringList() sl.append("class Foo:") sl.blankline(True) with sl.with_indent_size(1): sl.append("def bar(self, listicle: List[Item]):") with sl.with_indent_size(2): sl.append("...") sl.blankline(True) sl.append("def __repr__(self) -> str:") with sl.with_indent_size(2): with sl.with_indent_type(" "): sl.append('return "Foo()"') sl.blankline(True) assert sl.indent_size == 0 expected_list[-2] = ' return "Foo()"' assert sl == expected_list assert str(sl) == expected_string.replace('\t\treturn "Foo()"', ' return "Foo()"') assert sl == expected_string.replace('\t\treturn "Foo()"', ' return "Foo()"') def test_convert_indents(self): sl = StringList(convert_indents=True) sl.append(" Indented") assert sl == ["\tIndented"] def test_set_indent_error(self): sl = StringList() with pytest.raises(TypeError, match="'size' argument cannot be used when providing an 'Indent' object."): sl.set_indent(Indent(0, " "), 5) def test_extend(self): sl = StringList(['', '', "hello", "world", '', '', '', "1234"]) sl.extend(["\nfoo\nbar\n baz"]) assert sl == ['', '', "hello", "world", '', '', '', "1234", '', "foo", "bar", " baz"] def 
test_clear(self): sl = StringList(['', '', "hello", "world", '', '', '', "1234"]) sl.clear() assert sl == [] def test_copy(self): sl = StringList(['', '', "hello", "world", '', '', '', "1234"]) sl2 = sl.copy() assert sl == sl2 assert sl2 == ['', '', "hello", "world", '', '', '', "1234"] assert isinstance(sl2, StringList) def test_count(self): sl = StringList(['', '', "hello", "world", '', '', '', "1234"]) assert sl.count("hello") == 1 def test_count_blanklines(self): sl = StringList(['', '', "hello", "world", '', '', '', "1234"]) assert sl.count_blanklines() == 5 def test_index(self): sl = StringList(['', '', "hello", "world", '', '', '', "1234"]) assert sl.index("hello") == 2 def test_pop(self): sl = StringList(['', '', "hello", "world", '', '', '', "1234"]) assert sl.pop(2) == "hello" assert sl == ['', '', "world", '', '', '', "1234"] assert isinstance(sl, StringList) def test_remove(self): sl = StringList(['', '', "hello", "world", '', '', '', "1234"]) sl.remove("hello") assert sl == ['', '', "world", '', '', '', "1234"] assert isinstance(sl, StringList) def test_reverse(self): sl = StringList(['', '', "hello", "world", '', '', '', "1234"]) sl.reverse() assert sl == ["1234", '', '', '', "world", "hello", '', ''] assert isinstance(sl, StringList) def test_sort(self): sl = StringList(['', '', "hello", "world", '', '', '', "1234"]) sl.sort() assert sl == ['', '', '', '', '', "1234", "hello", "world"] assert isinstance(sl, StringList) sl = StringList(['', '', "hello", "world", '', '', '', "1234"]) sl.sort(reverse=True) assert sl == ["world", "hello", "1234", '', '', '', '', ''] assert isinstance(sl, StringList) def test_str(self): sl = StringList(['', '', "hello", "world", '', '', '', "1234"]) assert str(sl) == "\n\nhello\nworld\n\n\n\n1234" sl = StringList(['', '', "hello", "world", '', '', '', "1234", '']) assert str(sl) == "\n\nhello\nworld\n\n\n\n1234\n" def test_bytes(self): sl = StringList(['', '', "hello", "world", '', '', '', "1234"]) assert bytes(sl) == b"\n\nhello\nworld\n\n\n\n1234" sl = StringList(['', '', "hello", "world", '', '', '', "1234", '']) assert bytes(sl) == b"\n\nhello\nworld\n\n\n\n1234\n" @pytest.mark.xfail() def test_pickle(self): sl = StringList(['', '', "hello", "world", '', '', '', "1234"]) loaded = pickle.loads(pickle.dumps(sl)) # nosec: B301 assert sl == loaded assert sl.indent == loaded.indent assert isinstance(loaded, StringList) class TestIndent: def test_creation(self): indent = Indent() assert indent.size == 0 assert indent.type == '\t' indent = Indent(3, " ") assert indent.size == 3 assert indent.type == " " def test_iter(self): indent = Indent(3, " ") assert tuple(indent) == (3, " ") assert list(iter(indent)) == [3, " "] def test_size(self): indent = Indent() indent.size = 1 assert indent.size == 1 indent.size = '2' # type: ignore assert indent.size == 2 indent.size = 3.0 # type: ignore assert indent.size == 3 def test_type(self): indent = Indent() indent.type = " " assert indent.type == " " indent.type = ' ' assert indent.type == ' ' indent.type = 1 # type: ignore assert indent.type == '1' indent.type = ">>> " assert indent.type == ">>> " with pytest.raises(ValueError, match="'type' cannot an empty string."): indent.type = '' def test_str(self): assert str(Indent()) == '' assert str(Indent(1)) == '\t' assert str(Indent(5)) == "\t\t\t\t\t" assert str(Indent(type=" ")) == '' assert str(Indent(1, type=" ")) == " " assert str(Indent(5, type=" ")) == " " * 5 assert str(Indent(type=">>> ")) == '' assert str(Indent(1, type=">>> ")) == ">>> " def 
test_repr(self): assert repr(Indent()) == "Indent(size=0, type='\\t')" assert repr(Indent(1)) == "Indent(size=1, type='\\t')" assert repr(Indent(5)) == "Indent(size=5, type='\\t')" assert repr(Indent(type=" ")) == "Indent(size=0, type=' ')" assert repr(Indent(1, type=" ")) == "Indent(size=1, type=' ')" assert repr(Indent(5, type=" ")) == "Indent(size=5, type=' ')" assert repr(Indent(type=">>> ")) == "Indent(size=0, type='>>> ')" assert repr(Indent(1, type=">>> ")) == "Indent(size=1, type='>>> ')" def test_eq(self): assert Indent() == Indent() assert Indent() == (0, '\t') assert Indent() == '' assert Indent(1, " ") == Indent(1, " ") assert Indent(1, " ") == (1, " ") assert Indent(1, " ") == " " assert Indent(2, '\t') == Indent(2, '\t') assert Indent(2, '\t') == (2, '\t') assert Indent(2, '\t') == "\t\t" assert Indent() != 1 def test_pickle(self): indent = Indent(2, " ") assert indent == pickle.loads(pickle.dumps(indent)) # nosec: B301 def test_delimitedlist(): data = DelimitedList(['a', 'b', 'c', 'd', 'e']) assert data.__format__(", ") == "a, b, c, d, e" assert data.__format__("; ") == "a; b; c; d; e" assert data.__format__(';') == "a;b;c;d;e" assert data.__format__('\n') == "a\nb\nc\nd\ne" assert f"{data:, }" == "a, b, c, d, e" assert f"{data:; }" == "a; b; c; d; e" assert f"{data:;}" == "a;b;c;d;e" assert f"{data:\n}" == "a\nb\nc\nd\ne" assert f"{data:, }" == "a, b, c, d, e" assert f"{data:; }" == "a; b; c; d; e" assert f"{data:;}" == "a;b;c;d;e" assert f"{data:\n}" == "a\nb\nc\nd\ne" joinlines_splitlines_param = pytest.mark.parametrize( "string, lines", [ ("abc\ndef\n\rghi", [("abc", '\n'), ("def", '\n'), ('', '\r'), ("ghi", '')]), ("abc\ndef\n\r\nghi", [("abc", '\n'), ("def", '\n'), ('', "\r\n"), ("ghi", '')]), ("abc\ndef\r\nghi", [("abc", '\n'), ("def", "\r\n"), ("ghi", '')]), ("abc\ndef\r\nghi\n", [("abc", '\n'), ("def", "\r\n"), ("ghi", '\n')]), ("abc\ndef\r\nghi\n\r", [("abc", '\n'), ("def", "\r\n"), ("ghi", '\n'), ('', '\r')]), ("\nabc\ndef\r\nghi\n\r", [('', '\n'), ("abc", '\n'), ("def", "\r\n"), ("ghi", '\n'), ('', '\r')]), ("abcdef", [("abcdef", '')]), ] ) @joinlines_splitlines_param def test_splitlines(string, lines): assert splitlines(string) == lines @joinlines_splitlines_param def test_joinlines(string, lines): assert string == joinlines(lines) @no_type_check def test_stringlist_textwrap_indent(): sl = StringList(['', '', "hello", "world", '', '', '', "1234"]) assert textwrap.indent(sl, " ") == "\n\n hello\n world\n\n\n\n 1234\n" assert textwrap.indent(sl, '\t') == "\n\n\thello\n\tworld\n\n\n\n\t1234\n" assert textwrap.indent(sl, ">>> ") == "\n\n>>> hello\n>>> world\n\n\n\n>>> 1234\n" def test_stringlist_splitlines(): sl = StringList(['', '', "hello", "world", '', '', '', "1234"]) assert sl.splitlines() is sl assert list(sl.splitlines()) == ['', '', "hello", "world", '', '', '', "1234"] assert sl.splitlines(keepends=True) == ['\n', '\n', "hello\n", "world\n", '\n', '\n', '\n', "1234\n"] domdf_python_tools-3.10.0/tests/test_terminal.py000066400000000000000000000076271475315453000221160ustar00rootroot00000000000000# stdlib import json import re import sys # 3rd party from coincidence.selectors import not_windows, only_windows from faker import Faker from faker.providers import bank, company, internet, phone_number, python # this package from domdf_python_tools.terminal import Echo, br, clear, interrupt, overtype fake = Faker() fake.add_provider(internet) fake.add_provider(bank) fake.add_provider(company) fake.add_provider(phone_number) fake.add_provider(python) def 
test_br(capsys): br() captured = capsys.readouterr() stdout = captured.out.split('\n') assert stdout == ['', ''] br() print("foo") captured = capsys.readouterr() stdout = captured.out.split('\n') assert stdout == ['', "foo", ''] print("foo") br() print("bar") captured = capsys.readouterr() stdout = captured.out.split('\n') assert stdout == ["foo", '', "bar", ''] @only_windows(reason="Different test used for POSIX") def test_interrupt_windows(capsys): interrupt() captured = capsys.readouterr() stdout = captured.out.split('\n') assert stdout == ["(Press Ctrl-C to quit at any time)", ''] @not_windows(reason="Different test used for Windows") def test_interrupt_posix(capsys): interrupt() captured = capsys.readouterr() stdout = captured.out.split('\n') assert stdout == ["(Press Ctrl-D to quit at any time)", ''] # @only_windows(reason="Different test used for POSIX") # def test_clear_windows(capsys): # clear() # # captured = capsys.readouterr() # stdout = captured.out.split("\n") # assert stdout == ['(Press Ctrl-C to quit at any time.)', ''] # @not_windows(reason="Different test used for Windows") def test_clear_posix(capsys): clear() captured = capsys.readouterr() stdout = captured.out.split('\n') assert stdout == ["\u001bc"] print("Hello World!") clear() captured = capsys.readouterr() stdout = captured.out.split('\n') assert stdout == ["Hello World!", "\u001bc"] def test_overtype(capsys): print("Waiting...", end='') overtype("foo", "bar") sys.stdout.flush() captured = capsys.readouterr() stdout = captured.out.split('\n') assert stdout == ["Waiting...\rfoo bar"] print("Waiting...", end='') overtype("foo", "bar", sep='') sys.stdout.flush() captured = capsys.readouterr() stdout = captured.out.split('\n') assert stdout == ["Waiting...\rfoobar"] print("Waiting...", end='') overtype("foo", "bar", sep='-', end='\n') sys.stdout.flush() captured = capsys.readouterr() stdout = captured.out.split('\n') assert stdout == ["Waiting...\rfoo-bar", ''] sys.stderr.write("Waiting...") overtype("foo", "bar", file=sys.stderr) sys.stdout.flush() captured = capsys.readouterr() stderr = captured.err.split('\n') assert stderr == ["Waiting...\rfoo bar"] def test_echo(capsys): with Echo(): abc = "a variable" var = 1234 captured = capsys.readouterr() stdout = captured.out.split('\n') data = { "abc": "a variable", "var": 1234, } dictionary = json.dumps(data).replace('"', "'") assert stdout == [f" {dictionary}", ''] # def test_echo_pprint(capsys): # Lots of variables, which should be pretty printed with Echo(): name = fake.name() address = fake.address() ip_address = fake.ipv4_private() iban = fake.iban() employer = fake.company() telephone = fake.phone_number() alive = fake.pybool() z_other = fake.pydict() captured = capsys.readouterr() stdout = captured.out.split('\n') assert stdout[0] == " {{'address': '{}',".format(address.replace('\n', "\\n")) assert stdout[1] == f" 'alive': {alive}," assert stdout[2] == f" 'employer': '{employer}'," assert stdout[3] == f" 'iban': '{iban}'," assert stdout[4] == f" 'ip_address': '{ip_address}'," assert stdout[5] == f" 'name': '{name}'," assert stdout[6] == f" 'telephone': '{telephone}'," assert stdout[7].startswith(" 'z_other': {") assert stdout[7].endswith(',') for line in range(8, 13, 1): assert re.match(r"^\s*'.*':.*[,}]$", stdout[line]) assert stdout[-2].endswith('}') assert stdout[-1] == '' 
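# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original test suite) of the
# domdf_python_tools.terminal helpers exercised by test_terminal.py above:
# br(), interrupt() and overtype().  The download_with_progress() function is
# invented; it relies only on the behaviour the tests assert (overtype()
# emits '\r' followed by the values joined with `sep`, so the current line is
# rewritten in place).

import sys
import time

from domdf_python_tools.terminal import br, interrupt, overtype


def download_with_progress(total_steps: int = 5) -> None:
	"""Print a single status line which is rewritten in place."""

	interrupt()  # "(Press Ctrl-C / Ctrl-D to quit at any time)"
	print("Downloading...", end='')

	for step in range(1, total_steps + 1):
		# Rewrite the current line rather than printing a new one.
		overtype("Downloading:", f"{step}/{total_steps}")
		sys.stdout.flush()
		time.sleep(0.1)

	overtype("Downloading:", "done", end='\n')
	br()  # trailing blank line


if __name__ == "__main__":
	download_with_progress()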
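# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original test suite) of the
# StringList / Indent API exercised in test_stringlist.py further above, used
# here to build a small block of Python source.  The Greeter example is
# invented; only behaviour the tests assert is relied upon -- append(),
# blankline(True), the with_indent()/with_indent_size() context managers,
# and str() joining the lines with newlines.

from domdf_python_tools.stringlist import StringList

source = StringList()
source.append("class Greeter:")
source.blankline(True)  # ensure a single blank line

with source.with_indent('\t', 1):
	source.append("def greet(self, name: str) -> str:")

	with source.with_indent_size(2):
		source.append('return f"Hello {name}!"')

	source.blankline(True)

print(str(source))
# class Greeter:
#
# 	def greet(self, name: str) -> str:
# 		return f"Hello {name}!"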
domdf_python_tools-3.10.0/tests/test_testing_/000077500000000000000000000000001475315453000215315ustar00rootroot00000000000000domdf_python_tools-3.10.0/tests/test_testing_/test_check_file_output.txt000066400000000000000000000000111475315453000270150ustar00rootroot00000000000000Success! domdf_python_tools-3.10.0/tests/test_typing.py000066400000000000000000000047351475315453000216120ustar00rootroot00000000000000""" test_typing ~~~~~~~~~~~~~~~ Test functions in typing.py """ # stdlib import os import pathlib from typing import Dict, List, Sequence, Set, Tuple, Union # 3rd party import pytest # this package from domdf_python_tools.typing import PathLike, check_membership @pytest.mark.parametrize( "obj, type_", [ ("abc", Union[str, int, float, bytes]), (1234, Union[str, int, float, bytes]), (12.34, Union[str, int, float, bytes]), (b"\x0F", Union[str, int, float, bytes]), ("abc", List[str]), (1234, Sequence[int]), (12.34, Set[float]), (1234, Tuple[int, float, str]), (12.34, Tuple[int, float, str]), ("abc", Tuple[int, float, str]), (1234, Dict[int, float]), (12.34, Dict[int, float]), ], ) def test_check_membership_true(obj, type_): # todo: Positions for Tuple and Dict assert check_membership(obj, type_) @pytest.mark.parametrize( "obj, type_", [ ("abc", Union[float, bytes]), (1234, Union[str, float, bytes]), (12.34, Union[str, int]), (b"\x0F", Union[str, int, float]), ("abc", List[int]), (1234, Sequence[bytes]), (12.34, Set[str]), (1234, Tuple[str, float, bytes]), (12.34, Tuple[int, bytes, str]), ("abc", Tuple[int, float, bytes]), (1234, Dict[bytes, float]), (12.34, Dict[int, str]), ], ) def test_check_membership_false(obj, type_): # todo: Positions for Tuple and Dict assert not check_membership(obj, type_) class MyPathLike(os.PathLike): def __init__(self, directory, filename): self.directory = str(directory) self.filename = str(filename) def __fspath__(self): os.path.join(self.directory, self.filename) class MyStr(str): __slots__ = () class MyPath(type(pathlib.Path())): # type: ignore pass @pytest.mark.parametrize( "obj", [ "/home/domdf/Python", "test_typing.py", pathlib.Path("/home/domdf/Python"), pathlib.Path("test_typing.py"), pathlib.PurePosixPath("test_typing.py"), pathlib.PureWindowsPath("test_typing.py"), MyPath("/home/domdf/Python"), MyPath("test_typing.py"), MyStr("/home/domdf/Python"), MyStr("test_typing.py"), MyPathLike("/home/domdf", "Python"), MyPathLike('.', "test_typing.py"), ], ) def test_pathlike_true(obj): assert check_membership(obj, PathLike) @pytest.mark.parametrize( "obj", [ 1234, 12.34, [1, 2, 3, 4, 5], {1, 2, 3, 4, 5}, (1, 2, 3, 4, 5), {'a': 1, 'b': 2}, ] ) def test_pathlike_false(obj): assert not check_membership(obj, PathLike) domdf_python_tools-3.10.0/tests/test_userlist.py000066400000000000000000000051741475315453000221500ustar00rootroot00000000000000# From https://raw.githubusercontent.com/python/cpython/master/Lib/test/test_userlist.py # Licensed under the Python Software Foundation License Version 2. # Copyright © 2001-2020 Python Software Foundation. All rights reserved. # Copyright © 2000 BeOpen.com. All rights reserved. # Copyright © 1995-2000 Corporation for National Research Initiatives. All rights reserved. # Copyright © 1991-1995 Stichting Mathematisch Centrum. All rights reserved. 
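# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original repository) of the
# check_membership() and PathLike helpers exercised in test_typing.py above.
# The validate_source() function is invented; check_membership(obj, type_)
# is used exactly as in the tests -- it reports whether `obj` is an instance
# of one of the member types of `type_`.

import pathlib
from typing import Union

from domdf_python_tools.typing import PathLike, check_membership


def validate_source(source) -> pathlib.Path:
	"""Accept anything path-like (str, pathlib.Path, os.PathLike) and normalise it."""

	if not check_membership(source, PathLike):
		raise TypeError(f"Expected a path-like object, got {type(source)!r}")

	return pathlib.Path(source)


print(check_membership("abc", Union[str, int]))   # True
print(check_membership(12.34, Union[str, int]))   # False
print(validate_source("README.rst"))              # README.rst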
# Check every path through every method of UserList # stdlib from typing import Sequence, Type, no_type_check # 3rd party from coincidence.selectors import not_pypy # this package from domdf_python_tools.bases import Lineup, UserList from tests import list_tests class TestList(list_tests.CommonTest): type2test: Type[Sequence] = list def test_getslice(self): super().test_getslice() l = [0, 1, 2, 3, 4] u = self.type2test(l) # type: ignore for i in range(-3, 6): assert u[:i] == l[:i] assert u[i:] == l[i:] for j in range(-3, 6): assert u[i:j] == l[i:j] def test_slice_type(self): l = [0, 1, 2, 3, 4] u = self.type2test(l) # type: ignore assert u[:] != u.__class__ assert u[:] == u @not_pypy("Doesn't work on PyPy") def test_iadd(self): super().test_iadd() u = [0, 1] u += self.type2test([0, 1]) # type: ignore assert u == [0, 1, 0, 1] @no_type_check def test_mixedcmp(self): u = self.type2test([0, 1]) assert u == [0, 1] assert u != [0] assert u != [0, 2] @no_type_check def test_mixedadd(self): u = self.type2test([0, 1]) assert u + [] == u assert u + [2] == [0, 1, 2] @no_type_check def test_userlist_copy(self): u = self.type2test([6, 8, 1, 9, 1]) v = u.copy() assert u == v assert type(u) == type(v) # pylint: disable=unidiomatic-typecheck class TestUserList(TestList): type2test: Type[UserList] = UserList def test_add_specials(self): u = UserList("spam") u2 = u + "eggs" assert u2 == list("spameggs") def test_radd_specials(self): u = UserList("eggs") u2 = "spam" + u assert u2 == list("spameggs") u2 = u.__radd__(UserList("spam")) assert u2 == list("spameggs") class TestLineup(TestList): type2test: Type[Lineup] = Lineup def test_replace(self): u = self.type2test([-2, -1, 0, 1, 2]) u.replace(2, 3) assert u[-1] == 3 def test_fluent(self): u = self.type2test([2, 1, 0, -1, -2]) assert u.sort() is u assert u == [-2, -1, 0, 1, 2] assert u.replace(2, 3) is u assert u == [-2, -1, 0, 1, 3] assert u.reverse() is u assert u == [3, 1, 0, -1, -2] assert u.append(4) is u assert u == [3, 1, 0, -1, -2, 4] assert u.insert(0, -3) is u assert u == [-3, 3, 1, 0, -1, -2, 4] domdf_python_tools-3.10.0/tests/test_utils.py000066400000000000000000000342411475315453000214330ustar00rootroot00000000000000""" test_utils ~~~~~~~~~~~~~~~ Test functions in utils.py """ # stdlib import decimal import pathlib import platform import re import sys from collections import namedtuple # 3rd party import click import coincidence import pytest from coincidence.regressions import AdvancedDataRegressionFixture # this package from domdf_python_tools.typing import HasHead from domdf_python_tools.utils import ( cmp, convert_indents, divide, double_repr_string, enquote_value, head, list2str, posargs2kwargs, printr, printt, pyversion, redirect_output, redivide, replace_nonprinting, stderr_writer, str2tuple, strtobool, trim_precision, unique_sorted ) def test_pyversion(): assert isinstance(pyversion, int) class TestList2Str: @pytest.mark.parametrize( "value, expects", [ ([1, 2, 3], "1,2,3"), (['a', 'b', 'c'], "a,b,c"), (['a', 'b', 1, 2], "a,b,1,2"), (['a', 2, pathlib.Path("foo.txt")], "a,2,foo.txt"), ], ) def test_list2str(self, value, expects): str_representation = list2str(value) assert isinstance(str_representation, str) assert str_representation == expects @pytest.mark.parametrize( "value, expects", [ ([1, 2, 3], "1;2;3"), (['a', 'b', 'c'], "a;b;c"), (['a', 'b', 1, 2], "a;b;1;2"), (['a', 2, pathlib.Path("foo.txt")], "a;2;foo.txt"), ], ) def test_list2str_semicolon(self, value, expects): str_representation = list2str(value, sep=';') assert 
isinstance(str_representation, str)
		assert str_representation == expects


class CustomRepr:

	def __init__(self):
		pass

	def __repr__(self):
		return "This is my custom __repr__!"


class NoRepr:

	def __init__(self):
		pass


no_repr_instance = NoRepr()


def get_mem_addr(obj):
	if sys.platform == "win32" and platform.python_implementation() != "PyPy":
		return f"0x0*{hex(id(obj))[2:].upper()}"
	else:
		return f"0x0*{hex(id(obj))[2:]}"


@pytest.mark.parametrize(
	"obj, expects",
	[
		("This is a test", "'This is a test'"),
		(pathlib.PurePosixPath("foo.txt"), r"PurePosixPath\('foo.txt'\)"),
		(1234, "1234"),
		(12.34, "12.34"),
		(CustomRepr(), "This is my custom __repr__!"),
		(no_repr_instance, f"<tests.test_utils.NoRepr object at {get_mem_addr(no_repr_instance)}>"),
	],
)
def test_printr(obj, expects, capsys):
	printr(obj)
	captured = capsys.readouterr()
	stdout = captured.out.split('\n')
	assert re.match(expects, stdout[0])


if sys.version_info >= (3, 13):
	pure_posix_path_expected = "<class 'pathlib._local.PurePosixPath'>"
else:
	pure_posix_path_expected = "<class 'pathlib.PurePosixPath'>"


@pytest.mark.parametrize(
	"obj, expects",
	[
		("This is a test", "<class 'str'>"),
		(pathlib.PurePosixPath("foo.txt"), pure_posix_path_expected),
		(1234, "<class 'int'>"),
		(12.34, "<class 'float'>"),
		(CustomRepr(), "<class 'tests.test_utils.CustomRepr'>"),
		(no_repr_instance, "<class 'tests.test_utils.NoRepr'>"),
	],
)
def test_printt(obj, expects, capsys):
	printt(obj)
	captured = capsys.readouterr()
	stdout = captured.out.split('\n')
	assert stdout[0] == expects


@pytest.mark.parametrize(
	"obj, expects",
	[
		("This is a test", "This is a test"),
		(pathlib.PurePosixPath("foo.txt"), "foo.txt"),
		(1234, "1234"),
		(12.34, "12.34"),
		(CustomRepr(), "This is my custom __repr__!"),
		(no_repr_instance, f"<tests.test_utils.NoRepr object at {get_mem_addr(no_repr_instance)}>"),
	],
)
def test_stderr_writer(obj, expects, capsys):
	stderr_writer(obj)
	captured = capsys.readouterr()
	stderr = captured.err.split('\n')
	assert re.match(expects, stderr[0])


class TestStr2Tuple:

	@pytest.mark.parametrize(
		"value, expects",
		[
			("1,2,3", (1, 2, 3)),  # tests without spaces
			("1, 2, 3", (1, 2, 3)),  # tests with spaces
		],
	)
	def test_str2tuple(self, value, expects):
		assert isinstance(str2tuple(value), tuple)
		assert str2tuple(value) == expects

	@pytest.mark.parametrize(
		"value, expects",
		[
			("1;2;3", (1, 2, 3)),  # tests without semicolon
			("1; 2; 3", (1, 2, 3)),  # tests with semicolon
		],
	)
	def test_str2tuple_semicolon(self, value, expects):
		assert isinstance(str2tuple(value, sep=';'), tuple)
		assert str2tuple(value, sep=';') == expects


class TestStrToBool:

	@coincidence.testing_boolean_values(extra_truthy=[50, -1])
	def test_strtobool(self, boolean_string, expected_boolean):
		assert strtobool(boolean_string) == expected_boolean

	@pytest.mark.parametrize(
		"obj, expects",
		[
			("truthy", ValueError),
			("foo", ValueError),
			("bar", ValueError),
			(None, AttributeError),
			(1.0, AttributeError),
			(0.0, AttributeError),
		],
	)
	def test_strtobool_errors(self, obj, expects):
		with pytest.raises(expects):
			strtobool(obj)


@pytest.mark.parametrize(
	"obj, expects",
	[
		(True, True),
		("True", "True"),
		("true", "'true'"),
		('y', "'y'"),
		('Y', "'Y'"),
		(1, 1),
		(0, 0),
		(50, 50),
		(1.0, 1.0),
		(0.0, 0.0),
		(50.0, 50.0),
		(decimal.Decimal("50.0"), "'50.0'"),
		(False, False),
		("False", "False"),
		("false", "'false'"),
		("Hello World", "'Hello World'"),
	],
)
def test_enquote_value(obj, expects):
	assert enquote_value(obj) == expects


#
#
# @pytest.mark.parametrize("obj, expects", [
# 	("truthy", ValueError),
# 	("foo", ValueError),
# 	("bar", ValueError),
# 	(None, AttributeError),
# 	(1.0, AttributeError),
# 	(0.0, AttributeError),
# ])
# def test_enquote_value_errors(obj, expects):
# 	with pytest.raises(expects):
# 		enquote_value(obj)


def test_cmp():
	assert isinstance(cmp(5, 20), int)
	assert cmp(5, 20) < 0
	assert cmp(5, 20) == -1

	assert isinstance(cmp(20, 5),
int) assert cmp(20, 5) > 0 assert cmp(20, 5) == 1 assert isinstance(cmp(20, 20), int) assert cmp(20, 20) == 0 def demo_function(arg1, arg2, arg3): pass cwd = pathlib.Path.cwd() @pytest.mark.parametrize( "args, posarg_names, kwargs, expects", [ ((1, 2, 3), ("arg1", "arg2", "arg3"), {}, {"arg1": 1, "arg2": 2, "arg3": 3}), ((1, 2, 3), ("arg1", "arg2", "arg3"), None, {"arg1": 1, "arg2": 2, "arg3": 3}), ((1, 2, 3), ("arg1", "arg2", "arg3"), {"arg4": 4}, {"arg1": 1, "arg2": 2, "arg3": 3, "arg4": 4}), ((1, 2, 3), demo_function, None, { "arg1": 1, "arg2": 2, "arg3": 3, }), ((cwd, "wb", -1, "UTF-8"), pathlib.Path.open, None, { "self": cwd, "mode": "wb", "buffering": -1, "encoding": "UTF-8", }), (("wb", -1, "UTF-8"), pathlib.Path().open, None, { "mode": "wb", "buffering": -1, "encoding": "UTF-8", }), ] ) def test_posargs2kwargs(args, posarg_names, kwargs, expects): assert posargs2kwargs(args, posarg_names, kwargs) == expects def test_convert_indents(): # TODO: test 'to' assert convert_indents("hello world") == "hello world" assert convert_indents("\thello world") == " hello world" assert convert_indents("\t\thello world") == " hello world" assert convert_indents("\t hello world") == " hello world" assert convert_indents("hello world", tab_width=2) == "hello world" assert convert_indents("\thello world", tab_width=2) == " hello world" assert convert_indents("\t\thello world", tab_width=2) == " hello world" assert convert_indents("\t hello world", tab_width=2) == " hello world" assert convert_indents("hello world", from_=" ") == "hello world" assert convert_indents(" hello world", from_=" ") == " hello world" assert convert_indents(" hello world", from_=" ") == " hello world" assert convert_indents(" hello world", from_=" ") == " hello world" assert convert_indents("hello world", tab_width=2, from_=" ") == "hello world" assert convert_indents(" hello world", tab_width=2, from_=" ") == " hello world" assert convert_indents(" hello world", tab_width=2, from_=" ") == " hello world" class TestHead: def test_protocol(self): assert not isinstance(str, HasHead) assert not isinstance(int, HasHead) assert not isinstance(float, HasHead) assert not isinstance(tuple, HasHead) assert not isinstance(list, HasHead) def test_protocol_pandas(self): pandas = pytest.importorskip("pandas") assert isinstance(pandas.DataFrame, HasHead) assert isinstance(pandas.Series, HasHead) foo = namedtuple("foo", "a, b, c, d, e, f, g, h, i, j, k, l, m") @pytest.mark.parametrize( "args, expects", [ ((foo(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13), ), "foo(a=1, b=2, c=3, d=4, e=5, f=6, g=7, h=8, i=9, j=10, ...)"), ((foo(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13), 13), "foo(a=1, b=2, c=3, d=4, e=5, f=6, g=7, h=8, i=9, j=10, k=11, l=12, m=13)"), ] ) def test_namedtuple(self, args, expects): assert head(*args) == expects @pytest.mark.parametrize( "args, expects", [ (((1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13), ), "(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, ...)"), (( (1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13), 13, ), "(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13)"), ] ) def test_tuple(self, args, expects): assert head(*args) == expects @pytest.mark.parametrize( "args, expects", [ (([1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13], ), "[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, ...]"), (( [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13], 13, ), "[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13]"), ] ) def test_list(self, args, expects): assert head(*args) == expects def test_data_frame(self): pandas = pytest.importorskip("pandas") df = pandas.DataFrame( data=[["Bob", 20, 
"Apprentice"], ["Alice", 23, "Secretary"], ["Mario", 39, "Plumber"]], columns=["Name", "Age", "Occupation"], ) assert head( df ) == """ Name Age Occupation 0 Bob 20 Apprentice 1 Alice 23 Secretary 2 Mario 39 Plumber\ """ assert head(df, 1) == " Name Age Occupation\n0 Bob 20 Apprentice" def test_series(self): pandas = pytest.importorskip("pandas") df = pandas.DataFrame( data=[["Bob", 20, "Apprentice"], ["Alice", 23, "Secretary"], ["Mario", 39, "Plumber"]], columns=["Name", "Age", "Occupation"], ) ser = df.iloc[0] assert head(ser) == """\ Name Bob Age 20 Occupation Apprentice\ """ assert head(ser, 1) == "Name Bob" def test_str(self): assert head("Hello World") == "Hello Worl..." assert head("Hello World", 11) == "Hello World" assert head("Hello World", 5) == "Hello..." def test_trim_precision(): assert 170.10000000000002 != 170.1 assert trim_precision(170.10000000000002, 1) == 170.1 assert trim_precision(170.10000000000002, 2) == 170.1 assert trim_precision(170.10000000000002, 3) == 170.1 assert trim_precision(170.10000000000002, 4) == 170.1 assert trim_precision(170.10000000000002, 5) == 170.1 assert trim_precision(170.10000000000002) == 170.1 assert 170.15800000000002 != 170.158 assert trim_precision(170.15800000000002, 1) == 170.2 assert trim_precision(170.15800000000002, 2) == 170.16 assert trim_precision(170.15800000000002, 3) == 170.158 assert trim_precision(170.15800000000002, 4) == 170.158 assert trim_precision(170.15800000000002, 5) == 170.158 assert trim_precision(170.15800000000002) == 170.158 @pytest.mark.parametrize( "value, expects", [ ("foo", '"foo"'), ("'foo'", "\"'foo'\""), ("don't", "\"don't\""), ("Here's a single quote \"", "\"Here's a single quote \\\"\""), (enquote_value('☃'), "\"'☃'\""), ] ) def test_double_repr_string(value: str, expects: str): assert double_repr_string(value) == expects def test_redirect_output(): with redirect_output() as (stdout, stderr): print("I'm going to stdout") click.echo("I'm going to stderr", file=sys.stderr) click.echo("I'm also going to stdout", file=stdout) print("I'm also going to stderr", file=stderr) assert stdout.getvalue() == "I'm going to stdout\nI'm also going to stdout\n" assert stderr.getvalue() == "I'm going to stderr\nI'm also going to stderr\n" def test_redirect_output_combine(): with redirect_output(combine=True) as (stdout, stderr): click.echo("I'm going to stdout") print("I'm going to stderr", file=sys.stderr) print("I'm also going to stdout", file=stdout) click.echo("I'm also going to stderr", file=stderr) expected = "I'm going to stdout\nI'm going to stderr\nI'm also going to stdout\nI'm also going to stderr\n" assert stdout.getvalue() == expected assert stderr.getvalue() == expected @pytest.mark.parametrize( "string, sep", [ ("hello=world", '='), ("hello = world", '='), ("hello = world", " = "), ("hello: world", ':'), ("hello: world", ": "), ] ) def test_divide(string: str, sep: str, advanced_data_regression: AdvancedDataRegressionFixture): data = dict(divide(e, sep) for e in [string, string, string]) advanced_data_regression.check(data) def test_divide_errors(): with pytest.raises(ValueError, match="'=' not in 'hello: world'"): divide("hello: world", '=') @pytest.mark.parametrize( "string, sep", [ ("hello=world", r"\s?=\s?"), ("hello = world", r"\s?=\s?"), ("hello = world", '='), ("hello: world", r":\s?"), ("hello: world", r"\s?:\s?"), ] ) def test_redivide(string: str, sep: str, advanced_data_regression: AdvancedDataRegressionFixture): data = dict(redivide(e, sep) for e in [string, string, string]) 
advanced_data_regression.check(data) def test_redivide_errors(): with pytest.raises(ValueError, match=r"re.compile\('='\) has no matches in 'hello: world'"): redivide("hello: world", '=') with pytest.raises(ValueError, match=r"re.compile\(.*\) has no matches in 'hello: world'"): redivide("hello: world", r"\d") @pytest.mark.parametrize( "values, expected", [ (("foo", "bar"), ["bar", "foo"]), (("foo", "foo", "bar"), ["bar", "foo"]), (("foo", "bar", "bar"), ["bar", "foo"]), ] ) def test_unique_sorted(values, expected): assert unique_sorted(values) == expected @pytest.mark.parametrize( "the_string, expected", [ ("\t\t\t", "^I^I^I"), ("\u0000\u0000\u0000", "^@^@^@"), ("\r\n", "^M^J"), ("\b\u000b", "^H^K"), ("\u001a", "^Z^?"), ('\x81', "M+A"), ] ) def test_replace_nonprinting(the_string: str, expected: str): assert replace_nonprinting(the_string) == expected domdf_python_tools-3.10.0/tests/test_utils_/000077500000000000000000000000001475315453000212145ustar00rootroot00000000000000domdf_python_tools-3.10.0/tests/test_utils_/test_diff.txt000066400000000000000000000015451475315453000237310ustar00rootroot00000000000000--- original_file.txt (original) +++ modified_file.txt (modified) @@ -1,9 +1,8 @@ -Lorem ipsum dolor sit amet, consectetur adipiscing elit. -Quisque feugiat, lectus et interdum feugiat, magna enim -vestibulum diam, a ultrices urna odio at magna. Quisque -ut ullamcorper justo. Suspendisse ac tincidunt velit. +Lorem ipsum dolor sit amet, consectetur adipiscing elit. Quisque feugiat, +lectus et interdum feugiat, magna enim vestibulum diam, a ultrices urna +odio at magna. Quisque +ut ullamcorper justo. Integer lobortis eros eget diam varius eleifend. Donec ornare nisi vel purus aliquet consequat. Ut quis ipsum -et nunc sodales tristique. Maecenas justo libero, semper -eget feugiat et, aliquam et mauris. +et nunc sodales tristique. 
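# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original repository) of the divide()
# and redivide() helpers exercised in test_utils.py above.  Both split a
# string around a separator and return a (head, tail) pair; redivide() takes
# a regular expression.  The sample config lines are invented.

from domdf_python_tools.utils import divide, redivide

config_lines = [
	"name=domdf_python_tools",
	"version = 3.10.0",
	"license: MIT",
]

parsed = {}

for line in config_lines:
	if '=' in line:
		# The regex separator swallows optional whitespace around '='.
		key, value = redivide(line, r"\s?=\s?")
	else:
		key, value = divide(line, ": ")
	parsed[key] = value

print(parsed)
# {'name': 'domdf_python_tools', 'version': '3.10.0', 'license': 'MIT'}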
domdf_python_tools-3.10.0/tests/test_utils_/test_divide_hello___world___.yml000066400000000000000000000000231475315453000275620ustar00rootroot00000000000000'hello ': ' world' domdf_python_tools-3.10.0/tests/test_utils_/test_divide_hello___world_____.yml000066400000000000000000000000151475315453000300610ustar00rootroot00000000000000hello: world domdf_python_tools-3.10.0/tests/test_utils_/test_divide_hello__world___.yml000066400000000000000000000000201475315453000274200ustar00rootroot00000000000000hello: ' world' domdf_python_tools-3.10.0/tests/test_utils_/test_divide_hello__world____.yml000066400000000000000000000000151475315453000275630ustar00rootroot00000000000000hello: world domdf_python_tools-3.10.0/tests/test_utils_/test_divide_hello_world___.yml000066400000000000000000000000151475315453000272650ustar00rootroot00000000000000hello: world domdf_python_tools-3.10.0/tests/test_utils_/test_redivide_hello___world___.yml000066400000000000000000000000231475315453000301110ustar00rootroot00000000000000'hello ': ' world' domdf_python_tools-3.10.0/tests/test_utils_/test_redivide_hello___world___s____s__.yml000066400000000000000000000000151475315453000315520ustar00rootroot00000000000000hello: world domdf_python_tools-3.10.0/tests/test_utils_/test_redivide_hello__world____s__.yml000066400000000000000000000000151475315453000305730ustar00rootroot00000000000000hello: world domdf_python_tools-3.10.0/tests/test_utils_/test_redivide_hello__world___s____s__.yml000066400000000000000000000000151475315453000314130ustar00rootroot00000000000000hello: world domdf_python_tools-3.10.0/tests/test_utils_/test_redivide_hello_world___s____s__.yml000066400000000000000000000000151475315453000312540ustar00rootroot00000000000000hello: world domdf_python_tools-3.10.0/tests/test_versions.py000066400000000000000000000276401475315453000221500ustar00rootroot00000000000000# stdlib import copy import pickle import platform import sys from typing import Any # 3rd party import pytest # this package from domdf_python_tools.versions import Version, _iter_float, _iter_string, _prep_for_eq pytestmark = pytest.mark.skipif( condition=sys.version_info[:3] <= (3, 6, 1) and platform.python_implementation() == "PyPy", reason="Strange bug in PyPy 7.1.1/3.6.1 on Travis when subclassing from Tuple[int, int, int]", ) @pytest.mark.parametrize( "string, expects", [ ('1', (1, )), ("1.0", (1, 0)), ("1.5", (1, 5)), ("1.5.1", (1, 5, 1)), ("1.5.1.2.3.4.5", (1, 5, 1, 2, 3, 4, 5)), ("15", (15, )), ], ) def test_iter_string(string, expects): assert tuple(_iter_string(string)) == expects @pytest.mark.parametrize( "float_, expects", [ (1, (1, )), (1.0, (1, 0)), (1.5, (1, 5)), (15, (15, )), ('1', (1, )), ("1.0", (1, 0)), ("1.5", (1, 5)), ("1.5.1", (1, 5, 1)), ("1.5.1.2.3.4.5", (1, 5, 1, 2, 3, 4, 5)), ("15", (15, )), ], ) def test_iter_float(float_, expects): assert tuple(_iter_float(float_)) == expects @pytest.mark.parametrize( "other, expects", [ (1, (1, )), (1.0, (1, 0)), (1.5, (1, 5)), (15, (15, )), ('1', (1, )), ("1.0", (1, 0)), ("1.5", (1, 5)), ("1.5.1", (1, 5, 1)), ("1.5.1.2.3.4.5", (1, 5, 1, 2, 3, 4, 5)), ("15", (15, )), (('1', ), (1, )), (('1', '0'), (1, 0)), (('1', '5'), (1, 5)), (('1', '5', '1'), (1, 5, 1)), (('1', '5', '1', '2', '3', '4', '5'), (1, 5, 1, 2, 3, 4, 5)), (("15", ), (15, )), ([ '1', ], (1, )), (['1', '0'], (1, 0)), (['1', '5'], (1, 5)), (['1', '5', '1'], (1, 5, 1)), (['1', '5', '1', '2', '3', '4', '5'], (1, 5, 1, 2, 3, 4, 5)), ([ "15", ], (15, )), ((1, ), (1, )), ((1, 0), (1, 0)), ((1, 5), (1, 5)), ((1, 5, 1), (1, 5, 1)), ((1, 5, 1, 
2, 3, 4, 5), (1, 5, 1, 2, 3, 4, 5)), ((15, ), (15, )), ([ 1, ], (1, )), ([1, 0], (1, 0)), ([1, 5], (1, 5)), ([1, 5, 1], (1, 5, 1)), ([1, 5, 1, 2, 3, 4, 5], (1, 5, 1, 2, 3, 4, 5)), ([ 15, ], (15, )), (Version(1, 2, 3), (1, 2, 3)), (Version(1, 2), (1, 2, 0)), (Version(1), (1, 0, 0)), ], ) def test_prep_for_eq(other, expects): assert _prep_for_eq(other) == expects @pytest.mark.parametrize( "float_, expects", [ (1, Version(1)), (1.0, Version(1)), (1.1, Version(1, 1)), (1.5, Version(1, 5)), (2.0, Version(2)), ], ) def test_from_float(float_, expects): assert Version.from_float(float_) == expects @pytest.mark.parametrize( "string, expects", [ ('1', Version(1)), ("1.0", Version(1)), ("1.1", Version(1, 1)), ("1.5", Version(1, 5)), ("2.0", Version(2)), ("1.0.0", Version(1)), ("1.0.1", Version(1, 0, 1)), ("1.1.5", Version(1, 1, 5)), ("1.5.2", Version(1, 5, 2)), ], ) def test_from_str(string, expects): assert Version.from_str(string) == expects @pytest.mark.parametrize( "tuple_, expects", [ (('1', ), Version(1)), (('1', '0'), Version(1, 0)), (('1', '5'), Version(1, 5)), (('1', '5', '1'), Version(1, 5, 1)), (("15", ), Version(15)), ([ '1', ], Version(1)), (['1', '0'], Version(1, 0)), (['1', '5'], Version(1, 5)), (['1', '5', '1'], Version(1, 5, 1)), ([ "15", ], Version(15)), ((1, ), Version(1)), ((1, 0), Version(1, 0)), ((1, 5), Version(1, 5)), ((1, 5, 1), Version(1, 5, 1)), ((15, ), Version(15)), ([ 1, ], Version(1)), ([1, 0], Version(1, 0)), ([1, 5], Version(1, 5)), ([1, 5, 1], Version(1, 5, 1)), ([ 15, ], Version(15)), ], ) def test_from_tuple(tuple_, expects): assert Version.from_tuple(tuple_) == expects def test_too_many_values(): with pytest.raises(TypeError, match=".* takes from 1 to 4 positional arguments but 5 were given"): Version.from_str("1.2.3.4") with pytest.raises(TypeError, match=".* takes from 1 to 4 positional arguments but 5 were given"): Version(1, 2, 3, 4) # type: ignore with pytest.raises(TypeError, match=".* takes from 1 to 4 positional arguments but 5 were given"): Version('1', '2', '3', '4') # type: ignore # with pytest.raises(TypeError, match=".* takes from 1 to 4 positional arguments but 8 were given"): # Version.from_tuple(("1", "5", "1", "2", "3", "4", "5")) # type: ignore # with pytest.raises(TypeError, match=".* takes from 1 to 4 positional arguments but 8 were given"): # Version.from_tuple(["1", "5", "1", "2", "3", "4", "5"]) # type: ignore # with pytest.raises(TypeError, match=".* takes from 1 to 4 positional arguments but 8 were given"): # Version.from_tuple((1, 5, 1, 2, 3, 4, 5)) # type: ignore # with pytest.raises(TypeError, match=".* takes from 1 to 4 positional arguments but 8 were given"): # Version.from_tuple([1, 5, 1, 2, 3, 4, 5]) # type: ignore @pytest.mark.parametrize( "value, version", [ (('1', ), Version(1)), (('1', '0'), Version(1, 0)), (('1', '5'), Version(1, 5)), (('1', '5', '1'), Version(1, 5, 1)), (("15", ), Version(15)), ([ '1', ], Version(1)), (['1', '0'], Version(1, 0)), (['1', '5'], Version(1, 5)), (['1', '5', '1'], Version(1, 5, 1)), ([ "15", ], Version(15)), ((1, ), Version(1)), ((1, 0), Version(1, 0)), ((1, 5), Version(1, 5)), ((1, 5, 1), Version(1, 5, 1)), ((15, ), Version(15)), ([ 1, ], Version(1)), ([1, 0], Version(1, 0)), ([1, 5], Version(1, 5)), ([1, 5, 1], Version(1, 5, 1)), ([ 15, ], Version(15)), ('1', Version(1)), ("1.0", Version(1, 0)), ("1.5", Version(1, 5)), ("1.5.1", Version(1, 5, 1)), ("1.5.1.2.3.4.5", Version(1, 5, 1)), ('1', Version(1, 2)), ("15", Version(15)), (1, Version(1)), (1.0, Version(1, 0)), (1.5, Version(1, 5)), (15, 
Version(15)), ], ) def test_equals(value, version): assert version == value assert value == version @pytest.mark.parametrize( "value, version", [ (1.2, Version(1.1)), (1.2, Version(1)), (1.2, Version(0, 9)), ("1.2", Version(1.1)), ("1.2", Version(1)), ("1.2", Version(0, 9)), ("1.1.1", Version(1.1)), ("0.9.1", Version(0, 9)), ((1, 2), Version(1.1)), ((1, 2), Version(1)), ((1, 2), Version(0, 9)), ((1, 1, 1), Version(1.1)), ((0, 9, 1), Version(0, 9)), ([1, 2], Version(1.1)), ([1, 1, 1], Version(1.1)), ([0, 9, 1], Version(0, 9)), ], ) def test_lt(value: Any, version: Version): assert value > version assert version < value @pytest.mark.parametrize( "value, version", [ (1.0, Version(1.1)), (0.9, Version(1)), (0.9, Version(0, 9)), ("1.0", Version(1.1)), ("1.0", Version(1)), ("0.8", Version(0, 9)), ("0.9.9", Version(1.1)), ((1, ), Version(1.1)), ((0, 9), Version(1)), ((0, 8, 8), Version(0, 9)), ([ 1, ], Version(1.1)), ([0, 9], Version(1)), ([0, 8, 8], Version(0, 9)), ], ) def test_gt(value, version): assert value < version assert version > value @pytest.mark.parametrize( "value, version", [ (1.2, Version(1.1)), (1.2, Version(1)), (1.2, Version(0, 9)), ("1.2", Version(1.1)), ("1.2", Version(1)), ("1.2", Version(0, 9)), ("1.1.1", Version(1.1)), ("0.9.1", Version(0, 9)), ((1, 2), Version(1.1)), ((1, 2), Version(1)), ((1, 2), Version(0, 9)), ((1, 1, 1), Version(1.1)), ((0, 9, 1), Version(0, 9)), ([1, 2], Version(1.1)), ([1, 1, 1], Version(1.1)), ([0, 9, 1], Version(0, 9)), (('1', ), Version(1)), (('1', '0'), Version(1, 0)), (('1', '5'), Version(1, 5)), (('1', '5', '1'), Version(1, 5, 1)), (("15", ), Version(15)), ([ '1', ], Version(1)), (['1', '0'], Version(1, 0)), (['1', '5'], Version(1, 5)), (['1', '5', '1'], Version(1, 5, 1)), ([ "15", ], Version(15)), ((1, ), Version(1)), ((1, 0), Version(1, 0)), ((1, 5), Version(1, 5)), ((1, 5, 1), Version(1, 5, 1)), ((15, ), Version(15)), ([ 1, ], Version(1)), ([1, 0], Version(1, 0)), ([1, 5], Version(1, 5)), ([1, 5, 1], Version(1, 5, 1)), ([ 15, ], Version(15)), ('1', Version(1)), ("1.0", Version(1, 0)), ("1.5", Version(1, 5)), ("1.5.1", Version(1, 5, 1)), ('1', Version(1, 2)), ("15", Version(15)), (1, Version(1)), (1.0, Version(1, 0)), (1.5, Version(1, 5)), (15, Version(15)), ("1.5.1.2.3.4.5", Version(1, 5, 1)), ], ) def test_le(value, version): assert value >= version assert version <= value @pytest.mark.parametrize( "value, version", [ (1.0, Version(1.1)), (0.9, Version(1)), (0.9, Version(0, 9)), ("1.0", Version(1.1)), ("1.0", Version(1)), ("0.8", Version(0, 9)), ("0.9.9", Version(1.1)), ((1, ), Version(1.1)), ((0, 9), Version(1)), ((0, 8, 8), Version(0, 9)), ([ 1, ], Version(1.1)), ([0, 9], Version(1)), ([0, 8, 8], Version(0, 9)), (('1', ), Version(1)), (('1', '0'), Version(1, 0)), (('1', '5'), Version(1, 5)), (('1', '5', '1'), Version(1, 5, 1)), (("15", ), Version(15)), ([ '1', ], Version(1)), (['1', '0'], Version(1, 0)), (['1', '5'], Version(1, 5)), (['1', '5', '1'], Version(1, 5, 1)), ([ "15", ], Version(15)), ((1, ), Version(1)), ((1, 0), Version(1, 0)), ((1, 5), Version(1, 5)), ((1, 5, 1), Version(1, 5, 1)), ((15, ), Version(15)), ([ 1, ], Version(1)), ([1, 0], Version(1, 0)), ([1, 5], Version(1, 5)), ([1, 5, 1], Version(1, 5, 1)), ([ 15, ], Version(15)), ('1', Version(1)), ("1.0", Version(1, 0)), ("1.5", Version(1, 5)), ("1.5.1", Version(1, 5, 1)), ('1', Version(1, 2)), ("15", Version(15)), (1, Version(1)), (1.0, Version(1, 0)), (1.5, Version(1, 5)), (15, Version(15)), ("1.5.0.2.3.4.5", Version(1, 5, 1)), ], ) def test_ge(value, version): assert value 
<= version assert version >= value @pytest.mark.parametrize( "version, expects", [ (Version(1), "Version(major=1, minor=0, patch=0)"), (Version(2), "Version(major=2, minor=0, patch=0)"), (Version(2, patch=3), "Version(major=2, minor=0, patch=3)"), (Version(2, 3), "Version(major=2, minor=3, patch=0)"), (Version(2, 3, 4), "Version(major=2, minor=3, patch=4)"), (Version(minor=3, patch=4), "Version(major=0, minor=3, patch=4)"), ], ) def test_repr(version, expects): assert repr(version) == expects @pytest.mark.parametrize( "version, expects", [ (Version(1), "v1.0.0"), (Version(2), "v2.0.0"), (Version(2, patch=3), "v2.0.3"), (Version(2, 3), "v2.3.0"), (Version(2, 3, 4), "v2.3.4"), (Version(minor=3, patch=4), "v0.3.4"), ], ) def test_str(version, expects): assert str(version) == expects @pytest.mark.parametrize( "version, expects", [ (Version(1), 1.0), (Version(2), 2.0), (Version(2, patch=3), 2.0), (Version(2, 3), 2.3), (Version(2, 3, 4), 2.3), (Version(minor=3, patch=4), 0.3), ], ) def test_float(version, expects): assert float(version) == expects @pytest.mark.parametrize( "version, expects", [ (Version(1), 1), (Version(2), 2), (Version(2, patch=3), 2), (Version(2, 3), 2), (Version(2, 3, 4), 2), (Version(minor=3, patch=4), 0), ], ) def test_int(version, expects): assert int(version) == expects @pytest.mark.parametrize( "obj", [ Version(1), Version(2), Version(2, patch=3), Version(2, 3), Version(2, 3, 4), Version(minor=3, patch=4), Version(1, 2, 3), ], ) def test_pickle(obj): assert pickle.loads(pickle.dumps(obj)) == obj # nosec: B301 @pytest.mark.parametrize( "obj", [ Version(1), Version(2), Version(2, patch=3), Version(2, 3), Version(2, 3, 4), Version(minor=3, patch=4), Version(1, 2, 3), ], ) def test_copy(obj): assert copy.copy(obj) == obj domdf_python_tools-3.10.0/tests/test_words.py000066400000000000000000000203771475315453000214360ustar00rootroot00000000000000# stdlib import decimal import pathlib import random import string from typing import List # 3rd party import pytest # this package from domdf_python_tools import words from domdf_python_tools.paths import PathPlus from domdf_python_tools.stringlist import StringList from domdf_python_tools.words import ( DOUBLESTRUCK_LETTERS, Plural, PluralPhrase, alpha_sort, get_random_word, get_words_list, truncate_string ) @pytest.mark.parametrize( "seed, expected_values", [ (1, ["element", "rebound", "stop", "underground", "buyer"]), (5, ["vacations", "tail", "maui", "features", "bundle"]), (100, ["stats", "seafood", "creativity", "outdoors", "sacred"]), (250, ["arranged", "volumes", "korea", "basename", "islands"]), (500, ["tuning", "knowledgestorm", "backing", "motivation", "calculated"]), ], ) def test_get_random_word(seed: int, expected_values: List[str]): random.seed(seed) for i in range(5): assert get_random_word() == expected_values.pop(0) def test_get_words_list(): assert isinstance(get_words_list(), list) assert isinstance(get_words_list()[0], str) assert isinstance(get_words_list(3), list) assert isinstance(get_words_list(3)[0], str) assert isinstance(get_words_list(17), list) assert isinstance(get_words_list(17)[0], str) assert isinstance(get_words_list(17000), list) assert get_words_list(17000) == [] assert isinstance(get_words_list(min_length=3), list) assert isinstance(get_words_list(min_length=3)[0], str) assert isinstance(get_words_list(min_length=17), list) assert isinstance(get_words_list(min_length=17)[0], str) assert isinstance(get_words_list(min_length=17000), list) assert get_words_list(min_length=17000) == [] assert 
isinstance(get_words_list(max_length=3), list) assert isinstance(get_words_list(max_length=3)[0], str) assert isinstance(get_words_list(max_length=17), list) assert isinstance(get_words_list(max_length=17)[0], str) assert isinstance(get_words_list(max_length=17000), list) assert isinstance(get_words_list(max_length=17000)[0], str) assert isinstance(get_words_list(min_length=3, max_length=17), list) assert isinstance(get_words_list(min_length=3, max_length=17)[0], str) assert isinstance(get_words_list(min_length=3, max_length=17000), list) assert isinstance(get_words_list(min_length=3, max_length=17000)[0], str) def test_font(): assert DOUBLESTRUCK_LETTERS("Hello World") == "â„ð•–ð•ð•ð•  ð•Žð• ð•£ð•ð••" assert DOUBLESTRUCK_LETTERS['A'] == 'ð”¸' assert DOUBLESTRUCK_LETTERS.get('A') == 'ð”¸' assert DOUBLESTRUCK_LETTERS['-'] == '-' assert DOUBLESTRUCK_LETTERS.get('-') == '-' assert DOUBLESTRUCK_LETTERS.get('-', "Default") == "Default" def test_alpha_sort(): alphabet = f"_{string.ascii_uppercase}{string.ascii_lowercase}0123456789" assert alpha_sort(["_hello", "apple", "world"], alphabet) == ["_hello", "apple", "world"] assert alpha_sort(["apple", "_hello", "world"], alphabet) == ["_hello", "apple", "world"] assert alpha_sort(["apple", "_hello", "world"], alphabet, reverse=True) == ["world", "apple", "_hello"] with pytest.raises(ValueError, match="The character '☃' was not found in the alphabet."): alpha_sort(["apple", "_hello", "world", '☃'], alphabet) assert alpha_sort(["apple", "_hello", "world", '☃'], alphabet + '☃') == ["_hello", "apple", "world", '☃'] @pytest.mark.parametrize( "value, expects", [ (12345, "12345"), (123.45, "123.45"), ([123.45], "[123.45]"), ({123.45}, "{123.45}"), ((123.45, ), "(123.45,)"), (None, ''), (pathlib.Path('.'), '.'), (PathPlus('.'), '.'), (StringList(["Hello", "World"]), "Hello\nWorld"), (decimal.Decimal("1234"), "1234"), ], ) def test_as_text(value, expects): assert words.as_text(value) == expects @pytest.mark.parametrize( "args, kwargs, expects", [ (([], ), {}, ''), (((), ), {}, ''), ((["bob"], ), {}, "bob"), ((["bob", "alice"], ), {}, "bob and alice"), ((["bob", "alice", "fred"], ), {}, "bob, alice and fred"), ((("bob", ), ), {}, "bob"), ((("bob", "alice"), ), {}, "bob and alice"), ((("bob", "alice", "fred"), ), {}, "bob, alice and fred"), ((("bob", ), ), {"delimiter": ';'}, "bob"), ((("bob", "alice"), ), {"delimiter": ';'}, "bob and alice"), ((("bob", "alice", "fred"), ), {"delimiter": ';'}, "bob; alice and fred"), ((["bob"], ), {"use_repr": True}, "'bob'"), ((["bob", "alice"], ), {"use_repr": True}, "'bob' and 'alice'"), ((["bob", "alice", "fred"], ), {"use_repr": True}, "'bob', 'alice' and 'fred'"), ((("bob", ), ), {"use_repr": True}, "'bob'"), ((("bob", "alice"), ), {"use_repr": True}, "'bob' and 'alice'"), ((("bob", "alice", "fred"), ), {"use_repr": True}, "'bob', 'alice' and 'fred'"), ((["bob"], ), {"use_repr": True, "oxford": True}, "'bob'"), ((["bob", "alice"], ), {"use_repr": True, "oxford": True}, "'bob' and 'alice'"), ((["bob", "alice", "fred"], ), {"use_repr": True, "oxford": True}, "'bob', 'alice', and 'fred'"), ((["bob", "alice", "fred"], ), {"use_repr": True, "oxford": True, "delimiter": ';'}, "'bob'; 'alice'; and 'fred'"), ((["bob", "alice", "fred"], ), {"use_repr": True, "oxford": True, "connective": "or"}, "'bob', 'alice', or 'fred'"), ((["bob", "alice"], ), {"connective": "or"}, "bob or alice"), ((("bob", ), ), {"use_repr": True, "oxford": True}, "'bob'"), ((("bob", "alice"), ), {"use_repr": True, "oxford": True}, "'bob' and 'alice'"), 
((("bob", "alice", "fred"), ), {"use_repr": True, "oxford": True}, "'bob', 'alice', and 'fred'"), ], ) def test_word_join(args, kwargs, expects): assert words.word_join(*args, **kwargs) == expects def test_plural(): cow = Plural("cow", "cows") glass = Plural("glass", "glasses") n = 1 assert f"The farmer has {n} {cow(n)}." == "The farmer has 1 cow." assert f"The bar has {n} {glass(n)}." == "The bar has 1 glass." n = 2 assert f"The farmer has {n} {cow(n)}." == "The farmer has 2 cows." assert f"The bar has {n} {glass(n)}." == "The bar has 2 glasses." n = 3 assert f"The farmer has {n} {cow(n)}." == "The farmer has 3 cows." assert f"The bar has {n} {glass(n)}." == "The bar has 3 glasses." assert repr(cow) == "Plural('cow', 'cows')" assert repr(glass) == "Plural('glass', 'glasses')" def test_pluralphrase(): phrase1 = PluralPhrase("The proposed {} {} to ...", (Plural("change", "changes"), Plural("is", "are"))) phrase2 = PluralPhrase("The farmer has {n} {0}.", (Plural("cow", "cows"), )) phrase3 = PluralPhrase("The proposed {1} {0} to ...", (Plural("is", "are"), Plural("change", "changes"))) phrase4 = PluralPhrase( "The farmer has {n} {0}. The {0} {1} brown.", (Plural("cow", "cows"), Plural("is", "are")) ) n = 1 assert phrase1(n) == "The proposed change is to ..." assert phrase2(n) == "The farmer has 1 cow." assert phrase3(n) == "The proposed change is to ..." assert phrase4(n) == "The farmer has 1 cow. The cow is brown." n = 2 assert phrase1(n) == "The proposed changes are to ..." assert phrase2(n) == "The farmer has 2 cows." assert phrase3(n) == "The proposed changes are to ..." assert phrase4(n) == "The farmer has 2 cows. The cows are brown." n = 3 assert phrase1(n) == "The proposed changes are to ..." assert phrase2(n) == "The farmer has 3 cows." assert phrase3(n) == "The proposed changes are to ..." assert phrase4(n) == "The farmer has 3 cows. The cows are brown." phrase1_repr = "PluralPhrase(template='The proposed {} {} to ...', words=(Plural('change', 'changes'), Plural('is', 'are')))" assert repr(phrase1) == phrase1_repr assert repr(phrase2) == "PluralPhrase(template='The farmer has {n} {0}.', words=(Plural('cow', 'cows'),))" phrase3_repr = "PluralPhrase(template='The proposed {1} {0} to ...', words=(Plural('is', 'are'), Plural('change', 'changes')))" assert repr(phrase3) == phrase3_repr phrase4_repr = "PluralPhrase(template='The farmer has {n} {0}. The {0} {1} brown.', words=(Plural('cow', 'cows'), Plural('is', 'are')))" assert repr(phrase4) == phrase4_repr def test_truncate(): message = "hello world this is a very long sentance with no point" assert truncate_string(message, 20) == "hello world this ..." assert truncate_string(message, 30) == "hello world this is a very ..." assert truncate_string(message, 30, '…') == "hello world this is a very lo…" assert truncate_string(message, 200, '…') == message domdf_python_tools-3.10.0/tox.ini000066400000000000000000000214231475315453000170310ustar00rootroot00000000000000# This file is managed by 'repo_helper'. 
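# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original repository) combining the
# domdf_python_tools.words helpers exercised in test_words.py above:
# Plural, word_join() and truncate_string().  The "failed checks" report is
# invented; the expected outputs follow directly from the behaviour the
# tests assert.

from domdf_python_tools.words import Plural, truncate_string, word_join

failure = Plural("failure", "failures")
failed_checks = ["flake8", "mypy", "pytest"]

n = len(failed_checks)
summary = f"{n} {failure(n)}: {word_join(failed_checks, use_repr=True, oxford=True)}"

print(summary)
# 3 failures: 'flake8', 'mypy', and 'pytest'

print(truncate_string(summary, 20))
# 3 failures: 'flak...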
# You may add new sections, but any changes made to the following sections will be lost: # * tox # * envlists # * testenv:.package # * testenv:py313-dev # * testenv:py313 # * testenv:py312-dev # * testenv:py312 # * testenv:docs # * testenv:build # * testenv:lint # * testenv:perflint # * testenv:mypy # * testenv:pyup # * testenv:coverage # * flake8 # * coverage:run # * coverage:report # * check-wheel-contents # * pytest [tox] envlist = py36 py37 py38 py39 py310 py311 py312 py313-dev pypy36 pypy37 pypy38 pypy39 pypy310 mypy build skip_missing_interpreters = True isolated_build = True requires = pip>=21,!=22.2 tox-envlist>=0.2.1 tox~=3.0 virtualenv!=20.16.0 [envlists] test = py36 py37 py38 py39 py310 py311 py312 py313-dev pypy36 pypy37 pypy38 pypy39 pypy310 qa = mypy, lint cov = py38, coverage [testenv:.package] setenv = PYTHONDEVMODE=1 PIP_DISABLE_PIP_VERSION_CHECK=1 [testenv:py313-dev] download = True setenv = PYTHONDEVMODE=1 PIP_DISABLE_PIP_VERSION_CHECK=1 UNSAFE_PYO3_SKIP_VERSION_CHECK=1 [testenv:py312] download = True setenv = PYTHONDEVMODE=1 PIP_DISABLE_PIP_VERSION_CHECK=1 [testenv:docs] setenv = SHOW_TODOS = 1 passenv = SPHINX_BUILDER basepython = python3.8 changedir = {toxinidir}/doc-source extras = all deps = -r{toxinidir}/doc-source/requirements.txt commands = sphinx-build -M {env:SPHINX_BUILDER:html} . ./build {posargs} [testenv:build] setenv = PYTHONDEVMODE=1 PIP_DISABLE_PIP_VERSION_CHECK=1 PIP_PREFER_BINARY=1 UNSAFE_PYO3_SKIP_VERSION_CHECK=1 skip_install = True changedir = {toxinidir} deps = build[virtualenv]>=0.3.1 check-wheel-contents>=0.1.0 twine>=3.2.0 cryptography<40; implementation_name == "pypy" and python_version <= "3.7" commands = python -m build --sdist --wheel "{toxinidir}" twine check dist/*.tar.gz dist/*.whl check-wheel-contents dist/ [testenv:lint] basepython = python3.8 changedir = {toxinidir} ignore_errors = True skip_install = False deps = flake8>=3.8.2,<5 flake8-2020>=1.6.0 flake8-builtins>=1.5.3 flake8-docstrings>=1.5.0 flake8-dunder-all>=0.1.1 flake8-encodings>=0.1.0 flake8-github-actions>=0.1.0 flake8-noqa>=1.1.0,<=1.2.2 flake8-pyi>=20.10.0,<=22.8.0 flake8-pytest-style>=1.3.0,<2 flake8-quotes>=3.3.0 flake8-slots>=0.1.0 flake8-sphinx-links>=0.0.4 flake8-strftime>=0.1.1 flake8-typing-imports>=1.10.0 git+https://github.com/domdfcoding/flake8-rst-docstrings-sphinx.git git+https://github.com/domdfcoding/flake8-rst-docstrings.git git+https://github.com/python-formate/flake8-unused-arguments.git@magic-methods git+https://github.com/python-formate/flake8-missing-annotations.git pydocstyle>=6.0.0 pygments>=2.7.1 importlib_metadata<4.5.0; python_version<'3.8' commands = python3 -m flake8_rst_docstrings_sphinx domdf_python_tools tests --allow-toolbox {posargs} [testenv:perflint] basepython = python3.8 changedir = {toxinidir} ignore_errors = True skip_install = True deps = perflint commands = python3 -m perflint domdf_python_tools {posargs} [testenv:mypy] basepython = python3.8 ignore_errors = True changedir = {toxinidir} extras = all deps = mypy==0.971 -r{toxinidir}/tests/requirements.txt -r{toxinidir}/stubs.txt pprint36 commands = mypy domdf_python_tools tests {posargs} [testenv:pyup] basepython = python3.8 skip_install = True ignore_errors = True changedir = {toxinidir} deps = pyupgrade-directories extras = all commands = pyup_dirs domdf_python_tools tests --py36-plus --recursive [testenv:coverage] basepython = python3.8 skip_install = True ignore_errors = True whitelist_externals = /bin/bash passenv = COV_PYTHON_VERSION COV_PLATFORM COV_PYTHON_IMPLEMENTATION * 
changedir = {toxinidir} deps = coverage>=5 coverage_pyver_pragma>=0.2.1 commands = /bin/bash -c "rm -rf htmlcov" coverage html /bin/bash -c "DISPLAY=:0 firefox 'htmlcov/index.html'" [flake8] max-line-length = 120 select = E111 E112 E113 E121 E122 E125 E127 E128 E129 E131 E133 E201 E202 E203 E211 E222 E223 E224 E225 E225 E226 E227 E228 E231 E241 E242 E251 E261 E262 E265 E271 E272 E303 E304 E306 E402 E502 E703 E711 E712 E713 E714 E721 W291 W292 W293 W391 W504 YTT101 YTT102 YTT103 YTT201 YTT202 YTT203 YTT204 YTT301 YTT302 YTT303 STRFTIME001 STRFTIME002 SXL001 PT001 PT002 PT003 PT006 PT007 PT008 PT009 PT010 PT011 PT012 PT013 PT014 PT015 PT016 PT017 PT018 PT019 PT020 PT021 RST201 RST202 RST203 RST204 RST205 RST206 RST207 RST208 RST210 RST211 RST212 RST213 RST214 RST215 RST216 RST217 RST218 RST219 RST299 RST301 RST302 RST303 RST304 RST305 RST306 RST399 RST401 RST499 RST900 RST901 RST902 RST903 Q001 Q002 Q003 A001 A002 TYP001 TYP002 TYP003 TYP004 TYP005 TYP006 ENC001 ENC002 ENC003 ENC004 ENC011 ENC012 ENC021 ENC022 ENC023 ENC024 ENC025 ENC026 Y001,Y002 Y003 Y004 Y005 Y006 Y007 Y008 Y009 Y010 Y011 Y012 Y013 Y014 Y015 Y090 Y091 NQA001 NQA002 NQA003 NQA004 NQA005 NQA102 NQA103 E301 E302 E305 D100 D101 D102 D103 D104 D106 D201 D204 D207 D208 D209 D210 D211 D212 D213 D214 D215 D300 D301 D400 D402 D403 D404 D415 D417 DALL000 SLOT000 SLOT001 SLOT002 extend-exclude = doc-source,old,build,dist,__pkginfo__.py,setup.py,venv rst-directives = TODO autosummary-widths autovariable bold-title envvar extras-require license license-info rst-roles = bold-title choosealicense inline-code manpage per-file-ignores = tests/*: D100 D101 D102 D103 D104 D106 D201 D204 D207 D208 D209 D210 D211 D212 D213 D214 D215 D300 D301 D400 D402 D403 D404 D415 D417 DALL000 SLOT000 SLOT001 SLOT002 */*.pyi: E301 E302 E305 D100 D101 D102 D103 D104 D106 D201 D204 D207 D208 D209 D210 D211 D212 D213 D214 D215 D300 D301 D400 D402 D403 D404 D415 D417 DALL000 SLOT000 SLOT001 SLOT002 tests/list_tests.py: PT011 D100 D101 D102 D103 D104 D106 D201 D204 D207 D208 D209 D210 D211 D212 D213 D214 D215 D300 D301 D400 D402 D403 D404 D415 D417 DALL000 tests/seq_tests.py: PT011 D100 D101 D102 D103 D104 D106 D201 D204 D207 D208 D209 D210 D211 D212 D213 D214 D215 D300 D301 D400 D402 D403 D404 D415 D417 DALL000 tests/test_paths_stdlib.py: PT011 D100 D101 D102 D103 D104 D106 D201 D204 D207 D208 D209 D210 D211 D212 D213 D214 D215 D300 D301 D400 D402 D403 D404 D415 D417 DALL000 tests/test_pretty_print.py: PT011 D100 D101 D102 D103 D104 D106 D201 D204 D207 D208 D209 D210 D211 D212 D213 D214 D215 D300 D301 D400 D402 D403 D404 D415 D417 DALL000 pytest-parametrize-names-type = csv inline-quotes = " multiline-quotes = """ docstring-quotes = """ count = True min_python_version = 3.6 unused-arguments-ignore-abstract-functions = True unused-arguments-ignore-overload-functions = True unused-arguments-ignore-magic-methods = True unused-arguments-ignore-variadic-names = True [coverage:run] plugins = coverage_pyver_pragma [coverage:report] fail_under = 95 show_missing = True exclude_lines = raise AssertionError raise NotImplementedError if 0: if False: if TYPE_CHECKING if typing.TYPE_CHECKING if __name__ == .__main__.: [check-wheel-contents] ignore = W002 toplevel = domdf_python_tools package = domdf_python_tools [pytest] addopts = --color yes --durations 25 timeout = 300 filterwarnings = error ignore:can't resolve package from __spec__ or __package__, falling back on __name__ and __path__:ImportWarning ignore::DeprecationWarning:certifi always:ast.Str is deprecated and will 
be removed in Python 3.14; use ast.Constant instead:DeprecationWarning always:Attribute s is deprecated and will be removed in Python 3.14; use value instead:DeprecationWarning always:ast.Num is deprecated and will be removed in Python 3.14; use ast.Constant instead:DeprecationWarning always:ast.NameConstant is deprecated and will be removed in Python 3.14; use ast.Constant:DeprecationWarning always:datetime.utcfromtimestamp\(\) is deprecated and scheduled for removal in a future version:DeprecationWarning:pytz.tzinfo always:the imp module is deprecated in favour of importlib:DeprecationWarning always:The distutils package is deprecated and slated for removal in Python 3.12:DeprecationWarning always:The distutils.sysconfig module is deprecated:DeprecationWarning [testenv] setenv = PYTHONDEVMODE=1 PIP_DISABLE_PIP_VERSION_CHECK=1 SETUPTOOLS_USE_DISTUTILS=stdlib deps = -r{toxinidir}/tests/requirements.txt extras = all ignore_errors = True commands = python --version python -m pip install pytz>=2019.1 python -m pytest --cov=domdf_python_tools -r aR tests/ {posargs} python -m pip uninstall pytz -y python -m pytest --cov=domdf_python_tools -r aR tests/test_dates.py --cov-fail-under=0 --cov-append {posargs}
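# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original repository): the [pytest]
# filterwarnings block above uses the same "action:message:category" syntax
# as Python's -W option, so individual entries can be reproduced with the
# stdlib warnings module.  Two of the entries are shown; the message strings
# are treated as regular expressions by warnings.filterwarnings().

import warnings

# filterwarnings = error  ->  promote all warnings to exceptions
warnings.simplefilter("error")

# ignore:can't resolve package from __spec__ or __package__, ...:ImportWarning
warnings.filterwarnings(
	"ignore",
	message="can't resolve package from __spec__ or __package__",
	category=ImportWarning,
)

# always:the imp module is deprecated in favour of importlib:DeprecationWarning
warnings.filterwarnings(
	"always",
	message="the imp module is deprecated in favour of importlib",
	category=DeprecationWarning,
)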