python-tuf-5.1.0/.gitattributes

# Test data should not be modified on checkout, regardless of host settings
*.json binary
*.py diff=python

python-tuf-5.1.0/.github/ISSUE_TEMPLATE.md

Please fill in the fields below to submit an issue or feature request. The
more information that is provided, the better.

**Description of issue or feature request**:

**Current behavior**:

**Expected behavior**:

python-tuf-5.1.0/.github/PULL_REQUEST_TEMPLATE.md

**Description of the changes being introduced by the pull request**:

Fixes #

python-tuf-5.1.0/.github/dependabot.yml

version: 2
updates:
  - package-ecosystem: "pip"
    directory: "/"
    schedule:
      interval: "weekly"
    open-pull-requests-limit: 10
    groups:
      build-and-release-dependencies:
        # Python dependencies known to be critical to our build/release security
        patterns:
          - "build"
          - "hatchling"
      test-and-lint-dependencies:
        # Python dependencies that are only pinned to ensure test reproducibility
        patterns:
          - "coverage"
          - "mypy"
          - "ruff"
          - "tox"
      dependencies:
        # Python (developer) runtime dependencies. Also any new dependencies not
        # caught by earlier groups
        patterns:
          - "*"
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: "weekly"
    open-pull-requests-limit: 10
    groups:
      action-dependencies:
        patterns:
          - "*"

python-tuf-5.1.0/.github/scripts/conformance-client.py

#!/usr/bin/env python
"""Conformance client for python-tuf, part of tuf-conformance"""

# Copyright 2024 tuf-conformance contributors
# SPDX-License-Identifier: MIT OR Apache-2.0

import argparse
import os
import shutil
import sys

from tuf.ngclient import Updater


def init(metadata_dir: str, trusted_root: str) -> None:
    """Initialize local trusted metadata"""
    # No need to actually run python-tuf code at this point
    shutil.copyfile(trusted_root, os.path.join(metadata_dir, "root.json"))
    print(f"python-tuf test client: Initialized repository in {metadata_dir}")


def refresh(metadata_url: str, metadata_dir: str) -> None:
    """Refresh local metadata from remote"""
    updater = Updater(
        metadata_dir,
        metadata_url,
    )
    updater.refresh()
    print(f"python-tuf test client: Refreshed metadata in {metadata_dir}")


def download_target(
    metadata_url: str,
    metadata_dir: str,
    target_name: str,
    download_dir: str,
    target_base_url: str,
) -> None:
    """Download a target file."""
    updater = Updater(
        metadata_dir,
        metadata_url,
        download_dir,
        target_base_url,
    )
    target_info = updater.get_targetinfo(target_name)
    if not target_info:
        raise RuntimeError(f"{target_name} not found in repository")
    updater.download_target(target_info)


def main() -> int:
    """Main entry point for the conformance client"""
    parser = argparse.ArgumentParser(description="Conformance client for python-tuf")
    parser.add_argument("--metadata-url",
required=False) parser.add_argument("--metadata-dir", required=True) parser.add_argument("--target-name", required=False) parser.add_argument("--target-dir", required=False) parser.add_argument("--target-base-url", required=False) sub_command = parser.add_subparsers(dest="sub_command") init_parser = sub_command.add_parser( "init", help="Initialize client with given trusted root", ) init_parser.add_argument("trusted_root") sub_command.add_parser( "refresh", help="Refresh the client metadata", ) sub_command.add_parser( "download", help="Downloads a target", ) command_args = parser.parse_args() # initialize the TUF Client Example infrastructure if command_args.sub_command == "init": init(command_args.metadata_dir, command_args.trusted_root) elif command_args.sub_command == "refresh": refresh( command_args.metadata_url, command_args.metadata_dir, ) elif command_args.sub_command == "download": download_target( command_args.metadata_url, command_args.metadata_dir, command_args.target_name, command_args.target_dir, command_args.target_base_url, ) else: parser.print_help() return 0 if __name__ == "__main__": sys.exit(main()) python-tuf-5.1.0/.github/workflows/000077500000000000000000000000001470074210500172275ustar00rootroot00000000000000python-tuf-5.1.0/.github/workflows/_test.yml000066400000000000000000000065201470074210500210730ustar00rootroot00000000000000on: workflow_call: # Permissions inherited from caller workflow permissions: {} jobs: lint-test: name: Lint Test runs-on: ubuntu-latest steps: - name: Checkout TUF uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0 - name: Set up Python (oldest supported version) uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3 # v5.2.0 with: python-version: 3.8 cache: 'pip' cache-dependency-path: | requirements/*.txt pyproject.toml - name: Install dependencies run: | python3 -m pip install --constraint requirements/build.txt tox coveralls - name: Run tox env: RUFF_OUTPUT_FORMAT: github run: tox -e lint tests: name: Tests needs: lint-test strategy: matrix: python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] os: [ubuntu-latest] include: - python-version: "3.12" os: macos-latest - python-version: "3.12" os: windows-latest runs-on: ${{ matrix.os }} steps: - name: Checkout TUF uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0 - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3 # v5.2.0 with: python-version: ${{ matrix.python-version }} cache: 'pip' cache-dependency-path: | requirements/*.txt pyproject.toml - name: Install dependencies run: | python3 -m pip install --constraint requirements/build.txt tox coveralls - name: Run tox run: tox -e py - name: Publish on coveralls.io # A failure to publish coverage results on coveralls should not # be a reason for a job failure. continue-on-error: true env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} COVERALLS_FLAG_NAME: ${{ runner.os }} / Python ${{ matrix.python-version }} COVERALLS_PARALLEL: true # Use cp workaround to publish coverage reports with relative paths # FIXME: Consider refactoring the tests to not require the test # aggregation script being invoked from the `tests` directory, so # that `.coverage` is written to and .coveragrc can also reside in # the project root directory as is the convention. run: | cp tests/.coverage . 
coveralls --service=github --rcfile=tests/.coveragerc coveralls-fin: # Always run when all 'tests' jobs have finished even if they failed # TODO: Replace always() with a 'at least one job succeeded' expression if: always() needs: tests runs-on: ubuntu-latest steps: - name: Add requirements file to make setup-python happy run: touch requirements.txt - name: Set up Python uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3 # v5.2.0 with: python-version: '3.x' cache: 'pip' - name: Install dependencies run: | python3 -m pip install coveralls - name: Finalize publishing on coveralls.io continue-on-error: true env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: coveralls --finish python-tuf-5.1.0/.github/workflows/_test_sslib_main.yml000066400000000000000000000014121470074210500232660ustar00rootroot00000000000000on: workflow_call: # Permissions inherited from caller workflow permissions: {} jobs: sslib-main: name: Test securesystemslib main branch (not a merge blocker) runs-on: ubuntu-latest steps: - name: Checkout TUF uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0 - name: Set up Python uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3 # v5.2.0 with: python-version: '3.x' cache: 'pip' cache-dependency-path: | requirements/*.txt pyproject.toml - name: Install dependencies run: | python3 -m pip install --constraint requirements/build.txt tox - name: Run tox run: tox -e with-sslib-main python-tuf-5.1.0/.github/workflows/cd.yml000066400000000000000000000071201470074210500203400ustar00rootroot00000000000000name: CD concurrency: cd on: push: tags: - v* permissions: {} jobs: test: uses: ./.github/workflows/_test.yml build: name: Build runs-on: ubuntu-latest needs: test steps: - name: Checkout release tag uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0 with: ref: ${{ github.event.workflow_run.head_branch }} - name: Set up Python uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3 # v5.2.0 with: python-version: '3.x' - name: Install build dependency run: python3 -m pip install --constraint requirements/build.txt build - name: Build binary wheel, source tarball and changelog run: | PIP_CONSTRAINT=requirements/build.txt python3 -m build --sdist --wheel --outdir dist/ . 
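          # Extract this release's notes from the changelog: print the lines
          # after the "## <tag>" heading, up to (not including) the next
          # "## v" heading.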
awk "/## $GITHUB_REF_NAME/{flag=1; next} /## v/{flag=0} flag" docs/CHANGELOG.md > changelog - name: Store build artifacts uses: actions/upload-artifact@50769540e7f4bd5e21e526ee35c689e35e0d6874 # v4.4.0 with: name: build-artifacts path: | dist changelog candidate_release: name: Release candidate on Github for review runs-on: ubuntu-latest needs: build permissions: contents: write # to modify GitHub releases outputs: release_id: ${{ steps.gh-release.outputs.result }} steps: - name: Fetch build artifacts uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: build-artifacts - id: gh-release name: Publish GitHub release draft uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1 with: script: | fs = require('fs') res = await github.rest.repos.createRelease({ owner: context.repo.owner, repo: context.repo.repo, name: '${{ github.ref_name }}-rc', tag_name: '${{ github.ref }}', body: fs.readFileSync('changelog', 'utf8'), }); fs.readdirSync('dist/').forEach(file => { github.rest.repos.uploadReleaseAsset({ owner: context.repo.owner, repo: context.repo.repo, release_id: res.data.id, name: file, data: fs.readFileSync('dist/' + file), }); }); return res.data.id release: name: Release runs-on: ubuntu-latest needs: candidate_release environment: release permissions: contents: write # to modify GitHub releases id-token: write # to authenticate as Trusted Publisher to pypi.org steps: - name: Fetch build artifacts uses: actions/download-artifact@fa0a91b85d4f404e444e00e005971372dc801d16 # v4.1.8 with: name: build-artifacts - name: Publish binary wheel and source tarball on PyPI # Only attempt pypi upload in upstream repository if: github.repository == 'theupdateframework/python-tuf' uses: pypa/gh-action-pypi-publish@897895f1e160c830e369f9779632ebc134688e1b # v1.10.2 - name: Finalize GitHub release uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1 with: script: | github.rest.repos.updateRelease({ owner: context.repo.owner, repo: context.repo.repo, release_id: '${{ needs.candidate_release.outputs.release_id }}', name: '${{ github.ref_name }}', }) python-tuf-5.1.0/.github/workflows/ci.yml000066400000000000000000000003551470074210500203500ustar00rootroot00000000000000name: CI on: push: branches: - develop pull_request: workflow_dispatch: permissions: {} jobs: test: uses: ./.github/workflows/_test.yml test-with-sslib-main: uses: ./.github/workflows/_test_sslib_main.yml python-tuf-5.1.0/.github/workflows/codeql-analysis.yml000066400000000000000000000015421470074210500230440ustar00rootroot00000000000000name: "CodeQL Analysis" on: push: branches: [ develop ] pull_request: branches: [ develop ] schedule: - cron: '30 0 * * 2' workflow_dispatch: permissions: {} jobs: analyze: name: Analyze runs-on: ubuntu-latest permissions: # NOTE: If you add security critical permissions, start pinning used actions actions: read contents: read security-events: write # for uploading to code-scanning dashboard steps: - name: Checkout repository uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0 - name: Initialize CodeQL uses: github/codeql-action/init@v3 # unpinned since this is not security critical with: languages: 'python' - name: Perform CodeQL Analysis uses: github/codeql-action/analyze@v3 # unpinned since this is not security critical python-tuf-5.1.0/.github/workflows/conformance.yml000066400000000000000000000010111470074210500222350ustar00rootroot00000000000000on: push: branches: - develop pull_request: 
workflow_dispatch: permissions: contents: read name: Conformance test jobs: conformance: runs-on: ubuntu-latest steps: - name: Checkout conformance client uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0 - name: Run test suite uses: theupdateframework/tuf-conformance@f4acd16d0ea49a6fd5cc4558084b578c6fc7d6cd # v2.0.0 with: entrypoint: ".github/scripts/conformance-client.py" python-tuf-5.1.0/.github/workflows/dependency-review.yml000066400000000000000000000020411470074210500233640ustar00rootroot00000000000000# Dependency Review Action # # This Action will scan dependency manifest files that change as part of a Pull Reqest, surfacing known-vulnerable versions of the packages declared or updated in the PR. Once installed, if the workflow run is marked as required, PRs introducing known-vulnerable packages will be blocked from merging. # # Source repository: https://github.com/actions/dependency-review-action # Public documentation: https://docs.github.com/en/code-security/supply-chain-security/understanding-your-software-supply-chain/about-dependency-review#dependency-review-enforcement name: 'Dependency Review' on: [pull_request] permissions: # NOTE: If you add security critical permissions, start pinning used actions contents: read jobs: dependency-review: runs-on: ubuntu-latest steps: - name: 'Checkout Repository' uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0 - name: 'Dependency Review' uses: actions/dependency-review-action@v4 # unpinned since this is not security critical python-tuf-5.1.0/.github/workflows/maintainer-permissions-reminder.yml000066400000000000000000000052151470074210500262600ustar00rootroot00000000000000name: Maintainer review reminder on: schedule: - cron: '10 10 10 2 *' workflow_dispatch: permissions: issues: write jobs: file-reminder-issue: name: File issue to review maintainer permissions runs-on: ubuntu-latest steps: - uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea # v7.0.1 with: script: | await github.rest.issues.create({ owner: context.repo.owner, repo: context.repo.repo, title: "Yearly maintainer permissions review", body: ` This is a checklist for evaluating python-tuf maintainer accounts and permissions. This issue is automatically opened once a year. ### Tasks 1. Update this list to include any new services 2. Evaluate the accounts and permissions for each service on the list. Some rules of thumb: * Critical services should have a minimum of 3 _active_ maintainers/admins to prevent project lockout * Each additional maintainer/admin increases the risk of project compromise: for this reason permissions should be removed if they are no longer used * For services that are not frequently used, each maintainer/admin should check that they really are still able to authenticate to the service and confirm this in the comments 3. Update MAINTAINERS.txt to reflect current permissions 4. 
(Bonus) Update significant contributors in README.md#acknowledgements ### Critical services * [ ] **PyPI**: maintainer list is visible to everyone at https://pypi.org/project/tuf/ * Only enough maintainers and org admins to prevent locking the project out * [ ] **GitHub**: release environment reviewers listed in https://github.com/theupdateframework/python-tuf/settings/environments * Maintainers who can approve releases to PyPI * [ ] **GitHub**: permissions visible to admins at https://github.com/theupdateframework/python-tuf/settings/access * "admin" permission: Only for maintainers and org admins who do project administration * "push/maintain" permission: Maintainers who actively approve and merge PRs (+admins) * "triage" permission: All contributors trusted to manage issues ### Other * [ ] **ReadTheDocs**: admin list is visible to everyone at https://readthedocs.org/projects/theupdateframework/ * [ ] **Coveralls**: everyone with github "admin" permissions is a Coveralls admin: https://coveralls.io/github/theupdateframework/python-tuf ` }) console.log("New issue created.") python-tuf-5.1.0/.github/workflows/scorecards.yml000066400000000000000000000024221470074210500221020ustar00rootroot00000000000000name: Scorecards analysis on: branch_protection_rule: schedule: - cron: '21 6 * * 1' push: branches: [ develop ] workflow_dispatch: permissions: {} jobs: analysis: name: Scorecards analysis runs-on: ubuntu-latest permissions: # NOTE: If you add security critical permissions, start pinning used actions security-events: write # for uploading to code-scanning dashboard id-token: write # for publishing results in scorecard public dataset actions: read contents: read steps: - name: "Checkout code" uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0 - name: "Run analysis" uses: ossf/scorecard-action@62b2cac7ed8198b15735ed49ab1e5cf35480ba46 # v2.4.0 with: results_file: results.sarif # sarif format required by upload-sarif action results_format: sarif # "repo_token" not set because personal access tokens are dangerous. # This means Branch-Protection check will not have correct results. publish_results: true - name: "Upload to code-scanning dashboard" uses: github/codeql-action/upload-sarif@v3 # unpinned since this is not security critical with: sarif_file: results.sarif python-tuf-5.1.0/.github/workflows/specification-version-check.yml000066400000000000000000000023101470074210500253240ustar00rootroot00000000000000on: schedule: - cron: "0 13 * * *" workflow_dispatch: name: Specification version check permissions: {} jobs: # Get the version of the TUF specification the project states it supports get-supported-tuf-version: runs-on: ubuntu-latest outputs: version: ${{ steps.get-version.outputs.version }} steps: - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0 - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3 # v5.2.0 with: python-version: "3.x" - id: get-version run: | python3 -m pip install -e . 
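          # Query the supported spec version from the installed package;
          # SPECIFICATION_VERSION is a list of strings like ["1", "0", "31"].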
script="from tuf.api.metadata import SPECIFICATION_VERSION; \ print(f\"v{'.'.join(SPECIFICATION_VERSION)}\")" ver=$(python3 -c "$script") echo "version=$ver" >> $GITHUB_OUTPUT # Get the latest TUF specification release and open an issue (if needed) specification-bump-check: permissions: contents: read issues: write needs: get-supported-tuf-version uses: theupdateframework/specification/.github/workflows/check-latest-spec-version.yml@master with: tuf-version: ${{needs.get-supported-tuf-version.outputs.version}} python-tuf-5.1.0/.gitignore000066400000000000000000000006031470074210500156210ustar00rootroot00000000000000# root level directories dist/ build/ env/ # docs build directory docs/build/ # global file patterns *.log *.pyc *.session *.swo *.swp *.egg-info .coverage .tox/ tests/htmlcov/ .DS_Store .pybuild/ .python-version *~ *.tmp .pre-commit-config.yaml .vscode # Debian generated files debian/.debhelper/ debian/*-stamp debian/files debian/*.debhelper debian/*.substvars debian/python*-tuf/ python-tuf-5.1.0/.readthedocs.yaml000066400000000000000000000012661470074210500170660ustar00rootroot00000000000000# .readthedocs.yaml # Read the Docs configuration file # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details # Required version: 2 # Without a build section we hit https://github.com/urllib3/urllib3/issues/2168 # (the default image has openssl < 1.1.1) build: os: "ubuntu-22.04" tools: python: "3.12" # Build documentation with Sphinx sphinx: builder: html configuration: docs/conf.py fail_on_warning: true # Optionally build your docs in additional formats such as PDF formats: [] # Optionally set the version of Python and requirements required to build your docs python: install: - requirements: requirements/docs.txt - method: pip path: . python-tuf-5.1.0/LICENSE000066400000000000000000000227361470074210500146510ustar00rootroot00000000000000 Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). 
"Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. 
You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. 
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. python-tuf-5.1.0/LICENSE-MIT000066400000000000000000000020761470074210500152730ustar00rootroot00000000000000The MIT License (MIT) Copyright (c) 2010 New York University Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
python-tuf-5.1.0/README.md

# TUF

A Framework for Securing Software Update Systems

![Build](https://github.com/theupdateframework/python-tuf/actions/workflows/ci.yml/badge.svg)
[![Coveralls](https://coveralls.io/repos/theupdateframework/python-tuf/badge.svg?branch=develop)](https://coveralls.io/r/theupdateframework/python-tuf?branch=develop)
[![Docs](https://readthedocs.org/projects/theupdateframework/badge/)](https://theupdateframework.readthedocs.io/)
[![CII](https://bestpractices.coreinfrastructure.org/projects/1351/badge)](https://bestpractices.coreinfrastructure.org/projects/1351)
[![PyPI](https://img.shields.io/pypi/v/tuf)](https://pypi.org/project/tuf/)
[![OpenSSF Scorecard](https://api.scorecard.dev/projects/github.com/theupdateframework/python-tuf/badge)](https://scorecard.dev/viewer/?uri=github.com/theupdateframework/python-tuf)

----------------------------

[The Update Framework (TUF)](https://theupdateframework.io/) is a framework for
secure content delivery and updates. It protects against various types of
supply chain attacks and provides resilience to compromise. This repository is
a **reference implementation** written in Python. It is intended to conform to
version 1.0 of the
[TUF specification](https://theupdateframework.github.io/specification/latest/).

Python-TUF provides the following APIs:

  * [`tuf.api.metadata`](https://theupdateframework.readthedocs.io/en/latest/api/tuf.api.html),
    a "low-level" API, designed to provide easy and safe access to TUF
    metadata and to handle (de)serialization from/to files.
  * [`tuf.ngclient`](https://theupdateframework.readthedocs.io/en/latest/api/tuf.ngclient.html),
    a client implementation built on top of the metadata API.
  * `tuf.repository`, a repository library also built on top of the
    metadata API. This module is currently not considered part of python-tuf
    stable API.

The reference implementation strives to be a readable guide and demonstration
for those working on implementing TUF in their own languages, environments, or
update systems.

About The Update Framework
--------------------------

The Update Framework (TUF) design helps developers maintain the security of a
software update system, even against attackers that compromise the repository
or signing keys. TUF provides a flexible
[specification](https://github.com/theupdateframework/specification/blob/master/tuf-spec.md)
defining functionality that developers can use in any software update system
or re-implement to fit their needs.

TUF is hosted by the [Linux Foundation](https://www.linuxfoundation.org/) as
part of the [Cloud Native Computing Foundation](https://www.cncf.io/) (CNCF)
and its design is [used in production](https://theupdateframework.io/adoptions/)
by various tech companies and open source organizations. A variant of TUF
called [Uptane](https://uptane.github.io/) is used to secure over-the-air
updates in automobiles.

Please see [TUF's website](https://theupdateframework.com/) for more
information about TUF!
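Usage Sketch
------------

As a rough illustration of the client API described above, here is a minimal,
hypothetical `tuf.ngclient` flow. The URLs, paths, and target name are
placeholders, and a trusted `root.json` is assumed to already exist in the
local metadata directory (the conformance client in
`.github/scripts/conformance-client.py` shows one way to put it there):

```python
from tuf.ngclient import Updater

# Hypothetical repository layout: metadata and targets are served from
# example.com, and a trusted root.json is already in metadata_dir.
updater = Updater(
    metadata_dir="/path/to/metadata",
    metadata_base_url="https://example.com/metadata/",
    target_dir="/path/to/downloads",
    target_base_url="https://example.com/targets/",
)
updater.refresh()  # update top-level metadata from the remote repository

info = updater.get_targetinfo("file.txt")  # placeholder target name
if info is None:
    raise RuntimeError("file.txt not found in repository")
path = updater.download_target(info)  # downloads, then verifies length/hashes
print(f"Verified target written to {path}")
```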
Documentation ------------- * [Introduction to TUF's Design](https://theupdateframework.io/overview/) * [The TUF Specification](https://theupdateframework.github.io/specification/latest/) * [Developer documentation](https://theupdateframework.readthedocs.io/), including [API reference]( https://theupdateframework.readthedocs.io/en/latest/api/api-reference.html) and [instructions for contributors](https://theupdateframework.readthedocs.io/en/latest/CONTRIBUTING.html) * [Usage examples](https://github.com/theupdateframework/python-tuf/tree/develop/examples/) * [Governance](https://github.com/theupdateframework/python-tuf/blob/develop/docs/GOVERNANCE.md) and [Maintainers](https://github.com/theupdateframework/python-tuf/blob/develop/docs/MAINTAINERS.txt) for the reference implementation * [Miscellaneous Docs](https://github.com/theupdateframework/python-tuf/tree/develop/docs) * [Python-TUF development blog](https://theupdateframework.github.io/python-tuf/) Contact ------- Questions, feedback, and suggestions are welcomed on our low volume [mailing list](https://groups.google.com/forum/?fromgroups#!forum/theupdateframework) or the [#tuf](https://cloud-native.slack.com/archives/C8NMD3QJ3) channel on [CNCF Slack](https://slack.cncf.io/). We strive to make the specification easy to implement, so if you come across any inconsistencies or experience any difficulty, do let us know by sending an email, or by reporting an issue in the GitHub [specification repo](https://github.com/theupdateframework/specification/issues). Security Issues and Bugs ------------------------ See [SECURITY.md](docs/SECURITY.md) License ------- This work is [dual-licensed](https://en.wikipedia.org/wiki/Multi-licensing) and distributed under the (1) MIT License and (2) Apache License, Version 2.0. Please see [LICENSE-MIT](https://github.com/theupdateframework/python-tuf/blob/develop/LICENSE-MIT) and [LICENSE](https://github.com/theupdateframework/python-tuf/blob/develop/LICENSE). Acknowledgements ---------------- This project is hosted by the Linux Foundation under the Cloud Native Computing Foundation. TUF's early development was managed by members of the [Secure Systems Lab](https://ssl.engineering.nyu.edu/) at [New York University](https://engineering.nyu.edu/). We appreciate the efforts of all [maintainers and emeritus maintainers](https://github.com/theupdateframework/python-tuf/blob/develop/docs/MAINTAINERS.txt), as well as the contributors Konstantin Andrianov, Kairo de Araujo, Ivana Atanasova, Geremy Condra, Zane Fisher, Pankhuri Goyal, Justin Samuel, Tian Tian, Martin Vrachev and Yuyu Zheng who are among those who helped significantly with TUF's reference implementation. Maintainers and Contributors are governed by the [CNCF Community Code of Conduct](https://github.com/cncf/foundation/blob/master/code-of-conduct.md). This material is based upon work supported by the National Science Foundation under Grant Nos. CNS-1345049 and CNS-0959138. Any opinions, findings, and conclusions or recommendations expressed in this material are those of the author(s) and do not necessarily reflect the views of the National Science Foundation. 
python-tuf-5.1.0/docs/1.0.0-ANNOUNCEMENT.md

# Announcing TUF 1.0.0

Python-TUF v1.0.0 is a rewritten stable reference implementation of the TUF
specification, which *currently* includes:

- a modern low-level [*metadata API*](https://theupdateframework.readthedocs.io/en/latest/api/tuf.api.html)
- a fully specification-compliant [*updater client*](https://theupdateframework.readthedocs.io/en/latest/api/tuf.ngclient.html),
  serving as a more robust and yet more flexible stand-in replacement for the
  legacy client updater

For the reasons outlined in
[ADR 10](https://github.com/theupdateframework/python-tuf/blob/develop/docs/adr/0010-repository-library-design.md),
this release *does not yet* include *repository tool*-like functionality.
However, the new *metadata API* makes it easy to replicate the desired
functionality tailored to the specific needs of any given repository (see
*Migration* for details).

As discussed in
[ADR 2](https://github.com/theupdateframework/python-tuf/blob/develop/docs/adr/0002-pre-1-0-deprecation-strategy.md),
this release *does not* include any legacy code, as its maintenance has become
infeasible for the python-tuf team. The pre-1.0.0 deprecation strategy from
ADR 2 applies as follows:

> *Bugs reported with tuf versions prior to 1.0.0 will likely not be addressed
> directly by tuf’s maintainers. Pull Requests to fix bugs in the last release
> prior to 1.0.0 will be considered, and merged (subject to normal review
> processes). Note that there may be delays due to the lack of developer
> resources for reviewing such pull requests.*

## Migration

Given the clean cut with the legacy reference implementation, we provide the
following migration support:

- detailed code documentation on
  [https://theupdateframework.readthedocs.io](https://theupdateframework.readthedocs.io/)
- verbose [code examples](https://github.com/theupdateframework/python-tuf/tree/develop/examples)
  for *client updater* usage, and repository-side operations based on the
  low-level *metadata API*
- individual migration support upon
  [request](https://github.com/theupdateframework/python-tuf#contact)
- targeted migration support initiative for known users

python-tuf-5.1.0/docs/CHANGELOG.md

# Changelog

## v5.1.0

### Changed
* ngclient: default user-agent was updated from "tuf/x.y.z" to
  "python-tuf/x.y.z" (#2632)
* ngclient: the max_root_rotations default value was bumped to 256 to prevent
  a too-small value from causing issues in deployments where the embedded
  root is not easily updatable (#2675)
* repository: do_snapshot() and do_timestamp() now always create new versions
  if the current version is not correctly signed (#2650)
* Various infrastructure and documentation improvements

## v5.0.0

This release, most notably, marks stable securesystemslib v1.0.0 as the
minimum requirement. The update causes a minor break in the new DSSE API (see
below) and affects users who also depend directly on securesystemslib. See the
[securesystemslib release notes](https://github.com/secure-systems-lab/securesystemslib/blob/main/CHANGELOG.md#securesystemslib-v100)
and the updated python-tuf `examples` (#2617) for details.

ngclient API remains backwards-compatible.
### Changed * DSSE API: change `SimpleEnvelope.signatures` type to `dict`, remove `SimpleEnvelope.signatures_dict` (#2617) * ngclient: support app-specific user-agents (#2612) * Various build, test and lint improvements ## v4.0.0 This release is a small API change for Metadata API users (see below). ngclient API is compatible but optional DSSE support has been added. ### Added * Added optional DSSE support to Metadata API and ngclient (#2436) ### Changed * Metadata API: Improved verification functionality for repository users (#2551): * This is an API change for Metadata API users ( `Root.get_verification_result()` and `Targets.get_verification_result()` specifically) * `Root.get_root_verification_result()` has been added to handle the special case of root verification * Started using UTC datetimes instead of naive datetimes internally (#2573) * Constrain securesystemslib dependency to <0.32.0 in preparation for future securesystemslib API changes * Various build, test and lint improvements ## v3.1.1 This is a security fix release to address advisory GHSA-77hh-43cm-v8j6. The issue does **not** affect tuf.ngclient users, but could affect tuf.api.metadata users. ### Changed * Added additional input validation to `tuf.api.metadata.Targets.get_delegated_role()` ## v3.1.0 ### Added * Metadata API: move verify_delegate() to Root/Targets (#2378) - *verify_delegate() on Metadata is now deprecated* * Metadata API: add get_verification_result() as verbose alternative for verify_delegate() (#2481) * Metadata API: add MetaFile.from_data() convenience factory (#2273) ### Changed * Metadata API: change Root.roles type hint to Dict (#2411) * Various minor improvements in tests (#2447, #2491), docs (#2390, #2392, #2474) and build (#2389, #2453, #2479, #2488) ### Removed * build: Python 3.7 support (#2460) ## v3.0.0 The notable change in this release is #2165: The tuf.api.metadata.Key class implementation was moved to Securesystemslib with minor API changes. These changes require no action in tuf.ngclient users but may require small changes in tuf.api.metadata using repository implementations that create keys. As a result of these changes, both signing and verification are now fully extensible, see Securesystemslib signer API for details. tuf.repository remains an unstable module in 3.0.0. 
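As a rough, hypothetical sketch of the new key flow (assuming a
securesystemslib version of that era), keys are now created via
Securesystemslib and wrapped in its `SSlibKey` type:

```python
from securesystemslib.keys import generate_ed25519_key
from securesystemslib.signer import SSlibKey

# Generate a key in securesystemslib's legacy dict format, then wrap it in
# the SSlibKey type that tuf.api.metadata consumes from 3.0.0 onwards.
key_dict = generate_ed25519_key()
key = SSlibKey.from_securesystemslib_key(key_dict)
```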
### Added * Build: Use pydocstyle to lint docstrings (#2283, #2281) * Examples: Add Repository uploader/signer tool example (#2241) * Metadata API: Add TargetFile.get_prefixed_paths() (#2166) * ngclient: Export TargetFile (#2279) * repository: Add strictly typed accessors and context managers (#2311) * Release: Use PyPI Trusted Publishing https://docs.pypi.org/trusted-publishers/ (#2371) ### Changed * Build: Various minor build and release infrastructure improvements, dependency updates * Metadata API: Key class is still part of the API but now comes from Securesystemslib (#2165): * `Key.verify_signature()` method signature has changed * `Key.from_securesystemslib_key()` was removed: Use Securesystemslibs `SSlibKey.from_securesystemslib_key()` instead ## v2.1.0 ### Added * repo: experimental repository module and example (#2193) * ngclient: expose default requests fetcher (#2277) * workflow: OpenSSF scorecard (#2190) * build: Python 3.11 support (#2157) * docs: security policy (#2098, #2178) * blog: signer API (#2276) * blog: security audit (#2155, #2156) ### Changed * Metadata API: bump specification version 1.0.31 (#2119) * Metadata API: allow zero length metadata files (#2137) * Metadata API: add default value for MetaFile version (#2211) * Metadata API, ngclient: decrease logger verbosity (#2243) * ngclient: define API explicitly (#2233) * ngclient: improve example client output (#2194) * ngclient: support URLs without host part (#2075) * ngclient: update metaclass syntax (#2215) * ngclient: fail gracefully on missing role (#2197) * ngclient: improve type annotations in TrustedMetadataSet (#2250) * doc: misc improvements (2097, #2130, #2183, #2185, #2201, #2208, #2230, #2278) * build: misc improvements (#2090, #2091, #2122, #2187, #2188, #2217, #2252) * workflow: misc improvements (#2001, #2092, #2147, #2159, #2173) ## v2.0.0 This release, most notably, adds support for [TAP 15] - succinct hash bin delegation, which results in a few backwards-incompatible changes in the Metadata API. **NOTE**: While TAP 15 has been accepted it is not yet part of the TUF specification. Therefore, adopters should be prepared for potential changes to the implementation in future and for a lack of support for TAP 15 in other TUF implementations. [TAP 15]: https://github.com/theupdateframework/taps/blob/master/tap15.md ### Added * Metadata API: TAP 15 - succinct hash bin delegation (#2010, #2031, #2038, #2039) * build: CodeQL analysis action (#1932) * build: Dependency review action (#1974) * blog: ngclient design (#1914) * blog: tricky test cases (#1941, #2027) ### Changed * Metadata API: **BREAKING CHANGES** in Root and Targets class (#2010) - Argument order changed in add_key() and remove_key() - remove_key() renamed to revoke_key() * Metadata API: Update supported spec version to 1.0.30 (#2035) * ngclient: Use trusted timestamp role if new timestamp has equal version (#2024) * docs: Misc improvements (#1983, #2002, #2004, #2041, #2051, #2064) * tests: Misc improvements (#2017) * tests: Stop using requests type annotations (#1991) * build: Pin hatchling version (#1989) * build: Tweak pip download in verify_release script (#1982) * build: Update pinned dependency versions ### Fixes * Metadata API: Check None instead of falsyness for some optional arguments (#1975) * ngclient: Prevent use of potentially undefined variable (#2003) * tests: Change git attributes for test data (#2063) ## v1.1.0 This release contains major build improvements as well as fixes and backwards-compatible API improvements. 
### Added * build: Release process was moved to CD platform (#1946, #1971, #1976) * build: Build is now reproducible thanks to Hatchling (#1896, #1900) * build: Build results are now verifiable (#1913, #1926, #1947, #1979) * build: test dependencies are now pinned for reproducibility (#1867, #1918) * Metadata API: Validation is now possible during serialization (#1775) * Infrastructure: Setup development blog (#1886, #1887) ### Changed * Metadata API: Supported specification version updated (#1908, #1960) * Metadata API: unrecognized_fields annotation fix (#1950) * Metadata API: Constructors are now easier to use (#1922) * Metadata API: Logging and error message improvements (#1876) * build: Include examples in source distribution (#1970) * build: Updated pinned dependency versions * tests: Various improvements (#1707, #1758, #1808, #1860, #1915, #1936, #1953, #1954, #1955) ## v1.0.0 This release makes ngclient and the Metadata API the supported python-tuf APIs. It also removes the legacy implementation as documented in the [1.0.0 announcement](1.0.0-ANNOUNCEMENT.md): all library code is now contained in `tuf.api` or `tuf.ngclient`. ### Added * tests: Extend testing (#1689, #1703, #1711, #1728, #1735, #1738, #1742, #1766, #1777, #1809, #1831) ### Changed * Metadata API: Disallow microseconds in expiry (#1712) * Metadata API: Preserve role keyid order (#1754) * Metadata API: Make exceptions more consistent (#1725, #1734, #1787, #1840, #1836) * Metadata API: Update supported spec version to "1.0.28" (#1825) * Metadata API: Accept legacy spec version "1.0" (#1796) * Metadata API: Accept custom fields in Metadata (#1861) * ngclient: Remove temporary file in failure cases (#1757) * ngclient: Explicitly encode rolename in URL (#1759) * ngclient: Allow HTTP payload compression (#1774) * ngclient: Make exceptions more consistent (#1799, #1810) * docs: Improve documentation (#1744, #1749, #1750, #1755, #1771, #1776, #1772, #1780, #1781, #1800, #1815, #1820, #1829, #1838, #1850, #1853, #1855, #1856 #1868, #1871) * build: Various build infrastructure improvements (#1718, #1724, #1760, #1762, #1767, #1803, #1830, #1832, #1837, #1839) * build: Stop supporting EOL Python 3.6 (#1783) * build: Update dependencies (#1809, #1827, #1834, #1863, #1865, #1870) ### Removed * Remove all legacy code including old client, repository_tool, repository_lib and the scripts (#1790) * Metadata API: Remove modification helper methods that are no longer necessary (#1736, #1740, #1743) * tests: Remove client tests that were replaced with better ones (#1741) * tests: Stop using unittest_toolbox (#1792) * docs: Remove deprecated documentation (#1768, #1769, #1773, #1848) ## v0.20.0 *__NOTE:__ This will be the final release of python-tuf that includes the legacy implementation code. 
Please see the [*1.0.0 announcement*](1.0.0-ANNOUNCEMENT.md) page for more details about the next release and the deprecation of the legacy implementation, including migration instructions.* ### Added * metadata API: misc input validation (#1630, #1688, #1668, #1672, #1690) * doc: repository library design document and ADR (#1693) * doc: 1.0.0 announcement (#1706) * doc: misc docstrings in metadata API (#1620) * doc: repository and client examples (#1675, #1685, #1700) * test: ngclient key rotation (#1635, #1649, #1691) * test: ngclient top-level role update (#1636) * test: ngclient non-consistent snapshot (#1666, #1705) * test: more lint/type checks and auto-formatting (#1658, #1664, #1659, #1674, #1677, #1687, #1699, #1701, #1708, #1710, #1720, #1726) * build: Python 3.10 support (#1628) ### Changed * ngclient: misc API changes (#1604, #1731) * ngclient: avoid re-loading verified targets metadata (#1593) * ngclient: implicitly call refresh() (#1654) * ngclient: return loaded metadata (#1680) * ngclient: skip visited nodes on delegation tree traversal (#1683) * ngclient: remove URL normalisation (#1686) * build: modernise packaging configuration (#1626) * build: bump dependencies (#1609, #1611, #1616, #1621) * build: limit GitHub Action token visibility and permissions (#1652, #1663) * test: misc test changes (#1715, #1670, #1671, #1631, #1695, #1702) ### Removed * doc: obsolete roadmap (#1698) ## v0.19.0 For users of legacy client (tuf.client module) this is purely a security fix release with no API or functionality changes. For ngclient (tuf.ngclient) and Metadata API (tuf.api.metadata), some API changes are included. **All users are advised to upgrade**. Note that python-tuf has required python>=3.5 since release 0.18.0. ### Fixed * GHSA-wjw6-2cqr-j4qr: Fix client side issue in both legacy client (tuf.client) and ngclient (tuf.ngclient) where a malicious repository could trick client to overwrite files outside the client metadata store during a metadata update. The fix includes percent-encoding the metadata rolename before using it as part of a filename https://github.com/theupdateframework/python-tuf/security/advisories/GHSA-wjw6-2cqr-j4qr * ngclient: Do not use urljoin to form metadata URL (included in GHSA-wjw6-2cqr-j4qr) * ngclient: Persist metadata safely (#1574) * ngclient: Handle timeout on session.get() (#1588) ### Added * build: Dependabot now monitors GitHub Actions (#1572) * tests: ngclient test improvements (#1564, #1569, #1587) * Metadata API: Add TargetFile.from_file() (#1521) ### Changed * build: Bump dependency charset-normalizer (#1581, #1586) * build: Bump dependency urllib3 (#1589) * build: Bump dependency cryptography (#1596) * Metadata API: Documentation improvements (#1533, #1590) * Metadata API: change Timestamp meta API (#1446) * Metadata API: change Delegations roles API (#1537) * ngclient: Remove unnecessary sleep() (#1608) * ngclient: Fix consistent targets URL resolution (#1591) * ngclient: Don't use target path as local path (#1592) ## v0.18.1 ### Changed * Update setup.cfg to not build universal wheels (#1566) ## v0.18.0 0.18 is a big release with 3 main themes: * Support only Python 3 and modernize the infrastructure accordingly * Metadata API (a low-level API for metadata de/serialization and modification) is now feature-complete for the client use cases * ngclient (a new high-level client API) was added. ngclient should be considered an unstable API and is not yet recommended for production use. 
Additionally the Github project name changed: project is now "python-tuf" instead of "tuf". Redirects are in place for the old name but updating links is advised. ### Added * Add ADR6: Where to implement serialization (#1270) * Add ADR8: Unrecognized fields (#1343) * Add ADR9: Refine reference implementation purpose (#1554) * Add client Network IO abstraction (#1250, #1302) * Add many features to Metadata API to support de/serializing specification-compliant metadata, and safer access through API: * Metadata.from_bytes()/to_bytes() (#1354, #1490) * Key, Role (#1360, #1386, #1423, #1480, #1481, #1520) * DelegationRole, Delegations (#1370, #1512) * MetaFile, TargetFile (#1329, #1437, #1454, #1514) * verification of threshold of signatures (#1435, #1436) * expiration check method (#1347) * support unrecognized fields in metadata (#1345) * use Generics to improve static typing (#1457) * Extensive Metadata API testing and validation (#1359, #1416, #1416, #1430, #1449, #1450, #1451, #1460, #1466, #1511) * Add ngclient: a new client library implementation (#1408, #1448, #1463 #1467, #1470, #1474, #1501, #1509, #1519, #1524) * Infrastructure improvements: * mypy, black and isort integration (#1314, #1363, #1395, #1455, #1489) * API reference documentation build (#1517) ### Removed * Remove Python 2 support (#1293) * Remove direct dependency on six * Remove obsolete reference to Thandy in a LICENSE file (#1472) ### Changed * Bump dependencies: * Certifi * Cryptography * Idna * Requests * Securesystemslib * Six * Urllib3 * Replace indirect dependency chardet with charset-normalizer * Move Metadata API serialization to sub-package (#1279) * Use SecureSystemslib Signer interface in Metadata API (#1272) * Make imports compatible with vendoring (#1261) ### Fixed * 'ecdsa' is a supported key type (#1453) * Fix various build infrastructure issues (#1289, #1295, #1321, #1327, #1364, #1369, #1542) * Test fixes (#1337, #1346) ## v0.17.0 **NOTE**: this will be the final release of tuf that supports Python 2.7. This is because Python 2.7 was marked [end-of-life]( https://www.python.org/dev/peps/pep-0373/) in January of 2020, and since then several of tuf's direct and transient dependencies have stopped supporting Python 2.7. ### Added * Added Architectural Decisions Records (ADRs) for: * where to develop python-tuf 1.0 (#1220) * to justify the extent of OOP in the metadata model (#1229) * to decide on a Python code style guide (#1232) ### Changed * Switch to GitHub Actions for CI (#1242, #1283, #1252) * Switch to only running bandit on Python versions greater than 3.5 (#1234) * Bump dependencies: requests (#1245), chardet (#1239), urllib3 (#1268), cffi (#1280), securesystemslib (#1285), cryptography (#1282, #1286). **NOTE**: the latest version of cryptography is no longer used on Python 2, as that is not supported. 
* Moved from dependabot-preview to GitHub native Dependabot (#1258) * Configure dependabot to ignore idna, as it breaks Python 2.7 builds (#1259) * Install securesystemslib in tox in non-editable mode (#1228) * Change the editable venv installation order (#1271) ### Fixed * Updated expiration check in Updater to better match the specification (#1235) * Ensure tempfile's are closed in Updater (#1226) ### Removed * Dropped support for Python 3.5 (#1238) ## v0.16.0 ### Added * Begin to document architectural and project-wide decisions as Architectural Decision Records (ADRs) in docs/adr (#1182, #1203) * Add Python 3.9 to the CI test matrix (#1200) * Implement a class for Root metadata in the simple TUF role metadata model in `tuf.api` (#1193) ### Changed * Bump dependencies: cryptography (#1189, #1190), requests (#1210), urllib (#1212), cffi (#1222), certifi (#1201), securesystemslib (#1191) * Simplify the test runner (`aggregate_tests`) and stop executing unit test modules in a random order (#1187) * Speed up indefinite freeze tests by removing `sleep()` calls (#1194) * Adapt to securesystemslib changes in key generation interfaces (#1191) * Migrate from travis-ci.org to travis-ci.com (#1208) * Make metadata signatures ordered by keyid, to ensure deterministic signature ordering in metadata files (#1217) * Improve test reliability by using thread-safe `Queue`s, rather than files, for process communication (#1198) * Avoid reading an entire target file into memory when generating target file hashes in `tuf.client.updater` (#1219) * Remove use of an empty list (`[]`) as the default argument in a test function (#1216) * Simplified updater logic for downloading and verifying target files (#1202) ### Fixed * Fix threshold computation in `_verify_root_self_signed()` such that signatures by the same root key count only once towards the threshold (#1218) ## v0.15.0 ### Added * Simple TUF role metadata model in the `tuf.api` package for interacting with metadata files directly, per-file without the overheads of reading and writing the entire repository at once (#1112, #1177, #1183) * Raise `MissingLocalRepositoryError` in updater when local repository can not be found (#1173) * Tests for targets metadata generation with existing `fileinfo` (#1078) * Test-verbosity documentation (#1151) ### Changed * Raise an error in `tuf.client.updater` when metadata is loaded without a signature (#1100) * Print a warning in `tuf.repository_tool` when metadata is written without a signature (#1100) * Remove iso8661 dependency (#1176) * Bump dependencies: cffi (#1146), cryptography (#1149), urllib (#1179), securesystemslib (#1183) * Overhauled logging to be less verbose and less alarming, by removing logging in the library when an exception is raised (including the same information that was logged) and using more appropriate log levels (#1145) * Make test output more useful by reducing and improving logging (#1145, #1104, #1170) * Make the `targets_path`, `metadata_path` and `confined_target_dirs` fields in `tuf.client.updater`s mirror configuration optional (#1153, #1166) * Include LICENSE files with source distributions (#1162) * Update Python version to be used in release instructions (#1163) * Remove direct use of `colorama` and dependency (#1180) ### Fixed * Ensure file objects and `requests.Responses` are closed during tests (#1147) * Auto-test against `securesystemslib` head of development (#1185) * Fix parameter name in `tuf.repository_lib` error message (#1078) ## v0.14.0 ### Added * Added a mechanism to the 
Updater to disable the hash prefix for target files even when `consistent_snapshot` is enabled for a repository (#1102) ### Changed * Updater now uses keyids provided in the metadata, rather than re-calculating keyids using `keyid_hash_algorithms` (#1014, #1121) * When loading an existing repository the keyids provided in the metadata will be used, rather than re-calculating keyids using `keyid_hash_algorithms` (#1014, #1121) * Improve reliability and performance of tests by removing sleep calls, instead using polling to check whether the simple_server is ready to accept connections (#1096) * Only calculate lengths and hashes of files listed by timestamp and snapshot metadata when those lengths and hashes will be included in the metadata (#1097) * Re-raise chained exceptions explicitly per PEP 3134 (#1116) * Remove use of `securesystemslib.settings.HASH_ALGORITHMS`, instead pass desired algorithms explicitly to securesystemslib's `keys.format_metadata_to_key` (#1016) ### Fixed * Better adhere to the detailed client workflow in the specification by ensuring that a newly downloaded root metadata file is verified with a threshold of its own signatures (#1101) * Update a delegating role's metadata when adding a new verification key to a delegated role (#1037) ## v0.13.0 ### Added * Add support for BLAKE hash functions (#993) * Don't list root metadata in snapshot metadata, per latest spec (#988) * Enable targets metadata to be generated without access to the target files (#1007, #1020) * Implement support for abstract files and directories (#1024, #1034) * Make lengths and hashes optional for timestamp and snapshot roles (#1031) ### Changed * Revise requirements files to have layered requirements (#978, #982) * Update tutorial instructions (#981, #992) and documentation (#1054, #1001) * Replace hard-coded logger names (#989) * Fix target file path hashing to ensure paths are hashed as they appear in targets metadata (#1007) * Refactor code handling hashed bins (#1007, #1013, #1040, #1058) * Improve performance when delegating to a large number of hashed bins (#1012) * Improve path handling consistency when adding targets and paths (#1008) * Clarify error message and docstring for custom parameter of add_target() (#1027) * Ensure each key applies to signature threshold only once (#1091) ### Fixed * Fix broken CI (#985) * Fix tests (#1029, #1064, #1067) * Fix loading of delegated targets during repository load (#1049, #1052, #1071) * Fix key loading in repo.py (#1066) * Remove redundant code in downloader (#1073) * Fix alarming logging in updater (#1092) ## v0.12.2 * Fix incorrect threshold signature computation (#974) * Drop support for Python 3.4 (#966) * Improve documentation (#970, #960, #962, #961, #972) * Improve test suite and tutorial scripts (#775) ## v0.12.1 * Relax spec version format check for backwards compatibility (#950) * Update project metadata (#937, #939, #944, #947, #948, #953, #954) * Update misc dependencies (#936, #941, #942, #945, #956) ## v0.12.0 * Add backwards incompatible TUF spec version checks (#842, #844, #854, #914) * Adopt securesystemslib v0.12.0 update (#909, #910, #855, #912, #934) * Fix multi-root rotation (#885, #930) * Fix duplicate schema definitions (#929) * Refactor metadata generation (#836) * Refactor securesystemslib interface (#919) * Update implementation roadmap (#833) * Improve tests and testing infrastructure (#825, #839, #890, #915, #892, #923) * Improve documentation (#824, #849, #852, #853, #893, #924, #928, et al.)
* Update misc dependencies (#850, #851, #916, #922, #926, #931) ## v0.11.1 * Prevent persistent freeze attack (pr [#737](https://github.com/theupdateframework/python-tuf/pull/737)). * Add --no-release option to CLI. * Issue deprecation warning for all_targets() and targets_of_role(). * Disable file logging, by default. * Tweak network settings (in settings.py) for production environments. * Add tuf.log.enable_file_logging() and tuf.log.disable_file_logging(). * Replace %xx escapes in URLs. * Support Appveyor (for Windows) with Continuous Integration. * Run unit tests in Python 3.4 & 3.5 under Appveyor. * Edit contact text to encourage users to report issues with specification. * Generate (w/ CLI) Ed25519 keys, by default. * Upgrade dependencies to latest versions. * Add requirements.in, which is used to generate the other requirement files. * Update list of adopters. * Convert README to Markdown. * Update installation instructions to note SSLib's optional dependencies that should be installed to support RSA, ECDSA, etc. keys. * Add unit test for persistent freeze attack. * Update list of tasks in ROADMAP.md. ## v0.11.0 Note: This is a backwards-incompatible pre-release. * Make significant improvements to execution speed of updater. * Resolve all of the unit test failures in Windows. * Add or revise many CLI options. - Add --revoke - Support ECDSA, RSA, and Ed25519 keys - Fully support delegated roles - Revise help descriptions - Allow 2+ roles to delegate to the same role - Add --remove - Add --trust - Remove obsolete code - Add --distrust - Allow any top-level role to be signed - Allow multiple signing keys with --sign - Rename default directories - etc. * Revise CLI documentation, such as QUICKSTART.md. * Ensure consistent behavior between add_targets() and add_target(). * Add a CLI doc that demonstrates more complex examples. * Move LICENSE files to the root directory. * Update dependencies. * Update TUTORIAL.md to fix links. * Fix bug where the latest consistent metadata is not loaded. * Modify the pyup update schedule from daily to weekly. * Add hashes to requirements.txt. * Update AUTHORS.txt and add organizations. * Replace deprecated 'cryptography' functions. * Remove dependency in dev-requirements.txt that causes an error. * Ensure that the latest consistent metadata is added to Snapshot. * Tweak a few logger and exception messages. * Revise introductory text in README. * Update ADOPTERS.md and link to pages that cover each adoption. * Remove target paths in metadata that contain leading path separators. * Address Pylint/Bandit warnings for the CLI modules. * Replace calls to deprecated 'imp' module. * Fix bug where the hashing algorithms used to generate local KEYIDs do not match the ones chosen by the repo. * Fix bug in tuf.sig.get_signature_status() where a given threshold is not used. * Refactor code that stores the previous keyids of a role. ## v0.10.2 Note: This is a backwards-incompatible pre-release. * Support TAP 4 (multiple repository consensus on entrusted targets). https://github.com/theupdateframework/taps/blob/master/tap4.md * Add quick start guide. * Add CLI (repo.py) to create and modify repositories. * Refactor client CLI (client.py). * Add pyup.io to manage dependencies. * Update all dependencies to their latest versions. * Add Pylint and Bandit (security) linters to Travis CI. Fix issues reported by both linters. * Tidy up documentation and directory structure.
* Add option to exclude custom field when returning valid targetinfo with MultiRepoUpdater.get_valid_targetinfo(). * Fix PGP key fingerprint provided for security vulnerability reports. * Modify API for creating delegations. * Add wrapper functions for securesystemslib functions. * Fix bug where non-default repository names raise an exception. * Refactor modules to fix inconsistent use of whitespace and indentation. * Add cryptographic functions to read and write keys from memory. * Add full support for ECDSA keys. List `ecdsa-sha2-nistp256` in specification. * Remove example metadata. Documentation now points to up-to-date metadata in the tests directory. * Remove all references to PyCrypto. * Add copyright and license to all modules. * Add README for the unit tests. * Remove remnants of the compressed metadata feature (now discontinued). * Fix minor issues such as broken links, typos, etc. * Update configuration files to fix issues, such as duplicate upgrade commands, badges, etc. * Revise policy on static code analysis, CI, etc. * Earn CII Best Practices Badge. * Reach 98% score for CII Silver Badge. * Remove obsolete code, such as tufcli.py, interposition, check_crypto_libraries(), etc. ## v0.10.1 Note: This is a backwards-incompatible pre-release. * Add CHANGELOG.md, MAINTAINERS.txt, CODE-OF-CONDUCT.md, GOVERNANCE.md, ADOPTERS.md, DCO requirements, and instructions for submitting a vulnerability report. * Move specification to github.com/theupdateframework/specification. * Dual license the project: MIT license and Apache license, version 2. * Update to latest version of securesystemslib v0.10.8, which dropped PyCrypto and multi-lib support. * Add ecdsa-sha2-nistp256 to specification. * Remove directory of example metadata. Documentation now references unit test metadata. * Implement TAP 9 (mandatory metadata signing schemes). https://github.com/theupdateframework/taps/blob/master/tap9.md * Drop support for Python 2.6 and 3.3. * Support Python 3.6. * Improve code coverage to 99%. * Convert specification from text to Markdown format. * Add MERCURY paper, which covers protection against rollback attacks. * Implement TAP 6 (include specification version in metadata). * Implement TAP 10 (remove native support for compressed metadata). * Support ability to append an externally-generated signature to metadata. * Remove capitalization from rolenames listed in metadata. * Add a more detailed client workflow to specification. * Modify client workflow: A client must now fetch root first. Intermediate versions of Root must also be downloaded and verified by the client. See specification for modified workflow. * Fix bug with key IDs, where an incorrect number of key IDs is detected. * Minor bug fixes, such as catching correct type and number of exceptions, detection of slow retrieval attack, etc. * Do not list Root's hash and length in Snapshot (only its version number). * Allow user to configure hashing algorithm used to generate hashed bin delegations. * Fix Markdown errors in SECURITY.md. * Add fast-forward attack to specification. * Remove simple-settings dependency. * Move crypto-related code to external library (securesystemslib). * Allow replacement of already listed targets in metadata. Fix issue #319. * Add instructions for contributors in README. * Copy (rather than link) target file to consistent target. Fix issue #390. * Rename target() -> get_one_valid_targetinfo(). * Ensure consistent Root is written if consistent snapshot = False. Fix issue #391.
* repository_tool.status(): Print status of only the top-level roles. * Document and demonstrate protection against repository attacks. * Add installation instructions for Fedora-based environments. * Exclude "private" dict key from metadata. * "backtrack" attribute renamed to "terminating". * Fix data loss that might occur during sudden power failure. Pull requests #365, #367. * Add repository tool function that can mark roles as dirty. * Store all delegated roles in one flat directory. * Support Unix shell-style wildcards for paths listed in metadata. * Add draft of specification (version 1.0). * Sleep a short while during the download.py while loop to release CPU. * Support multiple key ID hashing algorithms. * Prepend version number to filename of consistent metadata. * Remove updater method: refresh_targets_metadata_chain(). * Add Diplomat paper. It covers integrating TUF with community repositories. * Add project logo. * Delegations now resemble a graph, rather than a tree. ## v0.10.0 @vladimir-v-diaz vladimir-v-diaz released this on Jan 22, 2016 · 879 commits to develop since this release * Fix Python 3 str<->bytes issues * Drop support for Python 3.2 * Support Python 3.5 * Fix for Issue #244 (hash, rather than hash algorithm, should be prepended to consistent targets) ## TUF v0.9.9 @vladimir-v-diaz vladimir-v-diaz released this on Jul 23, 2014 · 1058 commits to develop since this release * Support externally created PEM files. Previous release generated an unexpected keyid for the external public key because of trailing whitespace, which did not match the format of internally generated keys saved to metadata. * Fix installation instructions. Non-wheel installation instruction listed an invalid command-line option to pip (-no-use-wheel, which is missing a leading hyphen.) * Add paragraph to Using TUF section of the README. ## TUF v0.9.8 @vladimir-v-diaz vladimir-v-diaz released this on Jul 16, 2014 · 1069 commits to develop since this release * TUF 0.9.8 (pre-release) ## TUF v0.7.5 @trishankkarthik trishankkarthik released this on Sep 21, 2013 · 1877 commits to develop since this release * TUF 0.7.5 (pre-release) python-tuf-5.1.0/docs/CODE-OF-CONDUCT.md000066400000000000000000000002561470074210500172200ustar00rootroot00000000000000## The Update Framework Community Code of Conduct The Update Framework follows the [CNCF Code of Conduct](https://github.com/cncf/foundation/blob/master/code-of-conduct.md) python-tuf-5.1.0/docs/CODEOWNERS000066400000000000000000000000541470074210500161540ustar00rootroot00000000000000* @theupdateframework/python-tuf-maintainerspython-tuf-5.1.0/docs/CONTRIBUTING.rst000066400000000000000000000043761470074210500172330ustar00rootroot00000000000000Instructions for contributors
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

Contribute to python-tuf by submitting pull requests against the "develop" branch of this repository. Detailed instructions are available in our `development guidelines `_. All submitted code should follow our `style guidelines `_ and must be `unit tested <#unit-tests>`_.

.. note:: Also see `development installation instructions `_.

DCO
===

Contributors must indicate acceptance of the `Developer Certificate of Origin `_ by appending a ``Signed-off-by: Your Name `` to each git commit message (see `git commit --signoff `_).

Testing
=======

With `tox `_ the whole test suite can be executed in a separate *virtual environment* for each supported Python version available on the system. ``tuf`` and its dependencies are installed automatically for each tox run.
::

    tox

Below, you will see more details about each step managed by ``tox``, in case you need to debug or run steps outside ``tox``.

Unit tests
----------

More specifically, the Update Framework's test suite can be executed by invoking the test aggregation script inside the *tests* subdirectory. ``tuf`` and its dependencies must already be installed.

::

    cd tests/
    python3 aggregate_tests.py

Individual tests can also be executed. Optional ``-v`` flags can be added to increase log level up to DEBUG (``-vvvv``).

::

    cd tests/
    python3 test_updater_ng.py -v

Coverage
--------

To run the tests and measure their code coverage, the aggregation script can be invoked with the ``coverage`` tool (requires installation of ``coverage``, e.g. via PyPI).

::

    cd tests/
    coverage run aggregate_tests.py && coverage report

Auto-formatting
---------------

The linter in CI/CD will check that new TUF code is formatted with `ruff `_. Auto-formatting can be done on the command line:

::

    tox -e fix

python-tuf-5.1.0/docs/GOVERNANCE.md000066400000000000000000000073351470074210500165430ustar00rootroot00000000000000# TUF governance This document covers the project's governance and committer process. The project consists of the TUF [specification](https://github.com/theupdateframework/specification) and [reference implementation](https://github.com/theupdateframework/python-tuf). ## Maintainership and Consensus Builder The project is maintained by the people indicated in [MAINTAINERS](MAINTAINERS.txt). A maintainer is expected to (1) submit and review GitHub pull requests and (2) open issues or [submit vulnerability reports](https://github.com/theupdateframework/python-tuf#security-issues-and-bugs). A maintainer has the authority to approve or reject pull requests submitted by contributors. More significant changes in the project, such as those that require a TAP or changes in governance, are guided by a maintainer called the Consensus Builder (CB). The project's Consensus Builder (CB) is Justin Cappos, who has a lifetime appointment. ## Contributions [A contributor can submit GitHub pull requests](CONTRIBUTING.rst) to the project's repositories. They must follow the project's [code of conduct](CODE-OF-CONDUCT.md), the [developer certificate of origin](https://developercertificate.org/), the [code style guidelines](https://github.com/secure-systems-lab/code-style-guidelines), and must unit test any new software feature or change. Submitted pull requests undergo review and automated testing, including, but not limited to: * Unit and build testing via [GitHub Actions](https://github.com/theupdateframework/python-tuf/actions) and [Tox](https://tox.readthedocs.io/en/latest/). * Static code analysis via [Pylint](https://www.pylint.org/) and [Bandit](https://wiki.openstack.org/wiki/Security/Projects/Bandit). * Auto-formatting with [black](https://black.readthedocs.io/) and [isort](https://pycqa.github.io/isort/). * Checks for Signed-off-by commits via [Probot: DCO](https://github.com/probot/dco). * Review by one or more [maintainers](MAINTAINERS.txt). A contributor can propose changes to the specification with a [TUF Augmentation Proposal](https://github.com/theupdateframework/taps) (TAP). It is a design document providing information to the TUF community, or describing a new feature for TUF or its processes or environment. A [TAP](TAP.rst) can be approved or rejected by the CB after it has been reviewed and discussed.
Discussions take place on the project's [mailing list](https://groups.google.com/forum/?fromgroups#!forum/theupdateframework) or the TAPs GitHub issue tracker. ## Changes in maintainership A contributor to the project must express interest in becoming a maintainer. The CB has the authority to add or remove maintainers. ## Changes in governance The CB supervises changes in governance, but a majority of maintainers must vote +1 on the PR. ## Changes in the consensus builder The consensus builder may be appointed for a fixed term or it may be a lifetime appointment. To initiate a change of consensus builder, or a change in the length of the appointment, a GitHub PR must be opened. If a fixed term is specified, the PR should be opened no earlier than 6 weeks before the end of the CB's term. If there is not a fixed term appointment, the PR may be opened at any time. In either case, the PR must be kept open for no less than 4 weeks. Additionally, the PR can only be merged with more +1 than -1 in the binding votes. Anyone from the community can vote on the PR with either +1 or -1. Only votes from maintainers that have been listed in the top-level [MAINTAINERS](MAINTAINERS.txt) file before the PR is opened are binding. When there are conflicting PRs about changes in the consensus builder, the PR with the most binding +1 votes is merged. The consensus builder can volunteer to step down. python-tuf-5.1.0/docs/INSTALLATION.rst000066400000000000000000000063061470074210500172200ustar00rootroot00000000000000Installation
============

All versions of ``python-tuf`` can be installed from `PyPI `_ with `pip `_.

::

    python3 -m pip install tuf

By default tuf is installed as a pure Python package with limited cryptographic abilities. See `Install with full cryptographic abilities`_ for more options.

Install with full cryptographic abilities
-----------------------------------------

Default installation supports signature verification only, using a pure Python *ed25519* implementation. While this allows operating a *basic client* on almost any computing device, you will need additional cryptographic abilities for *repository* code, i.e. key and signature generation, additional algorithms, and more performant backends. Opt-in is available via ``securesystemslib``.

.. note:: Please consult the underlying crypto backend installation docs -- `cryptography `_ and `pynacl `_ -- for possible system dependencies.

::

    python3 -m pip install securesystemslib[crypto,pynacl] tuf

Install for development
-----------------------

To install tuf in editable mode together with development dependencies, `clone `_ the `python-tuf repository `_ from GitHub, change into the project root directory, and install with pip (using `venv `_ is recommended).

.. note:: Development installation will `Install with full cryptographic abilities`_. Please check above for possible system dependencies.

::

    python3 -m pip install -r requirements/dev.txt

Verify release signatures
-------------------------

Releases on PyPI are signed with a maintainer key using `gpg `_ (see `MAINTAINERS.txt `_ for key fingerprints). Signatures can be downloaded from the `GitHub release `_ page (look for *\*.asc* files in the *Assets* section). The code below shows how to verify the signature of a `built `_ distribution, signed by the maintainer *Lukas PĂĽhringer*. It works the same way for `source `_ distributions.
::

    # Get wheel from PyPI and signature from GitHub
    python3 -m pip download --no-deps tuf==0.20.0
    wget https://github.com/theupdateframework/python-tuf/releases/download/v0.20.0/tuf-0.20.0-py3-none-any.whl.asc

    # Get public key, compare fingerprint in MAINTAINERS.txt, and verify with gpg
    gpg --recv-keys 89A2AD3C07D962E8
    gpg --verify tuf-0.20.0-py3-none-any.whl.asc

    # Output:
    # gpg: assuming signed data in 'tuf-0.20.0-py3-none-any.whl'
    # gpg: Signature made Thu Dec 16 09:21:38 2021 CET
    # gpg: using RSA key 8BA69B87D43BE294F23E812089A2AD3C07D962E8
    # gpg: Good signature from "Lukas PĂĽhringer " [ultimate]

python-tuf-5.1.0/docs/MAINTAINERS.txt000066400000000000000000000022531470074210500170770ustar00rootroot00000000000000The project is currently managed by Justin Cappos at New York University. Please see GOVERNANCE.md for the project's governance and maintainership.

Consensus Builder:
Justin Cappos
Email: jcappos@nyu.edu
GitHub username: @JustinCappos
PGP fingerprint: E9C0 59EC 0D32 64FA B35F 94AD 465B F9F6 F8EB 475A

Maintainers:
Marina Moore
Email: mm9693@nyu.edu
GitHub username: @mnm678

Lukas Puehringer
Email: lukas.puehringer@nyu.edu
GitHub username: @lukpueh
PGP fingerprint: 8BA6 9B87 D43B E294 F23E 8120 89A2 AD3C 07D9 62E8

Joshua Lock
Email: joshua.lock@uk.verizon.com
GitHub username: @joshuagl
PGP fingerprint: 08F3 409F CF71 D87E 30FB D3C2 1671 F65C B748 32A4
Keybase username: joshuagl

Jussi Kukkonen
Email: jkukkonen@google.com
GitHub username: @jku
PGP fingerprint: 1343 C98F AB84 859F E5EC 9E37 0527 D8A3 7F52 1A2F

Kairo de Araujo
Email: kairo@dearaujo.nl
GitHub username: @kairoaraujo
PGP fingerprint: FFD5 219E 49E0 06C2 1D9C 7C89 F26E 23EE 723E C8CA

Emeritus Maintainers:
Santiago Torres-Arias
Sebastien Awwad
Teodora Sechkova
Trishank Karthik Kuppusamy (NYU, Datadog)
Vladimir Diaz

python-tuf-5.1.0/docs/RELEASE.md000066400000000000000000000050241470074210500161650ustar00rootroot00000000000000# Release process **Prerequisites (one-time setup)** 1. Enable "Trusted Publishing" in PyPI project settings * Publisher: GitHub * Owner: theupdateframework * Project: python-tuf * Workflow: cd.yml * Environment: release 1. Go to [GitHub settings](https://github.com/theupdateframework/python-tuf/settings/environments), create an [environment](https://docs.github.com/en/actions/deployment/targeting-different-environments/using-environments-for-deployment#creating-an-environment) called `release` and configure [review protection](https://docs.github.com/en/actions/deployment/targeting-different-environments/using-environments-for-deployment#required-reviewers). ## Release 1. Ensure `docs/CHANGELOG.md` contains a one-line summary of each [notable change](https://keepachangelog.com/) since the prior release 2. Update `tuf/__init__.py` to the new version number `A.B.C` 3. Create a PR with updated `CHANGELOG.md` and version bumps ➔ Review PR on GitHub 4. Once the PR is merged, pull the updated `develop` branch locally 5. Create a signed tag for the version number on the merge commit `git tag --sign vA.B.C -m "vA.B.C"` 6. Push the tag to GitHub `git push origin vA.B.C` *A tag push triggers the [CD workflow](https://github.com/theupdateframework/python-tuf/blob/develop/.github/workflows/cd.yml), which runs the tests, builds source dist and wheel, creates a preliminary GitHub release under `vA.B.C-rc`, and pauses for review.* 7. Run `verify_release --skip-pypi` locally to make sure a build on your machine matches the preliminary release artifacts published on GitHub.
➔ [Review *deployment*](https://docs.github.com/en/actions/managing-workflow-runs/reviewing-deployments) on GitHub *An approval resumes the CD workflow to publish the release on PyPI, and to finalize the GitHub release (removes `-rc` suffix and updates release notes).* 8. Run `verify_release` to make sure the PyPI release artifacts match the local build as well. When called as `verify_release --sign []` the script additionally creates gpg release signatures. When signed by maintainers with a corresponding GPG fingerprint in the MAINTAINERS.md file, these signature files should be made available on the GitHub release page under Assets. 9. Announce the release on [#tuf on CNCF Slack](https://cloud-native.slack.com/archives/C8NMD3QJ3) 10. Ensure [POUF 1](https://github.com/theupdateframework/taps/blob/master/POUFs/reference-POUF/pouf1.md), for the reference implementation, is up-to-date python-tuf-5.1.0/docs/SECURITY.md000066400000000000000000000013301470074210500163500ustar00rootroot00000000000000# Security Issues and Bugs Security issues can be reported to maintainers [privately via GitHub](https://docs.github.com/en/code-security/security-advisories/guidance-on-reporting-and-writing/privately-reporting-a-security-vulnerability): - [**Report new vulnerability**](https://github.com/theupdateframework/python-tuf/security/advisories/new) Please do not use the GitHub issue tracker to submit vulnerability reports. The issue tracker is intended for bug reports and feature requests. Major feature requests, such as design changes to the specification, should be proposed via a [TUF Augmentation Proposal](https://theupdateframework.github.io/specification/latest/#tuf-augmentation-proposal-tap-support) (TAP). python-tuf-5.1.0/docs/_config.yml000066400000000000000000000007041470074210500167120ustar00rootroot00000000000000title: Python-TUF
author: Python-TUF community
#email: your-email@domain.com
description: > # for footer and for search engines
  Development blog for Python-TUF, a supply chain security framework
  for secure content delivery and updates.
github_username: theupdateframework
show_excerpts: true # set to false to remove excerpts on the homepage
header_pages: # make sure ordinary docs are not linked from blog header
  - index.md
theme: minima
python-tuf-5.1.0/docs/_posts/000077500000000000000000000000001470074210500160715ustar00rootroot00000000000000python-tuf-5.1.0/docs/_posts/2022-02-21-release-1-0-0.md000066400000000000000000000047651470074210500215150ustar00rootroot00000000000000--- title: "Python-TUF reaches version 1.0.0" author: Jussi Kukkonen and Lukas PĂĽhringer --- The Python-TUF community is proud to announce the release of Python-TUF 1.0.0. The release, which is available on [PyPI](https://pypi.org/project/tuf/) and [GitHub](https://github.com/theupdateframework/python-tuf/), introduces new stable and more ergonomic APIs. Python-TUF is the reference implementation of [The Update Framework](https://theupdateframework.io/) specification, an open source framework for securing content delivery and updates. It protects against various types of supply chain attacks and provides resilience to compromise. For the past 7 releases the project has introduced new designs and implementations, which have gradually formed two new stable APIs: - [`ngclient`](https://theupdateframework.readthedocs.io/en/latest/api/tuf.ngclient.html): A client API that offers a robust internal design providing implementation safety and flexibility to application developers.
- [`Metadata API`](https://theupdateframework.readthedocs.io/en/latest/api/tuf.api.html): A low-level interface for both consuming and creating TUF metadata. Metadata API is a flexible and easy-to-use building block for any higher level tool or library. Python-TUF 1.0.0 is the result of a comprehensive rewrite of the project, removing several hard to maintain modules and replacing them with safer and easier to use APIs: - The project was reduced from 4700 lines of hard to maintain code to 1400 lines of modern, maintainable code - The implementation details are now easier to reason about, which should accelerate future improvements on the project - Metadata API provides a solid base to build other tools on top of – as proven by the ngclient implementation and the [repository code examples](https://github.com/theupdateframework/python-tuf/tree/develop/examples/repository) - Both new APIs are highly extensible and allow application developers to include custom network stacks, file storage systems or public-key cryptography algorithms, while providing easy-to-use default implementations With this foundation laid, Python-TUF developers are currently planning next steps. At the very least, you can expect improved repository-side tooling, but we're also open to new ideas. Pop in to [#tuf](https://cloud-native.slack.com/archives/C8NMD3QJ3) on CNCF Slack or [GitHub issues](https://github.com/theupdateframework/python-tuf/issues/new) and let’s talk. python-tuf-5.1.0/docs/_posts/2022-05-04-ngclient-design.md000066400000000000000000000111371470074210500225160ustar00rootroot00000000000000--- title: "What's new in Python-TUF ngclient?" author: Jussi Kukkonen --- We recently released a new TUF client implementation, `ngclient`, in Python-TUF. This post explains why we ended up doing that when a client already existed. # Simpler implementation, "correct" abstractions The legacy code had a few problems that could be summarized as non-optimal abstractions: Significant effort had been put into code re-use, but not enough attention had been paid to ensure the expectations and promises of that shared code were the same in all cases of re-use. This, combined with Python's type ambiguity, use of dictionaries as "blob"-like data structures and extensive use of global state, meant touching the shared functions was a gamble: there was no way to be sure something wouldn't break. During the redesign, we really concentrated on finding abstractions that fit the processes we wanted to implement. It may be worth mentioning that in some cases this meant abstractions that have no equivalent in the TUF specification: some of the issues in the legacy implementation look like the result of mapping the TUF specification's [_Detailed client workflow_](https://theupdateframework.github.io/specification/latest/#detailed-client-workflow) directly into code. Here are the core abstractions we ended up with (number of lines of code in parentheses to provide a bit of context, alongside links to sources and docs): * `Metadata` (900 SLOC, [docs](https://theupdateframework.readthedocs.io/en/latest/api/tuf.api.html)) handles everything related to individual pieces of TUF metadata: deserialization, signing, and verifying * `TrustedMetadataSet` (170 SLOC) is a collection of local, trusted metadata.
It defines rules for how new metadata can be added into the set and ensures that metadata in it is always consistent and valid: As an example, if `TrustedMetadataSet` contains a targets metadata, the set guarantees that the targets metadata is signed by trusted keys and is part of a currently valid TUF snapshot * `Updater` (250 SLOC, [docs](https://theupdateframework.readthedocs.io/en/latest/api/tuf.ngclient.updater.html)) makes decisions on what metadata should be loaded into `TrustedMetadataSet`, both from the local cache and from a remote repository. While `TrustedMetadataSet` always raises an exception if a metadata is not valid, `Updater` considers the context and handles some failures as a part of the process and some as actual errors. `Updater` also handles persisting validated metadata and targets onto local storage and provides the user-facing API * `FetcherInterface` (100 SLOC, [docs](https://theupdateframework.readthedocs.io/en/latest/api/tuf.ngclient.fetcher.html)) is the abstract file downloader. By default, a Requests-based implementation is used but clients can use custom fetchers to tweak how downloads are done No design is perfect but so far we're quite happy with the above split. It has dramatically simplified the implementation: The code is subjectively easier to understand but also has significantly lower code branching counts for the same operations. # PyPI client requirements A year ago we added TUF support into pip as a prototype: this revealed some design issues that made the integration more difficult than it needed to be. As the potential pip integration is a goal for Python-TUF, we wanted to smooth those rough edges. The main addition here was the `FetcherInterface`: it allows pip to keep doing all of the HTTP tweaks they have collected over the years. There were a bunch of smaller API tweaks as well: as an example, legacy Python-TUF had not anticipated downloading target files from a different host than it downloads metadata from. This is the design that PyPI uses with pypi.org and files.pythonhosted.org. # Better API Since we knew we had to break API with the legacy implementation anyway, we also fixed multiple paper cuts in the API: * Actual data structures are now exposed instead of dictionary "blobs" * Configuration was removed or made non-global * Exceptions are defined in a way that is useful to client applications # Plain old software engineering In addition to the big-ticket items, the rewrite allowed loads of improvements in project engineering practices. Some highlights: * Type annotations are now used extensively * Coding style is now consistent (and is now a common Python style) * There is a healthy culture of review in the project: the bar for accepting changes is where it should be for a security project * Testing has so many improvements they probably need a blog post of their own These are not `ngclient` features as such but we expect they will show in the quality of products built with it. python-tuf-5.1.0/docs/_posts/2022-06-15-testing-ngclient.md000066400000000000000000000202451470074210500227250ustar00rootroot00000000000000--- title: "Testing Tricky Edge Cases in a TUF Client" author: Ivana Atanasova --- Usually the TUF Specification creates an impression of a simple and straightforward approach to addressing software update systems' security gaps. In the next few paragraphs we'll try to convince you that the devil is in the details.
With the [v1.0.0 release](https://blogs.vmware.com/opensource/2022/02/22/python-tuf-reaches-version-1-0-0/) we can say that the current reference implementation is finally in a good place, although it wouldn’t be so trustworthy without all the awesome test functionality it provides. Therein lie some interesting surprises, for the conformance tests reflect use cases and tricky details that wouldn’t easily come to mind. TUF, in fact, is capable of managing some tricky business! Before looking into them, let’s first introduce the test functionality itself. ## Some repository simulator magic The test suite is heavily based on [RepositorySimulator](https://github.com/theupdateframework/python-tuf/blob/develop/tests/repository_simulator.py), which allows you to play with repository metadata by modifying it, signing and storing new role versions, while serving older ones in the client test code. You can also simulate downloading new metadata from a remote without the need for file access or network connections, and modify expiry dates and time. Even though `RepositorySimulator` hosts repos purely in memory, you can supply the `--dump` flag to write its contents to a temporary directory on the local filesystem with "/metadata/..." and "/targets/..." URL paths that host metadata and targets respectively in order to audit the metadata. The test suite provides you with the ability to see the "live" test repository state for debugging purposes. Let’s cite a specific example of testing expired metadata to demonstrate the cool thing the `RepositorySimulator` provides, i.e. the capability to simulate real repository chains of updates as suggested by the spec, and not just modify individual metadata. More specifically, we would like to simulate a workflow in which a [targets](https://theupdateframework.github.io/specification/latest/#targets) version is being increased and a [timestamp](https://theupdateframework.github.io/specification/latest/#timestamp) expiry date is being changed. We are going to elaborate below on how this can be used to test the `Updater` programmatically. Now, let's just focus on how to verify that the `RepositorySimulator` did what we expected. Let's assume we did the following: * Upgraded `targets` to v2 * Changed `timestamp` v2 expiry date We can verify that the metadata looks as expected, without the need to implement file access. First, we need to find the corresponding temporary directory: ```
$ python3 test_updater_top_level_update.py TestRefresh.test_expired_metadata --dump
Repository Simulator dumps in /var/folders/pr/b0xyysh907s7mvs3wxv7vvb80000gp/T/tmpzvr5xah_
``` Once we know it, we can verify that the metadata has 2 cached versions: ```
$ tree /var/folders/pr/b0xyysh907s7mvs3wxv7vvb80000gp/T/tmpzvr5xah_/test_expired_metadata
/var/folders/pr/b0xyysh907s7mvs3wxv7vvb80000gp/T/tmpzvr5xah_/test_expired_metadata
├── 1
│   ├── 1.root.json
│   ├── snapshot.json
│   ├── targets.json
│   └── timestamp.json
└── 2
    ├── 2.root.json
    ├── snapshot.json
    ├── targets.json
    └── timestamp.json
``` And now we can also see that after bumping the version and moving timestamp v2 expiry date two weeks forward from v1, the corresponding v2 timestamp metadata has recorded that expiry date correctly: Timestamp v1:
```
$ cat /var/folders/pr/b0xyysh907s7mvs3wxv7vvb80000gp/T/tmpzvr5xah_/test_expired_metadata/1/timestamp.json
{
 "signatures": [{...}],
 "signed": {
  "_type": "timestamp",
  "expires": "2022-03-30T00:18:31Z",
  "meta": { "snapshot.json": {"version": 1}},
  "spec_version": "1.0.28",
  "version": 1
 }}
```
Timestamp v2:
```
$ cat /var/folders/pr/b0xyysh907s7mvs3wxv7vvb80000gp/T/tmpzvr5xah_/test_expired_metadata/2/timestamp.json
{
 "signatures": [{...}],
 "signed": {
  "_type": "timestamp",
  "expires": "2022-04-13T00:18:31Z",
  "meta": { "snapshot.json": {"version": 2}},
  "spec_version": "1.0.28",
  "version": 2
 }}
```
As you can see, the first date is 30 Mar and the second is 13 Apr, which is exactly 14 days later. This is a great way to observe what the tests really do and check if they do it successfully. ## When we talk about security, edge cases are the norm Now, let’s take a closer look at two edge cases, using the cool things the `RepositorySimulator` provides: ### Example with expired metadata: Imagine that we have performed an update and stored metadata in a cache, and the locally stored timestamp/snapshot has expired. But we still need it to perform an update from remote by verifying the signatures, and so we need to use the expired timestamp. We can play with versions and expiry to verify that this scenario, not explicitly mentioned in the spec, works correctly and safely. By using the simulator, we can do the following: 1. Set the timestamp expiry one week ahead (to day 7) 2. On the very first day (day 0) download, verify, and load metadata for the [top-level roles](https://theupdateframework.github.io/specification/latest/#roles-and-pki) following the TUF specification order. This is done by simply calling `updater.refresh()`. 3. Then we bump [snapshot](https://theupdateframework.github.io/specification/latest/#update-snapshot) and [targets](https://theupdateframework.github.io/specification/latest/#targets) versions to v2 in the repository on the same day (day 0) 4. Set v2 expiry dates three weeks ahead (to day 21) 5. Travel in time somewhere between day 7 and day 21 6. Perform a successful `refresh` (with `updater.refresh()` call) with the expired locally cached timestamp 7. Check that the final repository version of the snapshot and targets roles is v2. This is a not so obvious use-case to keep in mind when thinking about updates. You can see how it looks in practice in the [reference implementation](https://github.com/theupdateframework/python-tuf/blob/develop/tests/test_updater_top_level_update.py#:~:text=test_expired_metadata). ### Example rollback protection check with expired metadata: Now let’s see if rollback attack protection can be performed when the local timestamp has expired. In this case we need at least two timestamp and snapshot versions, an expired older version of the timestamp, and a verification that a rollback check is performed with the old version. For a timestamp rollback, the case is pretty similar to the use of expired metadata. We can do the following: 1. Set timestamp v1 expiry one week ahead (to day 7) 2. Perform `updater.refresh()` on the very first day 3. Publish timestamp v2 in the repository with expiry three weeks ahead (to day 21) 4. Perform `updater.refresh()` somewhere between day 7 and day 21 5. Verify that the rollback check uses the expired timestamp v1. (For reference, see the implementation [example](https://github.com/theupdateframework/python-tuf/blob/develop/tests/test_updater_top_level_update.py#:~:text=test_expired_timestamp_version_rollback)). A similar approach can be used when testing both timestamp and snapshot rollback protection. We just need to guarantee that after the last snapshot update, the snapshot version is not the latest, in order to verify that a rollback check is performed both with the expired timestamp and an older snapshot. Sounds complicated, but it’s pretty easy with the simulator, and [this example](https://github.com/theupdateframework/python-tuf/blob/develop/tests/test_updater_top_level_update.py#:~:text=test_expired_timestamp_snapshot_rollback) illustrates it pretty well.
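To make the first scenario concrete, below is a rough sketch of how such a test can be driven. It is illustrative rather than a verbatim copy from the test suite: it assumes the `RepositorySimulator` interface from `tests/repository_simulator.py` (the `timestamp`/`snapshot`/`targets` properties, `update_snapshot()`, the `signed_roots` list, and the simulator doubling as the `Updater`'s fetcher), and it glosses over the clock mocking that the real tests use for the time-travel step:

```python
import os
import tempfile
from datetime import datetime, timedelta, timezone

from repository_simulator import RepositorySimulator
from tuf.ngclient import Updater

now = datetime.now(timezone.utc)
sim = RepositorySimulator()

# Day 0: timestamp v1 expires one week ahead
sim.timestamp.expires = now + timedelta(days=7)

# Day 0: trust the initial root, then refresh to cache v1 metadata locally
metadata_dir = tempfile.mkdtemp()
with open(os.path.join(metadata_dir, "root.json"), "wb") as f:
    f.write(sim.signed_roots[0])
Updater(metadata_dir, "https://example.com/metadata/", fetcher=sim).refresh()

# Still day 0: publish snapshot/targets v2, expiring three weeks ahead
sim.targets.version += 1
sim.update_snapshot()  # bumps snapshot and timestamp versions as well
for role in (sim.timestamp, sim.snapshot, sim.targets):
    role.expires = now + timedelta(days=21)

# Day 7..21: pretend time has passed (the real tests mock the clock).
# The locally cached timestamp is now expired, but this refresh must still
# succeed and must end with snapshot and targets at version 2.
Updater(metadata_dir, "https://example.com/metadata/", fetcher=sim).refresh()
```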
## The devil is in the details One of the great things about a reference implementation is that one can learn a lot about the TUF specification by looking at the tests, which are full of examples that would hardly come to mind when you read the abstract, straightforward workflow explained in the spec. And those tests most likely do not cover everything… Do you have a comment about the TUF spec or the cited examples? An idea? Please share it with us! python-tuf-5.1.0/docs/_posts/2022-10-21-python-tuf-security-assessment.md000066400000000000000000000116571470074210500256050ustar00rootroot00000000000000--- title: "Python-tuf source code audit" author: Joshua Lock --- We are pleased to announce completion of a source code audit of the recently refactored python-tuf codebase. # Background In February 2022 the python-tuf team [released version 1.0]( https://theupdateframework.github.io/python-tuf/2022/02/21/release-1-0-0.html ). This release was the product of a significant refactoring effort with the code being rewritten from scratch to provide two new stable APIs: * A low-level interface for creating and consuming TUF metadata * A robust and pluggable client implementation Unifying both of these APIs is a focus on developer ergonomics and flexibility of the API. While the new python-tuf codebase is much leaner, a mere 1,400 lines of code at release, compared to the legacy code’s 4,700 lines, and builds on the lessons learned from development (and developers) on the prior versions of python-tuf, we were very conscious of the fact that our first major release of a security project was made up of newly authored code. To improve our confidence in this newly authored code we engaged with the Open Source Technology Improvement Fund (OSTIF) to have an independent security assessment of the new python-tuf code. OSTIF connected us with the team at X41 D-Sec who performed a thorough source code audit, the results of which we are releasing today. # Results and resolutions The report prepared by X41 included one medium severity and three low severity issues; below we describe how we are addressing each of those reported items. **Private Key World-Readable (TUF-CR-22-01) – Medium** This vulnerability is not in any code called by python-tuf, but was included in demonstrative code the python-tuf team provided to the X41 team. The underlying issue is in [securesystemslib](https://github.com/secure-systems-lab/securesystemslib), a utility library used by python-tuf which provides a consistent interface around various cryptography APIs and related functionality, where files were created with the default permissions of the running process. We resolved this issue by [adding an optional restrict parameter]( https://github.com/secure-systems-lab/securesystemslib/pull/231/files) to the `storage.put()` interface and in the corresponding filesystem implementation of the interface ensuring that, when `restrict=True`, files are created with octal permissions `0o600` (read and write for the user only). This enhancement has been included in the recent release of [securesystemslib 0.25.0]( https://github.com/secure-systems-lab/securesystemslib/releases/tag/v0.25.0 ). **Shallow Build Artifact Verification (TUF-CR-22-02) – Low** The `verify_release` script, run by python-tuf developers as part of the release process and available to users to verify that a release on GitHub or PyPI matches a build of source code from the repository, was only performing a shallow comparison of files.
That is, only the type, size, and modification times were compared. We have [modified the script]( https://github.com/theupdateframework/python-tuf/pull/2122/files ) to perform a deep comparison of the contents and attributes of files being verified. **Quadratic Complexity in JSON Number Parsing (TUF-CR-22-03) – Low** This issue was not in python-tuf itself; rather, the problem was in Python’s built-in json module. Fortunately, we did not need to take any action for this issue as it was independently reported upstream and has been fixed in Python. Find more details in [CVE-2020-10735: Prevent DoS by large int<->str conversions]( https://github.com/python/cpython/issues/95778) on Python’s issue tracker. **Release Signatures Add No Protection (TUF-CR-22-04) – Low** python-tuf releases are built by GitHub Actions in response to a developer pushing a tag. However, before those releases are published to the project’s GitHub releases page and PyPI, a developer must verify (using the `verify_release` script discussed earlier) and approve the release. Part of the approval includes creating a detached signature and including that in the release artifacts. While these do not add any additional protection, we do believe that the additional authenticity signal is worthwhile to users. Furthermore, along with the above notice and the recommendations in the informational notes, we will continue to iterate on our build and release process to provide additional security for users of python-tuf. # Thank you We are extremely grateful to X41 for their thorough audit of the python-tuf code, to [Open Source Technology Improvement Fund](https://ostif.org) (OSTIF) for connecting us with the [X41 D-Sec, GMBH](https://x41-dsec.de) team, and to the [Cloud Native Computing Foundation](https://www.cncf.io) (CNCF) for funding the source code audit – thank you all. Read the full report here: [Source Code Audit on The Update Framework for Open Source Technology Improvement Fund (OSTIF)]( https://theupdateframework.io/audits/x41-python-tuf-audit-2022-09-09.pdf). python-tuf-5.1.0/docs/_posts/2023-01-24-securesystemslib-signer-api.md000066400000000000000000000125541470074210500251060ustar00rootroot00000000000000--- title: New signing API author: Jussi Kukkonen --- > Things should be made as simple as possible – but no simpler. > > _- sometimes attributed to Einstein_ I believe the rule of thumb above stands on its own merit when it comes to software systems, so the credibility of the attribution is not important (it's also possible that we should not take software design advice from a physicist). This post is about the PKI signing API provided by [Securesystemslib](https://github.com/secure-systems-lab/securesystemslib/) and used by applications built with python-tuf. It's an example of how keeping a thing too simple can actually make it more complex. ## The problem with private keys The original `securesystemslib.keys` module is based on the assumption that there are three distinct steps in the lifetime of a private-public keypair in a system like a TUF repository: 1. Generate private and public key 1. Sign with private key 1. Verify signature with public key This all seems logical on paper, but in practice implementing signing for different underlying technologies (like online key vaults and Yubikeys) forces the API surface to grow linearly, and still requires applications to be aware of all the different signing technologies and their configuration. It was clear that something was wrong.
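For context, the three-step assumption translated into an API that looks roughly like this (a sketch against the old dict-based `securesystemslib.keys` module; treat the details as approximate):

```python
from securesystemslib import keys

# 1. Generate: private and public key end up in a single plain dict "blob"
key_dict = keys.generate_ed25519_key()

# 2. Sign with the private key...
signature = keys.create_signature(key_dict, b"payload")

# 3. ...and verify the signature with the public portion of the same dict
assert keys.verify_signature(key_dict, signature, b"payload")
```

Nothing in this picture says how to reach a private key that lives in an online key vault or on a Yubikey: every such backend needed its own additions to the API, and every application had to know about each of them.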
## New signer module In reality there are four distinct events during the lifetime of a signing key. All of these steps can happen on different systems, with different operators and different access to the underlying signing system: 1. Generate private and public keys – _This may happen in securesystemslib but also in an online key vault configuration UI or the Yubikey command line tool_ 1. Store the public key _and the information needed to access the private key_ 1. Sign using the information stored in step 2 1. Verify signature with public key Securesystemslib 0.26 introduces an improved signer API that recognizes this process complexity – and in turn makes managing and signing with keys simpler in practical application development. There are three main changes, all in the `securesystemslib.signer` module that defines Signer and Key classes: * The concept of **Private key URIs** is introduced – this is a relatively simple string that identifies a signing technology and encodes how to access and sign with a specific private key. Examples: - `gcpkms:projects/python-tuf-kms/locations/global/keyRings/git-repo-demo/cryptoKeys/online/cryptoKeyVersions/1` (A Google Cloud KMS key) - `file:/home/jku/keys/mykey?encrypted=true` (A key in an encrypted file) - `hsm:` (A hardware security module like Yubikey) * **Importing** public keys and constructing private key URIs is handled by Signers (there's no generic API though: this detail is specific to signing technology) * **Dynamic dispatch** is added for both Signers and Keys (the former based on the private key URI, the latter on the key content): As a result, application code does not need to care about the specific technology used to sign/verify but securesystemslib can still support a wide array of signing methods -- and this support can even be extended with out-of-tree implementations. ## Code examples These examples are slightly simplified copies from my latest repository implementation and should represent any new application code using the python-tuf Metadata API in the future[^1]. Some things to note in these examples: * Application code that signs does not care what signing technology is used * Public key import (and related private key URI construction) is specific to the underlying signing technology * Private key URIs can be stored wherever it makes sense for the specific application ### Example 1: Online key in a KMS Here’s an example where the private key URI is stored in a custom field in the metadata (this makes sense for online keys). First, the setup code that imports a key from Google Cloud KMS – this code runs in a repository maintainer tool: ```python
def import_google_cloud_key() -> Key:
    gcp_key_id = input("Please enter the Google Cloud KMS key id")
    uri, key = GCPSigner.import_(gcp_key_id)
    # embed the uri in the public key metadata
    key.unrecognized_fields["x-online-uri"] = uri
    return key
``` Then signing with the same key – this code runs in the online repository component and only needs the public key as an argument since we embedded the private key URI in the public key metadata.
It does require the `cloudkms.signer` role permissions on Google Cloud though: ```python
def sign_online(self, md: Metadata, key: Key) -> None:
    uri = key.unrecognized_fields["x-online-uri"]
    signer = Signer.from_priv_key_uri(uri, key)
    md.sign(signer)
``` ### Example 2: Maintainer key on a Yubikey This time we're importing the maintainer's Yubikey: ```python
def import_yubikey(config: ConfigParser) -> Key:
    input("Insert your HW key and press enter")
    uri, key = HSMSigner.import_()
    # store the uri in application configuration
    config["keyring"][key.keyid] = uri
    return key
``` Later we sign with the Yubikey: ```python
def sign_local(md: Metadata, key: Key, config: ConfigParser) -> None:
    uri = config["keyring"][key.keyid]
    signer = Signer.from_priv_key_uri(uri, key)
    md.sign(signer)
``` [^1]: The new signer API is not used in python-tuf quite yet: follow Pull Request [#2165](https://github.com/theupdateframework/python-tuf/pull/2165) to see when the support is merged.python-tuf-5.1.0/docs/adr/000077500000000000000000000000001470074210500153305ustar00rootroot00000000000000python-tuf-5.1.0/docs/adr/0000-use-markdown-architectural-decision-records.md000066400000000000000000000014041470074210500267240ustar00rootroot00000000000000# Use Markdown Architectural Decision Records * Status: accepted * Date: 2020-10-20 Technical Story: https://github.com/theupdateframework/python-tuf/issues/1141 ## Context and Problem Statement We want to record architectural decisions made in this project. Which format and structure should these records follow? ## Considered Options * [MADR](https://adr.github.io/madr/) 2.1.2 – The Markdown Architectural Decision Records * Formless – No conventions for file format and structure ## Decision Outcome Chosen option: "MADR 2.1.2", because * Implicit assumptions should be made explicit. Design documentation is important to enable people to understand the decisions later on. * The MADR structure is comprehensible and facilitates usage & maintenance. python-tuf-5.1.0/docs/adr/0001-python-version-3-6-plus.md000066400000000000000000000032101470074210500225140ustar00rootroot00000000000000# Default to Python 3.6 or newer for new development * Status: accepted * Date: 2020-10-20 Technical Story: https://github.com/theupdateframework/python-tuf/issues/1125 ## Context and Problem Statement We are planning a refactor of tuf where: * We do not want to try and support end-of-life versions of the language. * We want to use modern language features, such as typing. * We want to ease maintainer burden, by reducing the major language versions supported. ## Decision Drivers * Python 2.7 is end-of-life * Python 3.5 is end-of-life * Modern Python allows use of desirable features such as type hints * Supporting end-of-life Python versions adds maintenance overhead ## Considered Options * Support Python 2.7 and 3.5+ * Support Python 2.7 and 3.6+ * Support Python 2.7 and 3.6+ (with polyfill modules) * Support only Python 3.6+ ## Decision Outcome Chosen option: "Support only Python 3.6+", because we want modern features and lower maintainer effort as we work to improve our codebase through the refactor effort. New modules should target Python 3.6+. Using modules to polyfill standard library features from Python 3.6+ feels untenable as more libraries are dropping support for EOL Python releases. ### Negative Consequences * Leaves a major adopter and contributor without an actively developed client for some of their customers stuck on older Python versions.
## Links * [Discussion of how/where to develop the refactored codebase](https://github.com/theupdateframework/python-tuf/issues/1126) * [Discussion of deprecation policy for the pre-1.0, Python 2.7 supporting, code](https://github.com/theupdateframework/python-tuf/issues/1127) python-tuf-5.1.0/docs/adr/0002-pre-1-0-deprecation-strategy.md000066400000000000000000000035521470074210500234520ustar00rootroot00000000000000# Deprecation strategy for current release series (0.x) * Date: 2020-11-05 Technical Story: https://github.com/theupdateframework/python-tuf/issues/1127 ## Context and Problem Statement We plan to refactor the reference implementation significantly and, as part of that effort, drop support for no-longer maintained versions of Python (see ADR 0001). However, a major user of (and contributor to) the project has users of the client stuck on older Python versions. We would like to define a reasonable support policy for the current, Python 2.7 supporting, codebase. ## Decision Drivers * We have finite resources. * A major adopter/user of the project has a need to maintain support for Python 2.7 clients. ## Considered Options * Maintain the code in parallel for a fixed period of time after releasing the refactored code. * Abandon the old code once the refactored code is released. * Support the old code on a best-effort basis once the refactored code is released. ## Decision Outcome Chosen option: "Support the old code on a best-effort basis once the refactored code is released", because we only have finite resources and want to focus them on moving the project forward, including supporting PyPI/pip integration and providing a solid implementation for developing specification enhancements in. We should document this outcome clearly in a governance document describing the release process with words along the lines of: "Support for older releases: Bugs reported with tuf versions prior to 1.0.0 will likely not be addressed directly by tuf’s maintainers. Pull Requests to fix bugs in the last release prior to 1.0.0 will be considered, and merged (subject to normal review processes). Note that there may be delays due to the lack of developer resources for reviewing such pull requests." ## Links * [ADR 0001](0001-python-version-3-6-plus.md) Python version python-tuf-5.1.0/docs/adr/0003-where-to-develop-TUF-1-0-0.md000066400000000000000000000040311470074210500225020ustar00rootroot00000000000000# Develop TUF 1.0.0 in a subdirectory of the current TUF implementation * Status: accepted * Date: 2020-11-23 Technical Story: https://github.com/theupdateframework/python-tuf/issues/1126 ## Context and Problem Statement The plan is to implement a refactored TUF (1.0.0) alongside the current code base, in order to not disrupt existing usage and keep providing a Python 2.7 client. We need to decide on the best place to do this development. ## Decision Drivers * Developing the new code piecemeal * Continuing to make releases in the interim * Avoiding maintenance overhead ## Considered Options Develop TUF 1.0.0: * In its own repository * In a separate development branch of the current TUF implementation * In the default branch, archiving the current implementation * In a subdirectory of the current TUF implementation ## Decision Outcome Chosen option: "Develop TUF 1.0.0 in a subdirectory of the current TUF implementation", because we want to add the new TUF code gradually while continuing to maintain the current implementation given limited maintenance resources.
Once development of the new version is complete, we will transition from TUF 1.0.0 in a subdirectory to stand-alone TUF 1.0.0 by the following procedure: * flesh out tuf/api/* * implement tuf/client/new-updater.py * implement tuf/repository/* * \ * git mv tuf/client/new-updater.py tuf/client/updater.py * git rm tuf/\*.py * tag 1.0.0 ## Pros and Cons of the Options Developing TUF 1.0.0 in a subdirectory of the current TUF implementation seems to have the least maintenance overhead compared to options 1 and 2, while allowing us to continue making releases with the old code, unlike option 3. ### Negative Consequences * In-progress development in the default branch causes messiness in plain sight. ## Links * [Discussion of Python version support in TUF 1.0.0](https://github.com/theupdateframework/python-tuf/issues/1125) * [Discussion of deprecation policy for the pre-1.0, Python 2.7 supporting, code](https://github.com/theupdateframework/python-tuf/issues/1127) python-tuf-5.1.0/docs/adr/0004-extent-of-OOP-in-metadata-model.md000066400000000000000000000035441470074210500240650ustar00rootroot00000000000000# Add classes for complex metadata attributes * Status: accepted * Date: 2020-11-30 Technical Story: https://github.com/theupdateframework/python-tuf/issues/1133 ## Context and Problem Statement Custom classes for the TUF signed metadata wrapper (Metadata) and metadata payload containers (Root, Timestamp, Snapshot, Targets) were added recently. Complex attributes on these classes are still represented as dictionaries. Should we add classes for these attributes too? ## Decision Drivers * Transition to class-based role metadata containers in progress (see *"class model"* links below) * Harden in-memory representation of metadata model * Replace `securesystemslib` schema validation (see *"schema checker"* link below) ## Considered Options * Use custom classes for complex attributes * Use dictionaries for complex attributes ## Decision Outcome Chosen option: "Use custom classes for complex attributes", to provide a consistently object-oriented, well-defined, single source of truth about the TUF metadata model (not only its containers). In addition to convenience update methods, the model may be extended with self-validation behavior (see *"validation guidelines"* link below) to replace `securesystemslib` schema checks. ### Negative Consequences * Implementation overhead * Less flexibility in usage and development (this is actually desired) * Maybe less idiomatic than dictionaries ## Links * [class model](https://github.com/theupdateframework/python-tuf/pull/1112) * [class model (root)](https://github.com/theupdateframework/python-tuf/pull/1193) * [WIP: class model (complex attributes)](https://github.com/theupdateframework/python-tuf/pull/1223) * [new TUF validation guidelines](https://github.com/theupdateframework/python-tuf/issues/1130) * [securesystemslib schema checker issues](https://github.com/secure-systems-lab/securesystemslib/issues/183) python-tuf-5.1.0/docs/adr/0005-use-google-python-style-guide.md000066400000000000000000000037051470074210500240570ustar00rootroot00000000000000# Use Google Python style guide with minimal refinements Technical Story: https://github.com/theupdateframework/python-tuf/issues/1128 ## Context and Problem Statement The Secure Systems Lab code style guide, which has been used for most of the code base, has become outdated.
Through the upcoming rewrite, we have the chance to ignore consistency considerations with the existing code style and can choose a more standard and up-to-date style guide. ## Decision Drivers * Flaws in original Secure Systems Lab style guide * Curating a complete custom style guide is time-consuming * Well-established style rules lower the contribution barrier * Custom style is not supported by default in common tooling (i.e. editors and linters) ## Considered Options * Use custom style guide * Use Google style guide with refinements ## Decision Outcome Chosen option: "Use Google style guide with refinements", because the Google style guide is a comprehensive, well-established style guide that is mostly based on PEP-8 and was accepted by everyone on the TUF team. There is no need to replicate these recommendations. However, we do provide a very slim document with additional refinements, in order to emphasize items that we consider especially important, want to be handled differently, or want to be handled in one specific way where the Google guide would allow multiple. **Course of Action:** * Follow existing style when working on existing code (files) * Follow new style in any new code (files) * Consider providing linter and formatter configuration (e.g. pylint, flake8, black, yapf) to enforce and facilitate new style ## Links * [New Slim Secure Systems Lab style guide](https://github.com/secure-systems-lab/code-style-guidelines/pull/21) * [Google Python style guide](https://google.github.io/styleguide/pyguide.html) * [PEP 8](https://www.python.org/dev/peps/pep-0008/) * [Issues in original Secure Systems Lab style guide](https://github.com/secure-systems-lab/code-style-guidelines/issues/20) python-tuf-5.1.0/docs/adr/0006-where-to-implemenent-model-serialization.md000066400000000000000000000144041470074210500262560ustar00rootroot00000000000000# Separate metadata serialization from metadata class model but keep helpers Technical Story: https://github.com/theupdateframework/python-tuf/pull/1279 ## Context and Problem Statement In the course of implementing a class-based role metadata model we have also reviewed options for how to design the serialization infrastructure between wire formats and the class model. In an initial attempt we implemented serialization on the metadata class (see option 1), but issues with inheritance and calls for more flexibility have caused us to rethink this approach.
## Pros and Cons of the Options ### Option 1: Serialization in metadata classes Serialization is implemented on metadata classes, e.g. `Metadata.serialize_as_json()`, etc. * Good, because serialization for any object is encapsulated within the corresponding class and thus structured in small code chunks, using the already existing hierarchical class model structure. * Good, because the TUF specification is heavily based on json, even if only for illustrative purposes, thus this option facilitates recognizability. * Bad, because it might suggest that TUF is limited to json alone. * Bad, because it does not facilitate custom serialization implementations. * Bad, because it can get complicated with inheritance in the class model. *NOTE: a workaround exists in #1279.* ### Option 2: Serialization in metadata subclasses Serialization is implemented on metadata subclasses, e.g. `JsonMetadata.serialize()`, etc. * Good, because the wire format is decoupled from the base classes, not giving the impression that TUF is limited to json, and facilitating custom implementations. * Bad, because a user needs to decide on serialization ahead of time, when instantiating the metadata objects. * Bad, because the metadata model has many classes, which would all need to be subclassed accordingly. ### Option 3: Serialization separated from metadata classes Serialization is implemented independently of the metadata class, e.g. by defining an abstract `Serializer` interface, which must be implemented in subclasses, e.g. `JsonSerializer`, etc. * Good, because the wire format is completely decoupled from the class model, not giving the impression that TUF is limited to json, and facilitating custom implementations. * Good, because it can serve as an exact blueprint for custom implementations. * Bad, because a decoupled serialization implementation needs to "re-implement" the entire class hierarchy, likely in a procedural manner. ### Option 4: Compromise 1 Default json serialization is implemented on the metadata class as described in (1), but can be overridden using an independent `Serializer` interface as described in (3). * Good, for the reasons outlined in options (1) and (3), i.e. encapsulation within classes but decoupled class model and wire format. * Bad, because it creates two different code paths for default and non-default wire formats, making the code more complex and prone to deteriorate, especially on the non-default path. * Bad, because the on-the-class default implementation cannot be used as a blueprint for custom implementations. ### Option 5: Compromise 2 Serialization is implemented independently of the metadata class as described in (3). However, the *meat* of the default `JsonSerializer`, i.e. conversion between metadata objects and dicts, is implemented on the metadata class, e.g. as `Metadata.to_dict()`, etc. * Good, for the reasons outlined in options (1) and (3), i.e. encapsulation within classes but decoupled class model and wire format, without the disadvantage in (4) of having two completely different code paths. * Good, because it makes the separate default serializer a minimal wrapper around the dict conversion methods. * Good, because other serialization implementations might also make use of dict conversion methods. * Good, because conversion between class objects and dicts is akin to type casting, which is idiomatic to implement on the class. * Bad, because the on-the-class default implementation cannot be used as a blueprint for custom implementations.
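To make the chosen compromise concrete, the following is a minimal, illustrative sketch of option 5: dict conversion implemented as methods on the class, wrapped by a thin, swappable serializer interface. All names here are illustrative stand-ins for the shapes discussed above, not the actual python-tuf API.

```python
import abc
import json


class Metadata:
    """Illustrative stand-in for the real metadata wrapper class."""

    def __init__(self, spec_version: str) -> None:
        self.spec_version = spec_version

    def to_dict(self) -> dict:
        # The "meat" of default serialization: dict conversion is
        # implemented on the class itself, akin to type casting.
        return {"spec_version": self.spec_version}


class Serializer(metaclass=abc.ABCMeta):
    """Separate interface that custom wire formats implement."""

    @abc.abstractmethod
    def serialize(self, metadata: Metadata) -> bytes:
        raise NotImplementedError


class JsonSerializer(Serializer):
    """Default serializer: a minimal wrapper around the dict helpers."""

    def serialize(self, metadata: Metadata) -> bytes:
        return json.dumps(metadata.to_dict()).encode("utf-8")
```

A non-JSON wire format would subclass `Serializer` in the same way, and could still reuse `to_dict()` where convenient, which is exactly the mix of decoupling and convenience that the options above weigh against each other.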
## Links * [ADR4: Add classes for complex metadata attributes (decision driver)](0004-extent-of-OOP-in-metadata-model.md) * [PR: Add simple TUF role metadata model (implements option 1)](https://github.com/theupdateframework/python-tuf/pull/1112) - [details about separation of serialization and instantiation](https://github.com/theupdateframework/python-tuf/commit/f63dce6dddb9cfbf8986141340c6fac00a36d46e) - [code comment about issues with inheritance](https://github.com/theupdateframework/python-tuf/blob/9401059101b08a18abc5e3be4d60e18670693f62/tuf/api/metadata.py#L297-L306) * [PR: New metadata API: add MetadataInfo and TargetFile classes (recent ADR discussion impetus)](https://github.com/theupdateframework/python-tuf/pull/1223) - [more discussion about issues with inheritance](https://github.com/theupdateframework/python-tuf/pull/1223#issuecomment-737188686) * [SSLIB/Issue: Add metadata container classes (comparison of options 1 and 2)](https://github.com/secure-systems-lab/securesystemslib/issues/272) * [tuf-on-a-plane parser (implements option 3)](https://github.com/trishankatdatadog/tuf-on-a-plane/blob/master/src/tuf_on_a_plane/parsers/) python-tuf-5.1.0/docs/adr/0008-accept-unrecognised-fields.md000066400000000000000000000051631470074210500234320ustar00rootroot00000000000000# Accept metadata that includes unrecognized fields - Status: accepted - Date: 2021-04-08 Technical Story: https://github.com/theupdateframework/python-tuf/issues/1266 ## Context and Problem Statement The current reference implementation will ignore unrecognized fields in a metadata file when loading it. This leads to the side effect that if you read a metadata file with unrecognized fields and immediately write it back to the disk, this file will be modified. Furthermore, some TAPs, like: - [TAP 6](https://github.com/theupdateframework/taps/blob/master/tap6.md) - [TAP 10](https://github.com/theupdateframework/taps/blob/master/tap10.md) - [TAP 14](https://github.com/theupdateframework/taps/blob/master/tap14.md) - [TAP 15](https://github.com/theupdateframework/taps/blob/master/tap15.md) - [TAP 16](https://github.com/theupdateframework/taps/blob/master/tap16.md) rely on unrecognized fields being accepted, in order to introduce new fields to the specification without making the metadata invalid for older clients that don't recognize them. ## Decision Drivers - The TUF specification implies support for unrecognized attribute-value fields, see [Document formats](https://theupdateframework.github.io/specification/latest/#document-formats) - If we perform the following operations on a metadata file with no intermediate operations: 1. read the metadata file 2. write the metadata file back to the disk then the checksum (the content) of the file must not be changed. - Flexibility to add new fields in the spec without adding breaking changes. - Don't store unrecognized fields when that is not allowed by the specification. ## Considered Options - Ignore and drop unrecognized fields. - Ignore, but store unrecognized fields as an additional attribute. - Ignore, but store unrecognized fields as an additional attribute except for a couple of places where it's not allowed by the specification. ## Decision Outcome Chosen option: "Ignore, but store unrecognized fields as an additional attribute except for a couple of places where it's not allowed by the specification."
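This ADR predates the finished implementation, but a short sketch of the intended behaviour, using the `unrecognized_fields` attribute as it exists in today's Metadata API (the file name is a placeholder):

```python
from tuf.api.metadata import Metadata

# Load metadata that may contain fields this implementation does not know.
md = Metadata.from_file("root.json")

# Unknown fields are not dropped: they are retained on the object...
print(md.signed.unrecognized_fields)

# ...and written back out on serialization, so a plain read/write
# round trip does not silently lose them.
md.to_file("root.json")
```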
The motivation for this decision is that the TUF specification already implies that we should accept unrecognized fields for backward compatibility and easier future extensibility. Additionally, it seems unacceptable to change a metadata file's content just by reading it and writing it back. There are, however, exceptions for places in the metadata format where this is not allowed by the specification: keys, roles, meta, hashes, and targets are actual dictionaries (as opposed to the JSON objects that most structures in the format are), where an `unrecognized field` is not a meaningful concept. python-tuf-5.1.0/docs/adr/0009-what-is-a-reference-implementation.md000066400000000000000000000053501470074210500250140ustar00rootroot00000000000000# Primary purpose of the reference implementation * Status: accepted * Date: 2021-08-25 ## Context and Problem Statement The original goal for the reference implementation refactor was to provide an implementation which is both an aid to understanding the specification and a good architecture for other implementations to mimic. During refactoring efforts on the metadata API and ngclient, several friction points have arisen where a safe object-oriented API would result in a less direct mapping to the [Document formats] in the specification. The archetypal example of a friction point is that [Timestamp] lists snapshot _only_ in a `meta` dictionary of `METAPATH` -> attribute fields. The dictionary will only ever contain one value and creates an extra level of indirection for implementations which try to map to the file format. When presented with such cases, we have considered multiple options: * Strict mapping to the [Document formats] * Simple and safe API in preference to mapping to the [Document formats] * Strict mapping to the [Document formats] with additional convenience API which is documented as the preferred interface for users So far the implementation has tended towards the final option, but this is unsatisfying because: * the API contains traps for unsuspecting users * two code paths to achieve the same goal are likely to result in inconsistent behaviour and bugs Therefore, we would like to define our primary purpose so that we can make consistent decisions. [Document formats]: https://theupdateframework.github.io/specification/latest/#document-formats [Timestamp]: https://theupdateframework.github.io/specification/latest/#file-formats-timestamp ## Decision Drivers * The reference implementation is often the starting point for new implementations, porting the architecture of the reference implementation to new languages/frameworks * Reading reference implementation code is a common way to learn about TUF * The TUF formats include non-intuitive JSON object formats when mapping to OOP objects * Multiple code paths/APIs for the same feature are a common source of bugs ## Considered Options The primary purpose of the reference implementation is: * a learning resource to aid understanding of the specification (pedagogical reference) * a good architecture for other implementations to mimic (exemplary reference) ## Decision Outcome The primary purpose of the reference implementation is as an exemplary reference: providing a safe, consistent API for users and a good architecture for other implementations to mimic.
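The timestamp example above shows what the chosen purpose means in practice: rather than forcing every caller through a one-entry `meta` dictionary that mirrors the file format, the exemplary-reference choice is to expose the single snapshot entry directly. A sketch, using the current Metadata API for illustration:

```python
from tuf.api.metadata import MetaFile, Timestamp

timestamp = Timestamp(snapshot_meta=MetaFile(version=1))

# A strict mapping of the file format would read, awkwardly:
#     timestamp.meta["snapshot.json"].version
# The safe, consistent API exposes the only possible entry directly:
print(timestamp.snapshot_meta.version)
```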
## Links * Discussed [on Slack](https://cloud-native.slack.com/archives/C01GT17AC5D/p1629357567021600) * Discussed in the [August 2021 TUF community meeting](https://hackmd.io/jdAk9rmPSpOYUdstbIvbjw#August-25-2021-Meeting) python-tuf-5.1.0/docs/adr/0010-repository-library-design.md000066400000000000000000000141301470074210500233570ustar00rootroot00000000000000# Repository library design built on top of Metadata API ## Context and Problem Statement The Metadata API provides a modern Python API for accessing individual pieces of metadata. It does not provide any wider context to help someone looking to implement a TUF repository. The legacy python-tuf implementation offers tools for this but suffers from some issues (as do many other implementations): * There is a _very_ large amount of code to maintain: repo.py, repository_tool.py and repository_lib.py alone are almost 7000 lines of code. * The "library like" parts of the implementation do not form a good coherent API: methods routinely have a large number of arguments, the code still depends on globals in a major way, and the application (repo.py) still implements a lot of "repository code" itself * The "library like" parts of the implementation make decisions that look like application decisions. As an example, repository_tool loads _every_ metadata file in the repository: this is fine for a CLI that operates on a small repository but is unlikely to be a good choice for a large-scale server. ## Decision Drivers * There is a consensus on removing the legacy code from python-tuf due to maintainability issues * The Metadata API makes modifying metadata far easier than the legacy code base: this makes significantly different designs possible * Not providing a "repository library" (and leaving implementers on their own) may be a short-term solution because of the previous point, but to make adoption easier and to help adopters create safe implementations the project would benefit from some shared repository code and a shared repository design * Maintainability of new library code must be a top concern * Allowing a wide range of repository implementations (from CLI tools to minimal in-memory implementations to large-scale application servers) would be good: unfortunately these can have wildly differing requirements ## Considered Options 1. No repository packages 2. repository_tool -like API 3. Minimal repository abstraction ## Decision Outcome Option 3: Minimal repository abstraction While option 1 might be used temporarily, the goal should be to implement a minimal repository abstraction as soon as possible: this should give the project a path forward where the maintenance burden is reasonable and results should be usable very soon. The python-tuf repository functionality can later be extended as ideas are experimented with in upstream projects and in python-tuf example code. The concept is still unproven, but validating the design should be straightforward: the decision could be re-evaluated in a few months if not in weeks. ## Pros and Cons of the Options ### No repository packages The Metadata API makes editing repository content vastly simpler. There are already repository implementations built with it[^1] so clearly a repository library is not an absolute requirement. Not providing repository packages in python-tuf does mean that external projects could experiment and create implementations without adding to the maintenance burden of python-tuf. This would be the easiest way to iterate many different designs and hopefully find good ones in the end.
That said, there are some tricky parts of repository maintenance (e.g. initialization, snapshot update, hashed bin management) that would benefit from having a canonical implementation, both for easier adoption of python-tuf and as a reference for other implementations. Likewise, a well-designed library could make some repeated actions (e.g. version bumps, expiry updates, signing) much easier to manage. ### repository_tool -like API It won't be possible to support the repository_tool API as it is, but a similar one would certainly be an option. This would likely be the easiest upgrade path for any repository_tool users out there. The implementation would not be a huge amount of work, as the Metadata API makes many things easier. However, repository_tool (and parts of repo.py) are not a great API. It is likely that a similar API would suffer from some of the same issues: it might end up being a substantial amount of code that is only a good fit for one application. ### Minimal repository abstraction python-tuf could define a tiny repository API that * provides carefully selected core functionality (like core snapshot update) * does not implement all repository actions itself; instead, it makes it easy for the application code to do them * leaves application details to specific implementations (examples of decisions a library should not always decide: "are targets stored with the repo?", "which versions of metadata are stored?", "when to load metadata?", "when to unload metadata?", "when to bump metadata version?", "what is the new expiry date?", "which targets versions should be part of the new snapshot?") python-tuf could also provide one or more implementations of this abstraction as examples -- this could include a _repo.py_- or _repository_tool_-like implementation. This could be a compromise that allows: * a low maintenance burden on python-tuf: the initial library could be tiny * sharing the important, canonical parts of a TUF repository implementation * ergonomic repository modification, meaning most actions do not have to be in the core code * very different repository implementations using the same core code and the same abstract API The approach does have some downsides: * it's not a drop-in replacement for repository_tool or repo.py * A prototype has been implemented (see Links below) but the concept is still unproven More details in the [Design document](../repository-library-design.md). ## Links * [Design document for minimal repository abstraction](../repository-library-design.md) * [Prototype implementation of minimal repository abstraction](https://github.com/vmware-labs/repository-editor-for-tuf/) [^1]: [RepositorySimulator](https://github.com/theupdateframework/python-tuf/blob/develop/tests/repository_simulator.py) in the python-tuf tests is an in-memory implementation, while [repository-editor-for-tuf](https://github.com/vmware-labs/repository-editor-for-tuf) is an external command-line repository maintenance tool. python-tuf-5.1.0/docs/adr/index.md000066400000000000000000000027611470074210500167670ustar00rootroot00000000000000# Architectural Decision Log This log lists the architectural decisions for tuf.
- [ADR-0000](0000-use-markdown-architectural-decision-records.md) - Use Markdown Architectural Decision Records - [ADR-0001](0001-python-version-3-6-plus.md) - Default to Python 3.6 or newer for new development - [ADR-0002](0002-pre-1-0-deprecation-strategy.md) - Deprecation strategy - [ADR-0003](0003-where-to-develop-TUF-1-0-0.md) - Develop TUF 1.0.0 in a subdirectory of the current TUF implementation - [ADR-0004](0004-extent-of-OOP-in-metadata-model.md) - Add classes for complex metadata attributes - [ADR-0005](0005-use-google-python-style-guide.md) - Use Google Python style guide with minimal refinements - [ADR-0006](0006-where-to-implemenent-model-serialization.md) - Separate metadata serialization from metadata class model but keep helpers - [ADR-0008](0008-accept-unrecognised-fields.md) - Accept metadata that includes unrecognized fields - [ADR-0009](0009-what-is-a-reference-implementation.md) - Primary purpose of the reference implementation - [ADR-0010](0010-repository-library-design.md) - Repository library design built on top of Metadata API For new ADRs, please use [template.md](template.md) as basis. More information on MADR is available at . General information about architectural decision records is available at . python-tuf-5.1.0/docs/adr/template.md000066400000000000000000000042761470074210500174760ustar00rootroot00000000000000# [short title of solved problem and solution] * Status: [proposed | rejected | accepted | deprecated | … | superseded by [ADR-0005](0005-example.md)] * Deciders: [list everyone involved in the decision] * Date: [YYYY-MM-DD when the decision was last updated] Technical Story: [description | ticket/issue URL] ## Context and Problem Statement [Describe the context and problem statement, e.g., in free form using two to three sentences. You may want to articulate the problem in form of a question.] ## Decision Drivers * [driver 1, e.g., a force, facing concern, …] * [driver 2, e.g., a force, facing concern, …] * … ## Considered Options * [option 1] * [option 2] * [option 3] * … ## Decision Outcome Chosen option: "[option 1]", because [justification. e.g., only option, which meets k.o. criterion decision driver | which resolves force force | … | comes out best (see below)]. ### Positive Consequences * [e.g., improvement of quality attribute satisfaction, follow-up decisions required, …] * … ### Negative Consequences * [e.g., compromising quality attribute, follow-up decisions required, …] * … ## Pros and Cons of the Options ### [option 1] [example | description | pointer to more information | …] * Good, because [argument a] * Good, because [argument b] * Bad, because [argument c] * … ### [option 2] [example | description | pointer to more information | …] * Good, because [argument a] * Good, because [argument b] * Bad, because [argument c] * … ### [option 3] [example | description | pointer to more information | …] * Good, because [argument a] * Good, because [argument b] * Bad, because [argument c] * … ## Links * [Link type] [Link to ADR] * … python-tuf-5.1.0/docs/api/000077500000000000000000000000001470074210500153335ustar00rootroot00000000000000python-tuf-5.1.0/docs/api/api-reference.rst000066400000000000000000000024361470074210500205770ustar00rootroot00000000000000API Reference ===================== TUF provides multiple APIs: * The low-level :doc:`tuf.api` provides access to a Metadata file abstraction that closely follows the TUF specification's `document formats`_. 
This API handles de/serialization to and from files and makes it easier to access and modify metadata content safely. It is purely focused on individual pieces of Metadata and provides no concepts like "repository" or "update workflow". * The `client update workflow`_ is implemented in the :doc:`tuf.ngclient` module: It is a higher-level API that provides ways to query and download target files securely, while handling the TUF update workflow behind the scenes. ngclient is implemented on top of the Metadata API and can be used to implement various TUF clients with relatively little effort. Code `examples `_ are available for client implementation using ngclient and a basic repository using Metadata API. .. toctree:: :maxdepth: 2 :caption: Contents: tuf.api tuf.ngclient .. _client update workflow: https://theupdateframework.github.io/specification/latest/#detailed-client-workflow .. _document formats: https://theupdateframework.github.io/specification/latest/#document-formats python-tuf-5.1.0/docs/api/tuf.api.metadata.metadata.rst000066400000000000000000000001331470074210500227660ustar00rootroot00000000000000Metadata class --------------------------------- .. autoclass:: tuf.api.metadata.Metadata python-tuf-5.1.0/docs/api/tuf.api.metadata.root.rst000066400000000000000000000001231470074210500221700ustar00rootroot00000000000000Root class --------------------------------- .. autoclass:: tuf.api.metadata.Root python-tuf-5.1.0/docs/api/tuf.api.metadata.snapshot.rst000066400000000000000000000001331470074210500230450ustar00rootroot00000000000000Snapshot class --------------------------------- .. autoclass:: tuf.api.metadata.Snapshot python-tuf-5.1.0/docs/api/tuf.api.metadata.supporting.rst000066400000000000000000000013141470074210500234220ustar00rootroot00000000000000Supporting classes --------------------------------- The Metadata API includes multiple classes that are used by the top-level ones (Root, Timestamp, Snapshot, Targets): .. autosummary:: :nosignatures: tuf.api.metadata.DelegatedRole tuf.api.metadata.Delegations tuf.api.metadata.Key tuf.api.metadata.MetaFile tuf.api.metadata.Role tuf.api.metadata.TargetFile tuf.api.metadata.SuccinctRoles .. autoclass:: tuf.api.metadata.DelegatedRole .. autoclass:: tuf.api.metadata.Delegations .. autoclass:: tuf.api.metadata.Key .. autoclass:: tuf.api.metadata.MetaFile .. autoclass:: tuf.api.metadata.Role .. autoclass:: tuf.api.metadata.TargetFile .. autoclass:: tuf.api.metadata.SuccinctRolespython-tuf-5.1.0/docs/api/tuf.api.metadata.targets.rst000066400000000000000000000001311470074210500226550ustar00rootroot00000000000000Targets class --------------------------------- .. autoclass:: tuf.api.metadata.Targets python-tuf-5.1.0/docs/api/tuf.api.metadata.timestamp.rst000066400000000000000000000001351470074210500232130ustar00rootroot00000000000000Timestamp class --------------------------------- .. autoclass:: tuf.api.metadata.Timestamp python-tuf-5.1.0/docs/api/tuf.api.rst000066400000000000000000000005301470074210500174310ustar00rootroot00000000000000Metadata API =============== .. toctree:: tuf.api.metadata.metadata tuf.api.metadata.root tuf.api.metadata.timestamp tuf.api.metadata.snapshot tuf.api.metadata.targets .. toctree:: :hidden: tuf.api.metadata.supporting tuf.api.serialization .. automodule:: tuf.api.metadata :no-members: :no-inherited-members: python-tuf-5.1.0/docs/api/tuf.api.serialization.rst000066400000000000000000000003071470074210500223070ustar00rootroot00000000000000Serialization ============================= .. 
automodule:: tuf.api.serialization JSON serialization ----------------------------- .. automodule:: tuf.api.serialization.json :show-inheritance: python-tuf-5.1.0/docs/api/tuf.ngclient.config.rst000066400000000000000000000001241470074210500217260ustar00rootroot00000000000000Configuration ============= .. automodule:: tuf.ngclient.config :undoc-members: python-tuf-5.1.0/docs/api/tuf.ngclient.fetcher.rst000066400000000000000000000002711470074210500221040ustar00rootroot00000000000000Fetcher ============ .. autoclass:: tuf.ngclient.FetcherInterface :undoc-members: :private-members: _fetch .. autoclass:: tuf.ngclient.RequestsFetcher :no-inherited-members: python-tuf-5.1.0/docs/api/tuf.ngclient.rst000066400000000000000000000010411470074210500204610ustar00rootroot00000000000000ngclient ======== The ngclient module contains a complete TUF client library implementation. * :doc:`tuf.ngclient.updater` implements the `detailed client workflow`_ * :doc:`tuf.ngclient.config` provides optional configuration for the updater * :doc:`tuf.ngclient.fetcher` can be used for optional low-level network I/O control .. toctree:: :hidden: tuf.ngclient.updater tuf.ngclient.config tuf.ngclient.fetcher .. _detailed client workflow: https://theupdateframework.github.io/specification/latest/#detailed-client-workflow python-tuf-5.1.0/docs/api/tuf.ngclient.updater.rst000066400000000000000000000000701470074210500221250ustar00rootroot00000000000000Updater ========= .. automodule:: tuf.ngclient.updater python-tuf-5.1.0/docs/conf.py000066400000000000000000000047561470074210500160750ustar00rootroot00000000000000# Configuration file for the Sphinx documentation builder. # # This file only contains a selection of the most common options. For a full # list see the documentation: # https://www.sphinx-doc.org/en/master/usage/configuration.html # -- Path setup -------------------------------------------------------------- # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. # import os import sys sys.path.insert(0, os.path.abspath(os.path.join(".."))) import tuf # -- Project information ----------------------------------------------------- project = "TUF" copyright = "2021, New York University and the TUF contributors" author = "New York University and the TUF contributors" # -- General configuration --------------------------------------------------- master_doc = "index" # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ "sphinx.ext.napoleon", "sphinx.ext.autosummary", "sphinx.ext.autosectionlabel", ] autosectionlabel_prefix_document = True # Add any paths that contain templates here, relative to this directory. templates_path = ["_templates"] # -- Options for HTML output ------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # html_theme = "sphinx_rtd_theme" html_theme_options = {"logo_only": True} html_logo = "tuf-horizontal-white.png" html_favicon = "tuf-icon-32.png" # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". 
# html_static_path = ['_static'] # -- Autodoc configuration --------------------------------------------------- # https://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html # Tone down the "tuf.api.metadata." repetition add_module_names = False python_use_unqualified_type_names = True # Show typehints in argument doc lines, but not in signatures autodoc_typehints = "description" autodoc_default_options = { "members": True, "inherited-members": "Exception", # excl. members inherited from 'Exception' "exclude-members": "to_dict, from_dict", } # Version version = tuf.__version__ python-tuf-5.1.0/docs/index.md000066400000000000000000000004571470074210500162210ustar00rootroot00000000000000--- title: "Python-TUF development blog" --- This is the development blog for the [Python-TUF](https://github.com/theupdateframework/python-tuf) project, welcome! If you want to learn how to use Python-TUF, check out our [developer documentation](https://theupdateframework.readthedocs.io/en/latest/). python-tuf-5.1.0/docs/index.rst000066400000000000000000000013041470074210500164210ustar00rootroot00000000000000TUF Developer Documentation =========================== This documentation provides essential information for those developing software with the `Python reference implementation of The Update Framework (TUF) `_. The reference implementation provides easy-to-use components for Python developers but also aims to be a readable guide and demonstration for those working on implementing TUF in their own languages, environments, or update systems. .. toctree:: :maxdepth: 1 :caption: Contents: api/api-reference INSTALLATION Usage examples Contribute python-tuf-5.1.0/docs/repository-library-design-ownership.jpg000066400000000000000000001420011470074210500244300ustar00rootroot00000000000000[binary JPEG image data omitted]
śÖ×2E,)´.ĐęrAdsŠăSYńUôz—-őĆť=έsg%ËĂť, H¬TnU~1ÇĆ8¨®uż\ę:Őµ­ŢşNËinö–Ö̒ȱ+4“ďÁ;‹t] śôď,ü'˘Ř.ž-í › ¤¸…̬Í溲»±'.Ä3d¶zÔŻ‚´m^ö{»„şŽK”Ý ką![•H¨Ŕ7sÎ8é@íĄç|Kâ5˛}^ăF‹űŇňXm&ežF0 Ęß/Ëřŕr9Ím _ÖüV<=¦ÉŞI`óirŢŢ\ÚĆ‚IŮ%»••FIcč+˝µŃtű-AŻ­­–)Í´vź)!DQ–(ˇz n=fÉŕť¬,m"ŠâŘXł[\ɱ9`Äŕ’:zP%¨kšězƟᱪ^_4pO=ŐćŹ yvJP‡ůŚüřÎ5Őx*˙UżŃâŢîHIŐIc*ΨJ†ÁÁ¸č3N“ŔúXŮŰCö¦Čą·žÚćHćBç.|ŔwÇ“’ry5§¤hö:€˛Óá1Ăąť‹9vwc–ff$łÔ“@袊(˘Š(˘Š(˘Š(˘Š†ëýXúŐJ·uţ¬}jĄQEQEQEQEQEQEQEQE5ÝcRÎÁTu,p)żh‡Ęó|čüĽăváŹÎśĚŠc.»‡ĺ€óÁ­:ĘkQÚhÂ7ÝbĂéCĎeCĘŠ[î‚ŔgéZ´PYš%”DdA!襆*Ń4¦!"U 3ůVĄ”“Ĺ!`’ŁűŔ08úĐ·ş3¬Ń”_ĽÁ†jŃ@ć™-Ą–kڎ€Ěů©Í^kQÚhÂ7ÝbĂéZ´PSĎeCĘŠ[î‚ŔgéJf‰eHz)aźĘµ(  ±4M)H†AŐC ţT‰W›çGĺç· ~t5Ä(Ší4aî±aô­Z(4zxĎáKN¸Uűk0ośĆ Ż Ë`ţ§ň¦ĐEPŘ?ÔŻăüęJŽő+ř˙:’€ (˘€ (˘€ (˘€1ĽE¨.žšak©íüýF paŤ_yfĆĆÝŃOr9«‰zeąžVÓ5scmzöŢ-şă™\¦ÜÜŔś`ŞźĽ3ŔÖńN‘u«Ç¤­®ĚÚę–÷rol|Ůl{Ö žÔŰ—şpý˘m|ę óńĺ}°MÉÇ]§­_:}µ–­q¨éÚ•„šZE$öóƆCŤµv±dů n$ű˘ú]µÁ¶źTXÓěńČiţ-ĺCpX)޵ťmá-N/ čşky?hłÖĹô¸~<ݵ<Ľrv°ăÖ°źá­Ě+y¦¦‡¦ŢG=Ü’Ç©\ŢJDň!áÜŔ‚qÍuš—Źěôű˝VŇukµŇHűtÖđˇHT }Ůg† đxé×Ć\ŘYŽŹ‘řąTRâäżŢÜW ąÜÁĄo _}—Ćń(‹:Ćď˛|ţ¶ÉÝéó)ü+>o x’ŢřI¦µĽO?‡SLűQ› k<~c+…Úw\ ŽťqÚ€5ěümÍýĄ¤úŻf÷Ń<–Fá"ăbî*0ä«coáY>ń…Ö±iọHÝYĎ{uu–±EĺNda“¸•U שeç“KHđn«Š<=«>“ °2‹ąeÔ¤şžrđ˛n Ă… zg'=9—Hđvł~˛ż‚Ţ;}*î÷|±O¸ËŃĘ€Ú0s öÍmÚxţÂęKIłő(tËŮ„şś±(‚g' Ś6đđ ("˘—â6źÜÉý—޵ŤťëXÝ_SʆE“ËççÜW8ĺAŔ#8Ôĺđ?t…ýŞ˙S¸ş‡/ňě{Ź1rqÁŰ@ĺQ@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Ý«Z©VîżŐŹ­T Š( Š( Š( Š( Š( Š( Š( Š( .Q…ÝűÄú°ü:Ö•f“"˛Ć[zôÜ3úf´¨Ť‡âÝÉv,|-âČ­ne¶y †…ăb­ŚĘ •˝ kö>$ŇÖţŔČ{E$s!I"‘NOFĽÓĂö~1m ÄW~Ömc)¬_ěfł ]„­$-Á=˛¸ÍU–ę1ŕ/ǢKssµ­íSsqöieť´‘Hę?vYÔ/ ľhÚhŻż‹RŃt˙ÚG ľ“iý‚óŤ> M®Z)A JąPP0Č8ęTŰm>ÓĂÚŹo4;™ćĽÔç\–¸i>Ý …ť¤pI‚íśtâ€=2ŠńQo|$“Ćď}rľ'®>Ůö‡Üł‰1äěÎ6çäŮŚcµZń•ĚÚ¶ż«Ďh5‹XLŹg©µ˝ćV%,OČqËŽrsÎhÓ/µŰ}4ę{Ô6–˘ę[¶‹12üŮ G%€\‘Žâ´ ™.-ăž<ě‘®}Íy‰®"Ö­|c'™<¶Źá[k¸¬Tä‰1€x­;M"ĂSń^›áŰď0éÚwvÖ~{„šW‡sÎ[hĆ'¨Ó¨Ż™eĽµ¶ĐEýăépxÄXC*Ü6óoöwf‡~w eÎsďĹmë1|1Ö%–Í%ţĚżÓ-mÚF ČrQAbOÎŻŹřzu‘á}.mĂ:~źs3Ďs#Ď•ŰqyOĚç?ďZôUKívsć|őÚßÓ5nŞ_2eÉ2aOˇÚÜţYüč(˘€ (˘€ (˘€ (˘€ }»‘x©· ĆÇw¦ ńúţ”Ę}ą“í`ű˛ŚXűäcúĐkZ´‰{«]$Źś-4‹ЍÉŔ$ ţ5CVńnź¤YXM$wU´˛·@ÓĚH…$ rI Z­ńţI·˙ě7ţ‚k‹Ń˘ľđnżĄkľ$¸ŠţßYµŠĚŢöŤ6N©úDŔźď ž´ëJK"±R¤Ś•8Čöâ–Ľ—P†˙Ä>;ń-˝ÎšśZy†+xĺŐžĐ[#DzŞ©ÉbIßÔmÇj}Ž©{˘ižńV«©%Í»G.›¨ÜE1’7Ť™Ś2ç€Hd¶9.hŐꤦ}BîÓě—1‹mžD9w~CžqŚ yPÓ.uaŕÔŐ§»Câ ű»ëČRwOÝĽ éA(EE#ëëQř•$ÓlĽwaasqo ˘i[m™‹D7(ŕ’N}O~ôě”W™ę:rř?Ć6ŁĂËpďGľ’X^w”O,B3Äĺ˛Äg©ÍbϧivŢđn»k©\ťSPÔ4ó=ÇÚܵă<ŠdWÁäămÇ­{5Q@Q@Q@.6}µ±ź3Ë\ý2Řţ´ÚuĂ/ŰYBüâ5%˝F[ô?ť6€ (˘€.ÁţĄçRTp©_ÇůÔ”QEQEQEQEÉbŽxĚrƲ!ę®2áO˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š†ëýXúŐJ·uţ¬}jĄQEQEQEQEQEQEQEQE!3 Śá·©?MĂ?¦kJłJd¶ţń~Ś?•Ą@¬´űM:9#ł·Ž–V™Â nv9f>äŐ9<3˘Mm}m&—jđ_Éç]FŃ‚˛Éýň?˝ŔçŻĚi^7[ 7[ľńÔŤ  źN€ÇJ `pŁ'ëÉ>őv_Úľź¬y6Ś–źhn…ťÝ«#ş…p3ĘäsÎFJ“Rđ>śľÖô} ÎÚĘ}JŮâ2¶~f*@.ܱ>ř«Ú„4MDąłÓ-`˝ň„o4iϸ_î‚yŔĆk+MńÜÂúĺő­ěšžĄn¬–p[,Ěu^ÉÎw«Rx˙BC}VwąŠ(®–Îx^C1ÇĘÉ×<Ćs‘ŚĐĂŕß_űTčÖźmó<ď3gü´ţţ:n˙kŁRđo‡5Ť@ßj=­ĹËÝÖĐ8čŘ÷Ír7ľ0ÔµŻčZUŚzîŹkP2!ś!ő_™¸÷¬ů|áą´ËM9ô{ckhXŰ ·¶ÖÎFIçžjľ©ă­/J˝»¶6ú…×ŘT5ě¶–­$v Ťß9ěň@Éť{ă}2Úĺm­ ľŐ%0%Ë®ťnfňânUô u>”§¤Aieiťm˝Ś˘khŇ0'ŤŔ˙3sîk']Đ/üAâ(] ˘i÷ |ĚÓÉ:† ¸ĆŐP[9ÎN1ŠÜÓ5;=gM·Ôl'Yín|r/CČ== [ Š( Şß1Q ťŇ`źO•ŽOÖ­U[ŇŕBd0ţĂiţ¸ (˘Š(˘Š(˘Š(˘Š)öâOµ‚îÂ0aëL§Űˇ7Šű°l6úäŻ?§ë@ŢYŰęSYÝ“[L…$ŤĆUÔőŁĽŇě5 1ôŰ»Hg˛tĐHąBŁ Ç¶ĺU|Oy>ťá=föÖO.âŢĆy˘|µÖ6 ŕđy­aŰřćÖËMŃ-ĽŐŻ´ŘîÄ6–ĆF—ĺ]ÄŔ’yŔ  ]OÁŢÖÔ4›yŢÄ(ěířI%}ŽEQńW†®őÝ2×Ăö±iđh.Ń‹ŔŔ‰4e`‘(yŰŚ’1ŘĚÖ~"Ćš6“¨i×R‹­V;ŢŐĽČpř’2ą“°ݍxăN°ĽžŐlő+ą-cY.ţÉjŇ PĂpó1Đ㝣'¨v]>Î{›K‰mŁi­ 5»•ć"T©Űé•$U{ŤJ»7†â ç–nw.|ß,ĺ7zăµdj^>Ńt÷±Ž5ĽÔ$ż¶űU¬vÍ3KG cź ®wJń¬ňř‹Äšž­y©YhúS’Ńě•cű±ýćÁs!gáAčEw÷:u˝ĹĚw†4°ĆńÁpT>7c>¸•yí·Ă«ůőM>mCOđĺ·Ů®ă»¸ż°Ť„÷mnaP±î` aŽq]^™ăGUţĚ–ĎQÓ®Ě-µR­Ý«Z©@Q@Q@Q@Q@Q@Q@Q@Q@ eF1‡mŁĚCźpŔůŕVťf7—üĚíóőÜ1úâ´čˇ†µ°Ď°“-ăeÔČćßÎVó:ôŔ'Ö·5­P˝ńžŁs ą6óřrK4”Ěd$/äs]­ăăCŐßš¬úV˝z~—ý—yoe?“s‘vżÎ„©ÎzqVcđ˝ôúÚˇŃőZçÄ7 ·÷f{‡‚'Pd“$íŕ2NŻW˘€9mCNż“â.ť©[Á›x´«L­÷VFxĘßť§ňŻ<»ŇĽŇa!ţQ´ä®Üçµ{j(DUŔć–Š(˘Š(˘ŠŁpíŚIýŮŤ@ůl˙Je:áH˝gÝÁŤFßLçőý)´QEvő+ř˙:’ŁýJţ?Τ Š( Š( Š( Š( Š( Š( Š( Š( Š( Š( Š( Š( Š( 
Š( Š( Š( Š( Š( Š( Š( Š( Š( Š( Š( Š( Š( Š( Š( nżŐŹ­T«w_ęÇÖŞPEPEPEPEPEPEPEPEP2«Ś2†„fšbŤĄĐČ:9Q‘řÓč ŁYL˘4®dţ4$1FX¤HĄľńU?Z}Á FȱF¨ßyB€ÖŹ"+ĘňŁňóť›F3ô©( Ú^5FŠ6EűŞT>”Ż R/1_şYAÇŇźE0ĹJ%1ˇtrŁ#ń EĘe őpŁ'ń§Ń@ HbڱH‘K}âŞ~´‹)"ĹŁ}ĺ ?Z’ŠŹČ‡ĘňĽ¨üĽçfŃŚý(h!xŐ(Ůî©P@úT”P¤*^$bżt˛ŹĄ(ĚľiŤLřńĎçO˘€ (˘€ (˘€ (˘€ (˘€ MŁpl áî)h $1FX¤HĄľńU?ZE‚Ť‘bŤQľň…­IEGäCĺy^T~^słhĆ~”4ĽjŤl‹÷T¨ }*J(Ź R/1_şYAÇŇm(”ƆAŃĘŚŹĆźE0EĘe őpŁ'ńˇ!Š2Ĺ"E-÷ЍúÓč ÖR6EŠ5FűĘ~´yů^W•—śěÚ1źĄIEFĐBńŞ4Q˛/ÝR ôĄxb©x‘ŠýŇĘ>”ú(†(ÚQ)Ť Ł•Ť(ÖS(Ť‡«…?Ť>ŠbEeŚq˘ëµ@Í>Š(˘Š(ěęWńţu%Gú•üťI@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Ý«Z©VîżŐŹ­T Š( Š( Š( Š( Š( Š( Š( Š( Š( Š( Š( Š( Š( Š( Š( Š( Š( Š( Š( Š( Š( Š( Š( Š( Š( Š( Š( Š( Š( Š(  °©_ÇůÔ•ęWńţu%QEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQE!†ëIĺ§÷ň§Q@ ňÓű‹ůQĺ§÷ň§Q@ ňÓű‹ůQĺ§÷ň§Q@ ňÓű‹ůQĺ§÷ň§Q@ ňÓű‹ůQĺ§÷ň§Q@ ňÓű‹ůQĺ§÷ň§Q@ ňÓű‹ůQĺ§÷ň§Q@ ňÓű‹ůQĺ§÷ň§Q@ ňÓű‹ůQĺ§÷ň§Q@ ňÓű‹ůQĺ§÷ň§Q@ ňÓű‹ůQĺ§÷ň§Q@ ňÓű‹ůQĺ§÷ň§Q@ ňÓű‹ůQĺ§÷ň§Q@ ňÓű‹ůQĺ§÷ň§Q@ ňÓű‹ůQĺ§÷ň§Q@ ňÓű‹ůQĺ§÷ň§Q@ ňÓű‹ůQĺ§÷ň§Q@ ňÓű‹ůQĺ§÷ň§Q@ ňÓű‹ůQĺ§÷ň§Q@ ňÓű‹ůQĺ§÷ň§Q@ ňÓű‹ůQĺ§÷ň§Q@ ňÓű‹ůQĺ§÷ň§Q@ ňÓű‹ůQĺ§÷ň§Q@ ňÓű‹ůQĺ§÷ň§Q@ ňÓű‹ůQĺ§÷ň§Q@ ňÓű‹ůQĺ§÷ň§Q@ ňÓű‹ůQĺ§÷ň§Q@ ňÓű‹ůQĺ§÷ň§Q@ ňÓű‹ůQĺ§÷ň§Q@ ňÓű‹ůQĺ§÷ň§Q@ ňÓű‹ůQĺ§÷ň§Q@`RŃEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQE˙Ůpython-tuf-5.1.0/docs/repository-library-design-usage.jpg000066400000000000000000001237651470074210500235340ustar00rootroot00000000000000˙Ř˙ŕJFIF˙ŰC    $.' ",#(7),01444'9=82<.342˙ŰC  2!!22222222222222222222222222222222222222222222222222˙ŔŔ"˙Ä ˙ĵ}!1AQa"q2‘ˇ#B±ÁRŃđ$3br‚ %&'()*456789:CDEFGHIJSTUVWXYZcdefghijstuvwxyz„…†‡‰Š’“”•–—™š˘Ł¤Ą¦§¨©Ş˛ł´µ¶·¸ąşÂĂÄĹĆÇČÉĘŇÓÔŐÖ×ŘŮÚáâăäĺćçčéęńňóôőö÷řůú˙Ä ˙ĵw!1AQaq"2B‘ˇ±Á #3RđbrŃ $4á%ń&'()*56789:CDEFGHIJSTUVWXYZcdefghijstuvwxyz‚„…†‡‰Š’“”•–—™š˘Ł¤Ą¦§¨©Ş˛ł´µ¶·¸ąşÂĂÄĹĆÇČÉĘŇÓÔŐÖ×ŘŮÚâăäĺćçčéęňóôőö÷řůú˙Ú ?÷ú(˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€"ťŮ89Şţ|źŢýOuţ¬}jĄIçÉýďĐQçÉýďĐTtPž|źŢý|źŢýGEIçÉýďĐQçÉýďĐTtPž|źŢý|źŢýGEIçÉýďĐQçÉýďĐTtPž|źŢý|źŢýGEIçÉýďĐQçÉýďĐTtPž|źŢý|źŢýGEIçÉýďĐQçÉýďĐTtPž|źŢý|źŢýGEIçÉýďĐQçÉýďĐT%Őz°¤ócţđ  üů?˝ú <ů?˝ú ÍŹűÂŹ6?ď źĎ“űß ŁĎ“űß ¨<Ř˙Ľ(ócţđ  üů?˝ú <ů?˝ú ÍŹűÂŹ6?ď źĎ“űß ŁĎ“űß ¨<Ř˙Ľ(ócţđ  üů?˝ú <ů?˝ú ÍŹűÂŹ6?ď źĎ“űß ŁĎ“űß ¨<Ř˙Ľ(ócţđ  üů?˝ú <ů?˝ú ÍŹűÂŹ6?ď źĎ“űß ŁĎ“űß ¨<Ř˙Ľ(ócţđ  üů?˝ú <ů?˝ú ÍŹűÂŹ6?ď źĎ“űß ŁĎ“űß ¨¨z0§ő  <ů?˝ú <ů?˝ú ŽŠ“Ď“űß ŁĎ“űß ¨č  <ů?˝ú <ů?˝ú ŽŠ“Ď“űß ŁĎ“űß ¨č  <ů?˝ú <ů?˝ú ŽŠ“Ď“űß ŁĎ“űß ¨č  <ů?˝ú <ů?˝ú ŽŠ“Ď“űß ŁĎ“űß ¨č  <ů?˝ú <ů?˝ú ŽŠ˝riőęWńţu%QEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQECuţ¬}jĄ[ş˙V>µ“޵ęé7gMD{ď%ľÎ®ŘS&>\źLâ€<·Z˝»›ÄŹŽíŢFłđýôV)“‰-×+tqĐüŇpéťvŢ!ńMÖ™©čš~—¦¦Ł>¬%ňXĎĺ˘lUmÄŕü¸br9ă€sY:O àÚŰęzM˝Ő˙’ÍËYä#ćlçÔśW>-|K j>ÓŢ {˝BÇíđF­6Ńq˘íů°v¶ĚuîĽő šçÇwz^™Ż˙jiQEŞčđ-ĂAÁh®#lídr AĘńбŠők[ý,kZ,Vv:¬˘icşó$ŠB D•v€ ~é 
=ë™ńf•ŞIáŹřŹX·ŠŇ{­9-`´Ž_0ĹrĚ‹1Öę Ë©®lďdą-ڧ$H…Im¬IÎF:Pµö˝®Ésĺib$·IĺąŐ'kdąňŔİďĐ\¶łă [Zđ˙†u&Ö;wź[KkZđ¨2##ÜŞwFĹI'ŘpsÄ×ţŐäń4÷š¦Źi⤷-žęábKwTÄ„ÄAfËeAĆqŠŠŰÁľ Óüah–¶rßéÚďöŠÁľ\sF› qňđÜ8Ĺiż‰ět-SĹ÷łi®·V­d˛ů3—7RÉŞpH\Ž˝M_ÄúĹŽŻ§Yx‹G¶łŹRs´ö·f`’í,#“*¸$‚22++Qđf««Íâ{äÚO-ŤŐ‘gŢ«,ąŽŰ†3éÍ\’ĂÄ^&Öti5}2ßK˛Ňî~Řán„Ď<ÁJ¨\µFâI<ž8  'â·&‡{®ŰřnŇôůeK–{ÜHë•fŤv`€y#śÓ'Đ ™.mâž2JH×>„dWmámN/†ZÖ„ËŰ®ţŰĺ ˙)ó]Ęäöá…v:tm¦ZA&7Ĺ #`÷   4QEQEQEŹŻęsŘZÇśk%őÔ«olŽ~RíÜ˙˛Xű)­ŠćµbOŚü*˝ľß/öéq@ţŃZ0úĹ´zŐŰs$ú‚ A?ěˇĘ ô ?3ÍM˙'˙čTĐ˙đ]˙]Ď˙ ŕ˙ú4?üC˙ÄŃ˙'˙čTĐ˙đ]˙]Ď˙ ŕ˙ú4?üC˙ÄŃ˙'˙čTĐ˙đ]˙]Ď˙ ŕ˙ú4?üC˙ÄŃ˙'˙čTĐ˙đ]˙]Ď˙ ŕ˙ú4?üC˙ÄŃ˙'˙čTĐ˙đ]˙]Ď˙ ŕ˙ú4?üC˙ÄŃ˙'˙čTĐ˙đ]˙]Ď˙ ŕ˙ú4?üC˙ÄŃ˙'˙čTĐ˙đ]˙]Ď˙ ŕ˙ú4?üC˙ÄŃ˙'˙čTĐ˙đ]˙]Ď˙ ŕ˙ú4?üC˙ÄŃ˙'˙čTĐ˙đ]˙]Ď˙ ŕ˙ú4?üC˙ÄŃ˙'˙čTĐ˙đ]˙]ĎźxG/…ôd?ŢŽĆ4aô +28äđÎż—çK.—|ŚÖžs—h$\‹qä©rç$maśb»:äĽnvÝř]‡_íb?˛ÜPŕäQMŚć5úS¨˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(ěęWńţu%Gú•üťI@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Ý«Z©VîżŐŹ­T «Ëck=ݽܶń˝Ĺ¶ď&V\´{†¶EX˘€ Ľł¶Ô,ĺ´Ľ‚9íĺ]˛E"ĺXzSB€Ą˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ ću_ůĽ+˙_ň˙é%ĹtŐĚężň;xWţżĺ˙ŇKŠíëĎŻ “Ćż5]îîęD†6ÚÚfÝM*—ŮHmŞ | ŽNkĐk‰ŐtťwCń}׉|;eĄˇ qj{Ě!vhňHÝľ\…$qţtk Fń}´~ńMŞ[8’ÍMGíÜ•ŁVbĘŕźQÚ¬Iăűémݵ]7Ăs^č6RH’Ţ‹ĄI$’$h˘#çPAęĂ8ⳠеŹx»GŐf𵟇­4뇹‘ÚX丹fB¸Ä|ÎI&°"ř{>‘gyĄZk&iMž©%ҬE‹)™Ks‚ăŢ€;kďLuË]'BŃźVšďMMJÂĂ”ĚFXp:tťĂަ™ÄHO†çÔ.4»µuĄ5]]Ţď WŕAwfź¤řn÷MńÍĄďŮŕK<=źľ•«.vŞ’X.:g>™®sTđ©©húŇIe˛Źť^ÚÖi@K¸„j› íÜ7Žz(¬°ńečÖ“H×t_ěËÉŕyí ] ăś&7®ŕŞCAĆ:w®ZďÇsxŔw:Íď†.íô)#…ă™uI%=h 7( IÝÜ)ŕf¬řwÂĚž$KűZh6Ööň/™<Ë-Ä’°Ű„ŘĺU@ÎIäç ëDžÖ[ŕu݆…¨ţŐŽ(ˇóW¬Ęíóg=h{Pńn uŰÝ#@ĐN«>ž×Ž÷Kn‘łŤĘŠH;Ż8ŕ ŚšÜŃu1¬čöş€µąµóÓq‚ć2’Fs‚qřő®U­µď x«\żÓ´GÖ,µvŠuÜÇÁ* B­ĽŚ©Ŕ9Çm*=Iü«YÖíg_7„r` Ś@8Ćá‘Öą˙xŰRÔüâ;­+AžM ą¶MIn•\˛†C"GŚ” ź›p8ăŠŇş¶ń‹µ] 5 ´{=2ő/î$–ę9L˛"°TŚ!?.[$¶8+!ôoi>ŐĽaˇ-ärGužˇö¨Ň3Ą›¤î7•é‚qÎ2h~]ŰŤhşN–Ú®¨ÚlWS¸Ç d܆9$ ŕž+oĂ~!OZÜďµ’ÎöÎv¶»µ‘¤pĘAÔá5oLšÖź«Üřj=zĄCgsh'D– cčęY‚° F{fşŻčshÚuăĎŁXé updKKV.É/ů!źŻÝăĄuuČřçţ>|/˙as˙¤—×W#ăźřůđżý…Ďţ’\PÔ_ę×éO¦Eţ­~”ú(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š»ú•üťIQÁţĄçRPEPEPEPEPEPEPEPEPEPEPEPEPEPEPEPEPEPEPEPEPEPEPEPEPEPEPEPEP7_ęÇÖŞU»ŻőcëU(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(®gU˙‘ŰÂżő˙/ţ’\WM\¶»"ŰxŁĂwrť°Ĺ¨aŘôd2ÄżřôŠ?Š(˘Š(˘Š(˘Š(˘Š*§ömźö±Ő~Îżn0}›Îç>^íŰ~™ć­Ń@/ôŰ=R(Ł˝·Y’)’xĂgĺ‘UżV袀 (˘€ (˘€ (˘€ ä|s˙>˙°ą˙ŇKŠë«Źń¬‹&­á›E9_ItWţ™¬!?÷Ô¨?Ü‹ýZý)ôČ˙ŐŻŇź@Q@Q@Q@Q@Q@Q@Q@Q@`˙RżŹó©*8?ÔŻăüęJ(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š†ëýXúŐJ·uţ¬}jĄQEQEQEQEQEQEQEQEQE‘Żi1jÖŰĘŃÔ‚ kŃ@M·Çüó•rßđžx“ţ…[üż˙#Ń˙ ç‰?čU±˙Á«˙ň=u?cţyŹĘʱÁ˙<Çĺ@·ü'ž$˙ˇVÇ˙Ż˙ČôÂyâOúlđj˙üŹ]OŘŕ˙žcňŁěpĎ1ůP#Ä/Ld á;A±ŠťÚ¤'ŰýĄK˙ ç‰?čU±˙Á«˙ň=tW–n,ć6QBnv(Kť»»gŞH¬ÓĘO64ó6ŤŰzgľ=¨™˙„óÄźô*Ř˙ŕŐ˙ůŹřOµR€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€1|I©ÝiÚ|Fš[™îí­‘¦Śş/›2FX¨e'ÉĆGJö?‹č`ŃżđO/˙$ÖwŚŐéöÓ˙ô®*îh—ţÇńoý 7ţ ĺ˙äš?±ü[˙CŤ˙‚yů&şŠ(—ţÇńoý 7ţ ĺ˙äš?±ü[˙CŤ˙‚yů&şŠ(—ţÇńoý 7ţ ĺ˙äš?±ü[˙CŤ˙‚yů&şŠ(—ţÇńoý 7ţ ĺ˙äš?±ü[˙CŤ˙‚yů&şŠ(—ţÇńoý 7ţ ĺ˙äš?±ü[˙CŤ˙‚yů&şŠ(—ţÇńoý 7ţ ĺ˙äš?±ü[˙CŤ˙‚yů&şŠ(—ţÇńoý 7ţ ĺ˙䚥w/tMKH[ýKLĽ¶ľ»kgX4ů uý̲ fqÖ01ŽőÚ×'ă_řűđżý…Ďţ’\ĐĐ9Ą¦§ÜJuÇŘŁĚeÎŔH!rGŇźEW±Ľ‹P˛Šîâ9FĺŢĄN>†¬Uoµźí3göiöĽß?oîúăn˝ßf€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€.ÁţĄçRTp©_ÇůÔ”QEQM.ŞĘ¬Ŕ3}ĐO&ť@Q@®ř•ôÍB×JÓôé5-ZéT¶Ij‘©É#žr@čI<MŃüE}u«ľ“¬hϦŢů?h¤âxe@@;\óFAóžhˇ˘ŞÁ©Ř]]Kko}m5Ä?ë!ŽUgO¨#ń¦¦Ż¦It–©¨Ú5Ä€”‰gRěr@ÎOCůPĘ*¬şť„±ŮK}mÔźęŕyT;ý94Ë˝ZĘŃš&»¶X! 
yŐŰŰÉô˙»EfÇ­ZE¤ŮßjW¶i‰l·(T3(;Cç ŚőzÖŠ°e ¤# ކ€Ч­¦Ý]Éio¨ZMswĂĘθőPr(ŹVÓfş[Xµ G¸pYbY”ą©9ě*ąESm[MK¤µ}BŃnŠ$FeĚ:€3’}«&O0Ôő[lCɧĎk3Ü,aÄŘ9¸ůAéÔă­tTUYu;/c˛–úÚ;©?ŐŔň¨wú.riłjÚmµâYϨZEu&6@ó*»g¦śšąEs—>0˛KýkNµĎĄŰ¤íÜ$BBÁľPÄńŤŁ$đ7 ׹Ő,,šĽ˝µ¶’oői4ĘĄĎ Éçđ  ”UK˝SOÓÁ7·ÖÖŔOť2¦8“ÜůUäIcY#ut`YNA¸4ę*¨Ôě ů°ÖĆđ ›5|ĚzíÎhžžoÍ€ľ¶7€dŰů«ćcýÜć€-QX~%ńM‡†ô«ë©e‚[›[v¸~z¬’*Śđ:öëŠĐ¸Ől,üy{mló˙«YĄT.}O4rŠ+#Rń&źĄkz^“s*­Î˘Î"˨ µsÎNy<S@ôV‡üY§ë¶PHf‚Úęi&D´yÔČDrm›Fqž@9 ⊩uŞiö-Ýß[[´Ç ĄT/ôÉć‹˝SOÓóöŰëklOť2§ŕO­[˘«Ü_ŮÚZ}®ćîmÎ’@©ĎOśS_R±ŽÄ_=íşŮ\4Ş#Ç®ěâ€-QY·Ľb+ ¬ü‹»{»•nQUWkŔ“óň¸Ú9ç=ŤVŃĽW¤ëzuŐý˝ÔIok<ĘňH .Ć+¸pă žŁ·EU‹R°žŃnâ˝¶’Ů(™%R„“€7gÉ‹=JĂQ lŻm®B€[É•_ČçŘţF€-QL†h®#C*K$F 2#Đ‚)ôQEQEQEQEQEQE ×ú±őŞ•nëýXúŐJ(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Šĺüc÷4żű i˙úWw5ĂxÇîiöÓ˙ô®*îhÄ´˝oĂS\k#Ä^7Ö,őő[¸ŇőiĐ$bR*Śc Ĺu^ńMţ•ŕČgŐ"żÔ..u-4”•\ŢFXůLű±´í–8ŕgë©á˙ đ泤k–ĐÍţŁw?—ťŔÇ$…”űú\ýç‚5ýCĂ6Zv¦–şśúˇćZ©~[űl !•}­ŚŕňŁ®I  ů|wö ]`jÚDÖZ†™doÚ×ÎYĐň7#Ž#0qRŘxÎiu 6KCşÓ­ő\‹‰eFŢŰK…u(ĹA ôÇŠău_ ɦřKĹú“xwLŃ!mX"†ÝüŮ›‚ĚÎă/ …éś×Am§x‡Ä“řwűfĆŇÎÇKuĽybąó Ô˘6DÚ»FĹůÉ9ç ÷  OÄoôÖĆ…v|4˛ě:•3·vß7ĘÎď/=úăśU‹ßNš¶§gĄč7:ši{EŰA: 2Ę÷q“ąţR=2x¬!á§‚›Ŕ‹kgö¦ŘjĆ祱l˙ŞŰź3oÎ3ÎhńOµ-Nţô'‡tëÉY麬WfÖ{O(ó ŤĎµą'#Ś ·âŻÜiăĹSioţеŃbąQ$ Eo3±°áĆ >¸µhGâűőŹKÓ`Ń&ĽÖç˛S[‹„U†,íň>cĐO^Şž Ö.—Ä1KsŇ_řz9.dl'U30ě pN¶Ó#AŹmtšEě—ŇjŮ2i਒+ťŚÁIÎŇŃóg`}kOHńS^jWzn«¦ľ•{kkŰG,Ë 1±`Heă TőÎĂŕÝežĆţč[}şo®Żyre BŃ„R@Ü@Ű“’MGń7OkýWC·Óo-NýäҧŤ[çkIWt­ĎĘ9éĎ˝vŢÖ4=Y-¤·Ží<Čă“ďl$í'ę0Ó¨ŕ‚+kx­á@‘DEP0ĺRP\źŤżăďÂ˙ö?úIs]er~6˙Źż ˙Ř\˙é%Íl§ÜJu5>ŕúS¨˘Š(µü÷ÖRMkjn¦\m„8RÜă©ŕzŐ€rĆ=©jµ¤—’5ÇÚŕH‚ĘD;vôěÇĐőâ€,ŃEQEQEQEQEQEQEQEQEQEQEvő+ř˙:’ŁýJţ?Τ Š( ť>*ř Ç:ßÄèi–×V˛yBÎhĺ ¶řP'?&žůĎZúĘ9â±·ŽćQ-ÂD«,€pĚ'ń5=QEĂëw#Ă_ŁńüS&ëL2\ĹH-¤IÁp ¬Śă¨ĎZÂMWYÖu«¨|3â-GW˛“OĽ/$–©0NTy ’×'$÷=kÔĽŘüăľh]Ĺ3óc¦qéO 𵽼·ž¶¶Ôaűu”ŠŇÚŮč¦)íŔډVâC' yK6§łŃ­ řkŁßGaęŰńJg7?nŰťŘĎÜăé^ĹLycŤ‘^EVsµ8ΩŔ4áúť®×ńMŽ­¨Cˇy¨LŃAýŽ×“ˇ?¸h$ó8]ˇq‘Íuvz<ř—Ć——¶‰q{Ą˛Ç<ŃĘ~ÍÉ^»I=p}=+Ň( !ФÓ4ąt+żĹ±“ÂöPŮKs x‘Âź9:HçÝérAmđýeÓô{Č-ă˛v·Ó¤fm•LäHĆ9ČČcŃŃ@!ŁÜC6łŕym$ÓĹu±­ôÝ9ăa qĺË31%‰ŔÁÁ$ŠM.MďŔöÚf—l?á(}_Í‹lÍW[˛L۱÷`‚ŮĆ_j÷ ĎŃ4{mJŹN´iŢGF˛îÎzÝŤy=Ěţm;Çşt¶‚]zďQąKD–i6Ż•ĺ¶?…Îx?/$ă5kV·»[Ż-ČgźűGBó †`Ńn#ńÍzv—ŁŰhćý­ÚCöŰ·»—Ě`pě8ŕ`|˘®¬±ĽŹȬéŤęJç¦Gjň±&i˙ ‡´ÇĽÖ®ui%†Ýbo´]F\ N0p«´d0 ´çĎřĆé/4ż@ŘY]ý˘cöa$÷×1‰Ś…°‰… ®wŻw˘€<{ĹbŐ.Ľ|¦WÚÚ°‹™@Iw•lsÉ\ţĄ$Ú›â/Â[j’5âCö35ą“Ď·¨ň˘ŕüÁ÷ĺG9 ű×§Ń@Uá I|GŁGâ+¸»·đĽ(Âę0űĘĂ?Ä÷ëë]OĂHĚ>±„©A×(ŞF6¨¸(ú+¬¦y±ůĆ1|Đ»¶gćÇLăŇ€<ŻĂRč6ú^—Ąjz[ÜřŞ=HÉnžőŘ5¤ {ăG›ăh‘É?*±€:rU!SPŽŰčÖ|0µżŹOŤuíő›Ď7wöŽÜçűś}*†­lăĹ–Z˝ü_^_ĘĐŔtv¸»¸ŚăČ0Ić.H@ÇÝ*sŠöć–5•"iHŕ•By`:ŕwĆE>€9źÇ0řa®E#<łŤubËó3y-ś€O$ú\F±áYtĎ iÄ·šĆ±Ą[Ç š–“svîŹ.Ş1’‡ °ä=«ÖžXâ($‘P»m@ÇŹ\SÁ§ĐŽëoü%úýć§©iööWöнҚě\[ůC)\ű‰AÉÜ :ÂMEńN†üŚ1Ść˝šŠň 6Y.ŕŃ1cf°ŻŠÔ‹«^8.Á¶“2Ş6vŚť§) âŞÚGcmomÝ ţÎÓĽKw&­·8Ef›ěîëŽP1Sž@ŕöŻi¦$±Ę\G"ąFÚŰNvźCčy ×mí5h|M>‘oťîçJŹ1ĆV)ć+ćş r6‡Žř®Ćq?/¬µźŘG»·“Iň-ă ľsĺíŰhâMŔźúi^ŹYţ·Ôµ»Fćęé–ČďŠĚ8ypĆK Çă§6¤Ĺ č:T'rZ±î=\ËrrOÖ´h˘€ ('h Š( Š( Š( Š( Š( nżŐŹ­T«w_ęÇÖŞPEPEPEPEPEPEPEPEP/㹥˙Ř[O˙Ҹ«ą®Ć?sK˙°¶ź˙ĄqWs@´íZÇVFĆ4ZÜ˝¬ß#.ŮS†^@Î3ÔqWkÇcĽÔm4]Y4Ë粸şńËÚ™Ud•Tđx={ÖͶ‘ŞOă]SÇĹzĐÓˇ˛†ńLžx‘Ů×fÜíů3·Ôúq@‹qo ÝĽ–÷0Ç42)WŽE ¬PA੭©ŘčđŰąDÍ«®x™<o>·sj·ÚuÜ·ĎlUsFŁ)ç$ŽĹ±Šf©yzł¶…wy-ňi^)ŇÖ ™°dhäd#J’Fz‘ŚĐ®QEQEU_ěË©ťKěV˙o1ůFçĘ_7g]»±ś{UŞ(˘Š(®OĆßń÷áű źý$ஞą?Çß…˙ě.ô’ć€6SîĄ:šźp})ÔQEUiţŰöŰo Aö_›íóż§Ë·uëšłPÝ-ĂZJ-ŕˇňÚA• Ű#Ň€&˘™•`ŚLĘŇ…ŮFlrE>€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€.ÁţĄçRTp©_ÇůÔ”QEQEQEçţ/ńÖ‡¬kóŮŰÚyö~[¸ćxA·› Ú[©N3ŹRM/Úüb|U‚úΞ¦ďOkă:Ř’mĘ:©DRř`KŻ-č}k ÖĽ!§kłjÝÉr­}§Ť>_-Ŕ ĚŮ –<ţ•xčÖÇÄë[ĄűTVŹf«‘łc:ą$c9Ęţ´Äiţ'ń&¸ľµ·ąłµ›P†řŢĎäoÁ·•# “Á$ž@ϵC.Ż{¨ëZž¤ŃIy¦x•í$šŘł˛Hęűrv’®23ÔżyŕY#×|5™ume§Ĺ~Íy ©ćG$φRĽ`©ó[¶ľ Ó-VĂl·RKi|úšI<ó˛23HqĎ Đcŕb€8ť;⳨[Úk0}˘x®neE˘\°3íÜ.6í.ç'îśVµ/xŞ?Äյ݂Űiˇ´ŽÍí‰7 ˝î}ß/ßă·9Ď^°‚HăŹQÔ×LŽăí)Ą‰”[«îŢ?‡~ÝÜíÝ·=ŞÜţÓgŇuM5äąň5;ły97,­…8ŕeŻzću_ëž›[‹SžŰTk}2;ëvŽŘõŢSÂ1+ť§=qž´šw‹u¨őłÉ5ĆĄ ¶sJÓľ‰qd¶˛˘îQ™ ŤČőČó]fĄá]/Wľ»ąľI%ű]ŹŘ%¶Ǹ¶F9 “×>•VÓÁCpgĽŐőmIÖÝíˇ7“!ňQĆhT\±|Í“ÇZçô˝Ĺ j:…ő”¶ţ ŰEjSČ/JŚqÜ~^Aăž1YľŐ5MŔë$šĺŚ&çYş‹ÎšŐÝÔ 
ĺÝĺƤ™d€×Ţ'…¬Ç@´?—ˇ´mjw ¶Čš!żŽ~V=1Íe˙½±Ť•íµ]VÚXŻd˝µx䌛W“w¨*ŰŰ!~ĹkZţ±­řCÄO|KiúŤ€K™t÷·’hä’2DÄ!9îAśÖíďő+XÖ¬­â°mJK˝:Â+Ło´4’ÇĚ’rŔ`ąô­Łđ÷L{-bŢkíJgŐšą¸’e2y‘QÔíŔ9 Ć6üŁ ž_é·0ß-ÍĹěó^}ť¤¸iBʲ@ $ŞT >FIéžŘâ€9ýsÄţ đŞk6WWv×óĹŁÉ©ŮÝ (©F Čę e”‚1ÜV­ŽĄâ izv­{iy«i<áa¶1y#ňÎĐwĘDçž3íSż€¬.­u4Ôu FţăQµű·w'őŘP*ŚśýŢO\ÖĚú-µĆł§jŽŇ‰ě"–(€#ilÝ‘ŽOČ1Ó˝cxÇ[¸ŇćÓíŕŐm´őą2oo˛˝ŐĂí(PőäžśqĎŐ§ŚüC}¤éń[Én—Ňř‚M%î.,Ţ<Ć"wb$~ËžŁf»=cĂ1jÚť¦§ˇ}§_ŰFđ¬öŚ™hÜ‚ĘÁŐ”Ś¨=21Tě| Ąéâ.âúC¦Ú ifZvŚĆŮ$d‚ź©ëŽ(oëúcęš4ÓÚ]ęQjvV—ŤDĹŔs nvüÝĎ*Ö‹mŞ[|UĽMRö×ţŋ˚8|˘WÎ~rFsžGlVĺ÷´˝F]VIÚăĚÔZ ’M¦'„~íă eX~}¸ĄŇ<)•¬Ď«É©j7÷óŰ­»Ëy"‘X°"¨“Đ~ĽĐ9®E~ߤdľU¶ťŚ&ĺw¨#9ŕ“śvÇ˝Qđžˇâ Ăţ{›ŰIôíN(,ľĘ–ĹZ6ěń°}Ç'ŤŮçŚWk©xb×SÖíőfş»·¸ŠŢKVXBMňUÁSĐ€F1Í"xVÁ4ÝÄ=Ç•ˇĽojw ±Hš1żŽ~V=1Ípv_ő›ëx5›´O×!F“‰rGdŰ‘q·ipż6~ď{×uâífëEŇ!kŢÝÝÁenf˘<®3‚@É8ďŚU$đ „R,qęZšé‹söˇĄ¬Ę-ĂîßýÝűw|ŰwmĎjŰÖ´kM{K’Âđ?–Ě®ŻmxÝHeunĚĺő]KĹ^¶0Ďy§ę^ÜŰZXÜ´#˛ąVóQČÁô÷Şzż‹5żM¨Zj3[ę®4Ö˝łCäâDŚŁ€HŰ™ä`ă#޶[Ŕ:uĚWgQľÔ5 Ë•Ť~Űq*¬ĐŰ|~YEP›[ćŕrz楋Ŕús‹öŐ.ŻuyŻmľÉ,×λ„9ÎĹŞgśś€sŔ kS}gCń®‰­ßŰę1[éş„ŕÁoä°*‘ł®7Ž;őÎišŤuË»­âVžň=JD[›8ôKRŃ\d:Î˵‚śOF+¦łđ=¬ĄľˇyŞjšśÖđIm˝•DNe!Pg äň{“Ĺw­4ű‹ý©ŞÜYéíşÎĆyÔĂÁ Ś(fÚflPž+Ľ6—žQĽżhÖf9Ź0Ęw&~ë|¸ČěHď\µż‰üSýmâiŻlM™Ő>ĆÖ+jAxŤŃvýÜ7qŽ;×{©čöÚ´Út· k ±wÂ\#&Ś9ý+¤µ5h­uIĚ×Öq΢)É?.ĺŇ h[ń®ło|×j]M{hg±šçN’{yË fJ‡÷x'oA€3ĎJ‡PÔµ]/Äž9Ö­oíä[]Úâ(üťĘ~YĘałČśs»¶+¬ÔĽi¨\]ĽZž§co{Ĺwkk"¦P»:2ż.ŘWMľđ—{5Ű-Ĺí´7špÓg·‚EĽJ!ÁRw(cź¨4“¨júţ•§é˨ř‹N‚{÷y E§É,Č»á…K0IËžsT´ßkzŽźźÂşťĆµ.—sŮĽ[cHĽÓ#BÄűxÚHçň®»Tđ´ŤÝ…ě:…öź{cĂĹ«&ăcr°ue í¦AF/‡ÚT:|Ö±Ýę"I/˙´’čÎ Ń\m ]XŽŕ‚ůŹâ€9˙[xˇ!Ń­ď5k6ŰŻ[,1Űa¤dvVˇĂ tĹzXÎNO­r’řÂ{)Ł›QÔŢúk¨ŻR2 ¸ó#B>M€Ür{ó]ZŤŞbŘÉęhh˘Š(˘Š(˘Š(˘Š(˘Š†ëýXúŐJ·uţ¬}jĄQEQEQEQEQEQEQEQErţ1űš_ý…´˙ý+Š»šâĽ]iwscŮÇ“ŰÝŰÜŞJĺü©’BĄ€$d&3ÖŞx”Â+c˙g˙äzčtG˛m­ŞŤ\ţóţ^ĎÓ qZ1höë·Âű]Ä o!-ňěBĹxőËăá:ń/ý ¶?ř6ţGŁţŻ˙Đ«c˙g˙äz/~Çý·á‹{w—ĄZÝ ¸†ëd°»”+‚Npăˇë]> ŃáÓáł ;ďŁÔZY&-$·Á»[?Šç˙á:ń/ý ¶?ř6ţGŁţŻ˙Đ«c˙g˙äzím4جďon’[‡{·Wu’RĘ„.Ü ?tqĐw«•çßđťx—ţ…[ü?˙#Ń˙ ׉čU±˙Áł˙ň=z çßđžřŹ;áµÝ×?ÚŤ·_#9ü??á:ń/ý ¶?ř6ţG A˘ĽűţŻ˙Đ«c˙g˙äz?á:ń/ý ¶?ř6ţG A˘ĽűţŻ˙Đ«c˙g˙äz?á:ń/ý ¶?ř6ţG A®OĆßń÷áű źý$ą¬ŻřNĽK˙B­Źţ ź˙‘ęuwÄÚ–‘ö˝ÎĆ łrĎóĚÍű™c ÄŁ¬€ç=¨´O¸>”ęDáE-QEQEV´Šî).MÍŇĚŹ!hTFËOîçżÖ¬Ői ą}B Ň쥺+ -ö$'ˇÝÔb¬ĐEPEPEPEPEPEPEPEPEPEPŘ?ÔŻăüęJŽő+ř˙:’€ (˘€ (˘€ (˘€8­{XŐ˙á9‡C±Ö,´»s¦›¶–ćŘJYÄ»6ŚşăźÂ“Ań]ĚZľ»§ëZ–ťykĄŰÇtÚ•ŞyHŞŰ˛’ Ě ąŕňJ“Tđś×ÄhďµM"ÖűKM ­sH«7ś¶H;sÎ?ÂąđF©m˘x“Âmş î1u¦Ď¸/•&ŕZÝČ;ĘđŘ8đ(ݰńž‘¨I$jníäXĺîŇHL±/W@ŕn#§<ŹZŹJń·¬\ZEk-Č[ĐM¤ÓZÉW ŽŔ@ŹcŚâąHt[©®ŰPźEÖmVĘÂçtÚ¶Şn Čńí+‰ŚĺŽ: OĹ«x‹Ă~´mk]=mo%Ľyc1ş¤8AV,KnČFG4ÖGă­ Kä·Y®<§źě©xm¤Ď6q°KŤ¤çޏ'Śćť'ŤôHő&łi.p·"Í®~Ë'ŮÖrqĺ™q´:ăˇć덧·‘e!0 \”ă;ź¦|ś”ÖĹďŽt+ Ű‹y¦¸)k Šćę;Y w8ů^@6©ägž3Î+şĐ5»hfş‹K–ĺáńYÔÖäŚ<°lŰąw0çˇ ńDúfże řŹĂ0hR]ťVâéíŻÄŃU. 
$Ë–Ţ n<9ŔǰM¨xëDÓµKť6V»–ęŐ<ÉÖŢŇIDJSxf*Žž§ŠŃţßӉ҂OĽęŁ6{ź1voÝě6÷>ŁÖ±Ľ1 Ýi:ţĽ÷n¶ž;8ˇ™>pŽŚqśŽ}kŔDńř‡QYŮd±đů“JÓX6ď•ßÍoĹPĂüĐg¬ř‚ĂC6étf{‹–+˝Ľ-4˛2ŘUŕ§ ¬Tń”7Ţ!Đă°ť›yoz÷&XĘ_ť6čúr9ă§|VUĹ¦Ż |.ĐoOkMwĂłG 1K"7ÚwbU*H!„śÎTq@ź§j–š´wYą‘ ¸’ŮŰiz0ęČČî r>!ř‡imi¤´˛JÚŚbáí$6îLĘ’*ÉŤĄ€ÝĐö=pk¦đŢŽş‡,4°ŰÚŢ $“ţzHyvüX±ük€:?­Ľ#§řI4 fkBkń4B'….VMë–ÝĽĘ•ĹÉă Ť÷Ťô=:ú{iĺ¸+jÁ.®#¶‘ŕ¶cŚ $jžFrxĎ8®k[ńž ž*Öl­ďN°Ňt˙´I,š\“+ąŲA´c{¦m;iĽCbtťcPťäóŰÉo«-&9+2y€Ťą áNŕ+OYđ¶©$0‚ŇĚş^h6öVx~ňDYPIČĆĺäúĐý÷Žô]2ć{9Ţň{›h’[…µ˛–_-womŞp¸őţ•f÷ĆEšŮě’âňKŘ~ŃV6ď;Ľ\~ójBň9>µ›¦č·đk^(¸–Űlw¶–±ŔŰ—÷…"ea׌ßÖą[o jÚ_öőÎź«Ü*h6ÖéWâ  š,śŢ u;ŕśď@ÜŢ2Ń"Óě/b¸–éuÂŇ+XYeŰ÷°€n{ä t85µR€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€Č®0Ă5Ů!ţŕ©č >É÷d‡ű‚§˘€ ű$?Ü}’î žŠě˙pQöH¸*z(˛CýÁU¬tů!–öXç—{ëŔž=…hUkű }JŃ­®Uš& ¬Tđr9JwŮ!ţŕŁě˙pSMý˛ę+§™GÚš#(ŹîŚçëVh˛CýÁGŮ!ţŕ©č >É÷= ŤU@©( Š( Š( Š(  ×öŇÝÚ ş’Ő÷)ó#śČçץY¦J†Hť”,¤^«î*;8ÖÎ(y'd\dűÍîhz(˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€.ÁţĄçRTp©_ÇůÔ”QEQEQEX_Ŕu6ÓÁ´,"r<¶Ű°±QócČ®˛\Áq§ßĎ ˝ŃÖ>ϢÄpŕű¬Á;ÜŽŐŢxÝä—áv˝$ˇDŤ¤ĚÎäb9ǵ"|Dđń0™¤Ô-b™•{­6â˛Ü.](Îz“]Uy#ńđî“eâ]JŔxZűÉ‚ň{6IanŔ址Bp ĆGŐ­^m_Xńw‰aŰz`ŇÄIoö}\Ů-şůĄG’[–ČÂăhÔfŠ9á’PůáŽyLަPc ‘Ś8äz ­ˇßßxŽűCŃu=R÷ű8Ǩ2\CpĐɨy3ăĚC–8#v2hÖ*µŐüRÚÇ1p×Sy1mŤ›ćÚ[śňxüëˤźVşŠçFłŐ$Ľ†Ë]{xa›Q{yݎhEÂüĹ‘óžBŕž*{-ZvMÂ;ŤfÚHĽFÖwv÷—~cĆ ´ŹĺyŞO*A$žž‚€=.Ęţ×Q¦łť&‰dx‹'MČĹX~řUŠń˝¶Ňô]&=BţÚËT×ďá»^IĽŢb‘«–Ę*2W°yÉ«ĺΡ¦>żˇişĆ ¶¶÷š_“9ąi%¶i¦ $~cĚ6…8bxlt4ë”Wśk˛|8ÔRe»żĽÓnôůb†+»©'?lŚ™#P\’ «2ńýÁ]†Ť ú…-“T»–ââÚŰ}ŐÄ®\łąÎOlç€/Ú^Ă{çů>gîfh_|lź2őĆ@Č÷R^ßÚéЬ×s¬1Ľ‰–ţ'f ŞrIĽĎĂßÚşý׆l/5mF(ŰI—Uż\şĽżhyQ– (ÝŚ@\ UK3â8ü'§Ü^ę/k6Żuűt˘Ak 8Ś ąo+ I#<hŘčŻ:—µM*MyLđŢÜß´:|ÇX1C”HM¸áÉŰ†Ü 9'ŽŰš‡™¨żŤ5ŰÝWSŹMŇťŁµ¶µ˝’ »e<ą#íÎN0ŰhŢ"°×Ţčiâĺâ·sťíÝ"‚Tůn@‚¤gő­^K§éo§éş/†-µî%¶ÓÓPÔa]Q­ŁÝ Ű–—vő]Čçbaz“IáÝGS×4˙ éSj7qXęw:„ćXî٦kh_Ä'áÎw»†*˝EzÝĺ7÷7iwĄč©}Şi÷·wr97͡!¦Ý.,@v' îŔ WMŕ/ž]'µśW†;hŢôÝĽ8P$C!ä€ŮĆIĆqž(°˘Š(˘Š(˘Š(Ş7š6›¨_YŢŢYA=Í“¶’D b'+čx•^˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€Č®0Ă"›äGýßÔÔ”P~DÝýMDÝýMIEGäGýßÔŃäGýßÔÔ”P~DÝýMDÝýMIEGäGýßÔŃäGýßÔÔ”P~DÝýMDÝýMIEGäGýßÔŃäGýßÔÔ”P~DÝýMDÝýMIEGäGýßÔŃäGýßÔÔ”P~DÝýMDÝýMIEGäGýßÔŃäGýßÔÔ”P~DÝýMDÝýMIEGäGýßÔŃäGýßÔÔ”P~DÝýMDÝýMIEEöhsť=3Utó%Ő±’ęĹ­$2ům l€p#ÔsWꮡěÖÁl.c·›z’ňGĽmćĎqţ{ĐŢDÝýMDÝýMIEGäGýßÔŃäGýßÔÔ”P~DÝýMDÝýMIEGäGýßÔŃäGýßÔÔ”P~DÝýMDÝýMIEGäGýßÔŐ[4YÇ*˝Ä×%ĺi™˛PáÂŻU[[6¶¸ş•®§NáÂHŮX¸čľ‚€&ň#ţďęhň#ţďęjJ(?"?îţ¦Ź"?îţ¦¤˘€#ň#ţďęhň#ţďęjJ(?"?îţ¦Ź"?îţ¦¤˘€#ň#ţďęhň#ţďęjJ(?"?îţ¦Ź"?îţ¦¤˘€#ň#ţďęhň#ţďęjJ(?"?îţ¦Ź"?îţ¦¤˘€#ň#ţďęhň#ţďęjJ(?"?îţ¦Ź"?îţ¦¤˘€T*ŕ Z( Š( Šóź|gđď„|Oý‡wäóG·íŔŞVŔÔäśN=}xŻC†hî Žx\Í)‘ßIŘ}FI8÷©ŢĘÖKČ.ŢÚâdŠRż2+cp¶v®~‚¸o‰úSÁáÍC]µŐő›K¸ü•U¶ÔeŠ!™OȬpM3Ä6×^—Ă«¤ÝkÝjá ťJI ŁČ— YÉÂd=~îpH ŰPŃôíUížţĘ —µ”MH€Üte=Ť Ń´Ńa±€ZG •!Ř6«‡Ţ]ß7Öą+ĎI=ŤŐľĄ§ÜŘ_Řj–VóEi{|×BŚ$ 7) ó) ެŰxćâű^ş±˛Ň#žŢÖôŮÎVőEĘŰLžF?ŐÎwdŽ@  «Ď řPÔĆĄwŁŮÍx ź9â±_şO©łŇ´®­`ľ´šÖęšŢd1ÉŚ«© ŽŕŠĺľ&^ę6ş“L,“<°ÂҤć'Ť^URT€y9Çlg=ŞmZ[Km_MkI‘ô8ci<˶śąhĽÂ7°ÜqÓ'˙­@Ćźgw§ľźqm¶o–Đ:…zc•ť¨xGĂÚ«Ŕ÷ú=ťĂAŠ3$@˘źUöŐsy¤ĂŞb[Áq¤„ŤĄ¶“@Ć9íŠużŹS˛Ń˙˛t‡¸Ôµ(Ą”ZÍ8‰`X›d…Üŕ9 0§9í@5éÚżŠż´ő[[›5ÓŇŇ;wŹ”evmĂĐa±Z÷Ţѵ-> ˝2Ö[H1äÄbbŔŔŮŹ»ÇW+?‰ĽA?ŠĽ/l4§±ŽćK¤Ľ¶š`7Ć2ß™@;”ŚnĎlRé/‘ô˝*ËHÓîu FđÝJ±Ţ^˙«Š9Š3É)Rq¸€ )ęjéĄđĆ…6Ź‘&“ftř4vâ űĂ'‘Ď&¤µđöŹcmmok¦ZìĆxU#d„\µ†#=y®nO%­ş.‰3ꏩť*[/=G•7–Ňż(@Sž89ĆF*ŐŻ‹ďĄ¶Ő˘›@”júlŃDöVó‰UüŔ 0…Âŕä’=hQü)áů"ľ‰ô{&ŽúA5ʆ%s¸Źďdç=sR[xoEłÓĹ…ľ—k •gň–1 !źV“ϸřÚöçĂ^+ÓŢôýRÇK7QÍaç¨S¸*•pGLwťâźęţ$:OÚ>×mq§G#Ëuň*ĘT-JąÎÓţöîŁÔkşŢ»­i^|¶G±ť/#3I:nŮÎpd™ČÇC[×Eum-Ľń¬Ę…$F ¤`ěEsrxÎ;(u˙í+3m>Ť\IËĽLŽ›—aŔÎX2tęľőW×_Eđ…λufŢeµ§Ú$µr\”ÝŽÝ3Šżo¦ŮZNg·µŠ)Z$„ş(ËLí_ ÉŔ÷ŞżđŽh¸ÓŔŇíGöqÍž#Čé÷}:ČW:ʦÉasbö05­Ë»Ď A¶Fcą‹äžMsZgÄŻáÓä–ĂĘ[ŤNM.wK…•!)+†çV xäŽ+{HÖ—X»Ő#†°XÝQ1lů®Ş ŕv ÇoÔ]KĂš.±u ÖŁ¦ZÝO íG–0Çns´úŚóĹ2çÂúŢ™ť>•jöp1xaňŔ“ś•ÇN§§©®2ĂĹšÖ—˙ UÜšTú†—§jł™®óJŞJÄ„ÁFN2Łž2sZş—Ä‹Sş´Ň¬­ŻVÎ$’âIŻŇß;×z¬`˝¶yÚ94Đ]řgCľÓ Ón4«G˛·Ç“”ĹŽ>\}ßÂŻYYZé¶qYŮ[EmmŰQ UQěQ·Ö-uo .±hŇ5­Ĺ§źkí+źŔ˙#\ľ›ă+Ů-´m7GŃ'Ô.'Ń ÔC]ß…!ĺĂČT–~8䓜P{EqOÄ 
ŤOűîMKm+Y“Č·ą{•2,»°ŃÂťŚně8¦Ű|@»—NŹYź@h4Cvmdş7@ȇÍňĽĎ/o)»ťŮëÁ4ÜŃ^wiăŃŁÖÁ{ŰąüC=ť”3\„U°]˛=°ć­7ÄŕŇő¦ÓCßéóŰC-­ĄŇĚ®'p¨ŃÉ€SÁĺqÇZĽkŻ-öˇ¦Ź )Ô, [©j+ĺ[8!ögy*ĂnÜq÷©Ó|DŠc§EĄYA<÷š|z‹ ËŐµH˘ş ŮsŔą"€;j+‰OFú=tŤîîµQr˘'¸XÄPáŰc$ň3ś šÎŐĂ•ně:cšôz+„Ô~$ĄťĆ§$|3éÚ\­ ÜÍ|‘ĚY?ÖyQ— Čä®H8ÍXŐ|_yqý«o éM Ť°{‹Ąą/ő ě«rW¨ç4ŮŃX~ žkŻxzââY&ž]6Ůä’F,ÎĆ5$’y$žőą@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@ţÇ˝ýƦóH¬ńpňź-UrsŔë×˙ŻVaš;RhdY"u ާ!čAĄ’4–6ŽE Ž ˛°Č ő©ĄŐ••ĺ¶“yNŃ3ĹDB\ d ˝(őQ@Q@Q@Q@U¶łk{»©ÚęyDěG#ebŔĆеUlěÚŇK—k«‰üůLelÇ÷WĐPŞ(˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€<çĹ<9âß˙n]ÍyŇműDP:…›hA ŕqéëÍz0ÇmpB"ŤB"ŽŠŔú(˘Š(ĹÚž$đĹŢ“ É “Čw¶EnßîÓµ˝MWPĐîReŚi×ßjpĂ;Ç•"`{ĺÁü+B÷Q±ÓcY/Ż-íQŽŐiĺTúMRą×ßÄz^!ßýˇó¬ÁřQ—Ć;çĚý(Tđ]ĹţŁŞÜ­ÜH··ÖJ ź”[•,ąŰĹRÖ< ©kZˇ{«Ť)ă‹sŁö]·Đ pÂ%eŔăCÝA5ÚZjš}ü’Gg}ks$\H°Ě®Sęâµ]:{‰­âżµ’h3F“)hŔë¸g#ń  >*Ńdń‡¦Ób™by%…øČ%G?\~5Ť©ř_[}c[źIÔ,ˇ¶ÖˇHî>Ń 4˛ˇMŃŕ€r¸ëĐŽý+¬–­äbćöpă÷ŁĘúńĎ«#PńUŽť®ŘéŇÉŽć;‡’ĺ¦UH >^U˝Ď;ŚcŢ€)ř“čÚťŤÔ—1ȶÚľ–UT‚Z2IoˇÍfŘř'TŃ`Ňn´ËëC©X-Ô.'Fň§†i|ÍĽ©RĎCë]—ö…‘±űwŰ-ţÇ·wÚ<Ńĺă×vqŠHő+­î+Űg¶rfYTŁplŕśń@ąđĆĽo´MN]ZÚćţĘćyn±0ŹËR<Ť ąÎzš­aŕťSDŹK»Ó/­Ąd—0H'Fň§†iڏă•*vŕóŢşÇ×4ěăĽ}RÉme;c™®#ź@ŮÁ5yX2†R ‘AŕŠâ­Ľv—6W÷WđËý˛u[ĆHĘŁ~ĺ˘ É W’yÁő¦ë޻էÖćŠň5 ›9Ä#4r,  Q˝«kĂľ+°ńډ`·ş™Ą hÓ©“ #&qÁ íĎJ×7¶« Ó5Ě"(IY\Č6ĆGPǶ=čĎgřs¨]&´LÚMÔt“§Ą˝Ť±Ž(âŔ˙µÔäŕvâµ5ŹÝj1řŹÉ˝†95Fł’čHŤ­öź›Ô˝»Ýłń&ť}âMCB†U7v1Ĺ$züŰ÷pŁ9ăoCů×(ßďcHM/P vZÓP…ž3LdYţú·zž•×Xx’ÂçĂšfły<_ŰE:-ÄĘ @Űrq’3ZksĚ"YŁ22y •點ôćš¶†ľř­Ůj‘Ë{¨_˝Ţťö;b§í8W‰q‡N9éÔő®çÂúCh^±Óĺ`÷›îűó1-#~.ĚjěÚť…µĽ—ß[E OĺÉ#ĘŞ¨ßÝ$śíTu/Ác&Š"UąŹTĽ±Ëjć7}Ůç#äÇă@Üţń CŻé°j¶)¦kwRË+4,f$\'8$¨ďĐóĎJe˙ĂébŐŻnô4Ic˝Ž5eŐ,üć·tAhĎpT.T÷Ď&»HµM>kײŠúÖK¸ţü 2—_Şç"§žâX{‰Ł†i$`ŞŁÔ“Ň€([i&×Ăi¤¬Ęě–ľGš"Xø-µ@žpcxoÂSčwúmÄ—QĘ-484¶ ¤nhÎKŹc] ęš{Řőľµk02nĘcôűŮĹęš}ͨş‚úÚ[rÁÉ2˛'dg$ {Đ1c໋OxSMk¸™ôKĹą‘ÂśHJ¸źëĺ\˙†<=®kž·Ó/.íbŃdżšYÂÂŕ˘];yyÎ0J»®1Ţ˝&ĎQ±ÔUÚĘňŢĺcm®`•\)ô8<ˇ/‰´Řˇj‘Ýl„ËůËÉĆGÎ~P©â€9í/Á÷Ţą’îŮŰMŠčL!¶XÚb‡äEmÇ9$c$š«yŕ­DŰ\˝•íŞÝť{űb9Ç÷Bělúň+~ÇÄÚ}˙o´8äQyg RČ»Ôç~î$®Ţxţ!N“Äš|^*‹Ă­*‹Ů-šäeÔ o\î9'č rW_.ÖďT#B0ęW/rno4ń5Ĺ«ąË„ĎĘă9+»ĎzŇąđ®±g{Ş6e ¶«,ës - ¬b=ń… ¨_”€ö®˘ OOą»–Ň ëinb˙Y r«:}TŠĄ®ëńhRibhĂ%őčµ.\(‹1»ď9ě­KáÝ1ô_ éZT’,Źeg»:Ś(I•iU{{ű;»OµŰ]Á5·'ÎŽ@ÉÇ^G–Z…–Ł–ĆňŢę0v—‚Up¦A  4QEQEQEQEQEQEQE6Ik“ž¸âŁűJz5_ęÇÖŞPż´§ŁQö”ôj©E[űJz5iOFŞ”Pż´§ŁQö”ôj©E[űJz5iOFŞ”Pż´§ŁQö”ôj©E[űJz5iOFŞ”Pż´§ŁQö”ôj©E[űJz5iOFŞ”Pż´§ŁQö”ôj©E[űJz5iOFŞ”Pż´§ŁQö”ôj©E[űJz5iOFŞ”Pż´§ŁQö”ôj©E[űJz5iOFŞ”Pż´§ŁS^íR6aą«ŚźaÍV˘€c©}®Ę‰-¦·y&)@ܞƬ}Ą=˛Ţ[±¨Ĺ[ŁZŘĽŢgĚ­žßĎźňlĐż´§ŁQö”ôj©E[űJz5iOFŞ”Pż´§ŁQö”ôj©E[űJz5TÓŃl-|–ąşą;ټˆ Üśă>‚ُ‡íŇĂćI…7ĆpË‘ŚŘŇ[Cö{X ó$—Ë@ľdŤ–l džäЇÚSѨűJz5T˘€-ýĄ=Ź´§ŁUJ(ßÚSѨűJz5T˘€-ýĄ=Ź´§ŁUJ(ßÚSѨűJz5T˘€-ýĄ=Ź´§ŁUJ(ßÚSѨűJz5T˘€-ýĄ=Ź´§ŁUJ(ßÚSѨűJz5T˘€-ýĄ=Ź´§ŁUJ(ú0u :uGú•üťI@Q@Q@Q@]âĺ{_Ź{©_ŮXŘ>š‘ZO§¨woc"™B9ĘVŞŽ—§SÂđ2ę3Ű+WآŰČ›ËfŞ˘;xá=6ôŻ_˘€ŕڶqŹj·â VŰOđźÄŕL5K«Ë™ ¶±i#tB$+€r{`ײŃ@>ڶ–źuä–ŠćîĘŇKv1`ČH$!±ë·<úV‹ď ŹÇ+ ăL°#O/ő 9.Śŕ»f(P0]Ăž¬wéPŠřn9ż˛<"ŤŞŃiÚÜlŹB‘@RżĂŔé]˙‡‘‡ÂM) ĂB„#śůŠęč ¶ŽK9<5w©ŢŮŘXź YĂm>ˇ§–@ż˝Aó(G#g^X v­8c›ÂŢĐ|SmĺŇi˛OÄö¦ Ňv;TGąPâ" ś…ě+×( ÔôK˝ ü'&§wmonÜÍywufnaKéJ»3¨eĆs V'Śc˝?IµŘş ¶óËqi/ŠžhY}š,iC“s,¶H»š(ĹőËvŐ ń5÷‡b?đŹÍq§™Ě6Ĺăźc“pëǡ|˝ŘűŰHÍt> H.|ku}cŞA{ÓÖ)žĂLű5±mŕ fóénŕrG˝Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š†ëýXúŐ'%cf@&®Ý«ZŁ/ú§˙tĐťřfďÇ~$đ­–ą»¤Ć×1—[y4ćŔ ‘‚ÂNśuĹoxCĆ–ľ$Ň4ą.6[jW°É'نHo-ĚnTô##8Îpkř{ŕK Wáţ“qy­4-ć[&Ł,qą†6Őżă 2ŰĂš.Ť¬é–ÂĽ;:ż•˙—Vů&QëňťßđµâďXřvŇu‚Dšö –"ŽD~a’]„śgÓÖ«?Ä /]ÚßŢ$zq±·¸µid/Ľź”Ä`Üw®wQµ–ßáCëW¨Ës¨ęPj×|cFť ß Qí]…5®ŁńCVÔ­Y&†]"ѡťG ŚÎx>řŃčc@]tęv˙ŮŚ>[ŤÜś`ąĎĆsÚ›¦řłCŐˇ»–Îý_싾á7Ťă\g%Ć\W™ŰŻŘměµ;]´ť7ĹwŇ]BÂ%%Ő$  ±=łZÚťý§‰ FŁ‚Ř3@]ŻŹĽ/{}ogm«Äó\°ťŽŘŚ…FÝßěç9㮂âxímĄ¸•Š$.äpÉŕr óËuŹá•" V]-Ŕ Ń‹!'ëÉüë´‡Jń kĆî_¬şoĚ,>ŔŠBśá|ĚçŽ9Ç8  ß řöĂ]Đ-/ď%Kiîn͢Ć#|o.â5ŽIUÎzg=:Vőöż¦i×RZÝ\ěť-ŤÓFůA‚–ŕň@ÇZňkFÚ/‡şç;´/¬şš%­SΛćaŚă u:vµcŻ|\:dßi´ŹA–?´"ź-›ĎڭѰÎ=h÷Ăj'ŃdŐŻő¦3VÖ+CÚňß.óţł#o=±]Ąr? 
_†z ‚-ú÷ŤuÔQEQEQEQEQEQEQEQEQEÇťöi~Ěcě>_ÝŘă8礶‹X…ÉC8Aćó´¶9Ć{fĄŞŃĂtş„ňÉtŐŐDpy`#©Ýß4fŠ( Š( Š(  ×¶K}hÓĎÉLÂűIÇb}qVjłXĆÚ’_—ĚXŚAť„ś‘ëďVh˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(ěęWńţu%Gú•üťI@Q@Q@Q@gŠőëË-n>ŰY‚ĹZßÍÁ§É}u#n#ýZ *q׹ČăK@ń¶ˇumá­CTńYjÜA3(#ą±SÉŕ2G'ˇş GÂq^ërjÖú¦Ł§\ÍŰÜ}ŤĐ ‘I*äbÜyR=j¬ż´YĽľf»ţĎG/‰z„ą~íçśĆ€0äńn­6‰¤^O©ÚicUiî#Qg%ÍĎ‘śÂ±Â î;H,Ǧ@Ç5—â»íV= íĐ[Í:xŠ]<Ë5™Ťö¬20#săúdŽőÖę~¶żż˛ľµľĽÓ.ěŕkhĺ˛1ŚÄŰIBqňŚq‘ŠŻaŕM/Nű?•=ë5&ŐË0rÓ4eq#$ÄúäőÇÇhšÝΗŕťÚ ZŰO[‰oK·Ůdşą}łľP 9ňǧsQK­ë>$´đî%†=FŰÄŇÚ%ÄÖ­*°Kó´$‚­´ýŇG"»đ ŤŞŘ6ź©jVI4qÜ@ń—hĺ“Ět`čĘFîGëQ˙¸ҖÉ-ŁľŐ#hő&Őqp ˘ŕˇBŰŠśŽK`çźn(śń©©´Wš«<7Séú®“,wQEĺů‘Ëp¸ ą 0(ĂŽÇ^ř÷Y–çYąÓŢ\i×rŰ[éɢÜÎ.ĽŁµ·NŠUY`1Âńśó]bxN6łÇsw}usqwÜ÷sHľlŹ Ś(PŁn0ŕžç4ËĎY]\^ő=RŇÎţS5却ʰÎç‰Ę—]Řçk.h¦·›íŃM±ÓĚ@űa—#8#±©)U*€F…-QEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQECuţ¬}jĄ[ş˙V>µBâ_łŰK1Ľ´/Ź\ Đ•…âoOâH!˛:¤¶şs/mă‰IąL·yĺuÖ ‡Äi®<—áý'űZâ{‡7+Q+ä ÜAË0Žťč¨¤(€ě+Ś“âąŃín!ÓĄ:Ś÷çM63J±yW ewäČ#9ČŔćˇ×üEpŢ ńëšő‹Y"oH.pł+ŔÇŚŹĆ€;Ş+…]U‡â†ĄksÍ"ŰLI™ŤŔŰĺÉ—n9'nÜv4áńę=: rďĂłŰřvv]·­p¦DŤ #ÄBśÁ$śPqH°®J_j2ř®˙BŇĽ=%ëX<"ćŕÝ,H‹" ČÉ<že>Âśľ:üoâě¤iç•m’ĸç™<ż/8ę=ş ĐYEą˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ ­{m5Ěq¬7r[•]™;€ę§=ŤY¤aąJ䌌du ˘«ŘÚµ•”Víq5Ë Á–fË·9äŐŠ(˘Š) 2HÔŇŐ[člďˇm>óc¬ęs|®0séŇ€ÎÂŢŔN-Ő‡ť+Lű·ĚÝz˙*łH±˘˘F…-QEQEQEQEQEQEQEQEQEQE]ýJţ?Τ¨ŕ˙RżŹó©(˘Š(źŐüqáťU‡LŐ5›[[ÉpV)đBÄpŁëŠč:׉|@ř¨řłÇ3kv:µ´6×{ ÂÎĽeT/ČĂp ŕ‘^Ígl¶V6öĚÉkł’c'ߊžŠ( +^Ö5řNaĐěu‹-.Üé¦íĄą¶–q.ÍŁ.¸ŕçđ¤Đ|WsŻ®éúÖĄ§^ZévńÝ6ĄjžR*¶ě¤sĂnx<ҤŐ<'µń;íSHµľŇÓH0«\Ä’*Í瀭’ÜóŹĆ°®|Ş[hž$đ†›nF»Ś]iłî ĺI¸·râ2Ľ6Á< ëěW ŞyçŚóŠć.´ nÚ®˘Ňĺąx|Vu5‚9#,6î]ĚyčH™ŻŮh>#đĚ—gU¸ş{kń4bK‚I2ĺ·‚›ŹNp1ěÓj:Ń4íRçM•®ĺşµO2u·´’Q”ŢŠ‚#§©â´·ôât “ď:¨ÍžĹ'Ě]›÷{ ˝Ď¨ő¬o h7ZNżŻ=Ä[­§ŽÎ(fbś#‡cg#źZÄđ‘<~!ÔVvY,|>dŇ´Ö »ĺwó[ńT0Ç˙4ÝOŞÚ[jÖzd˛uy˛B»I ±íÝĎAŤëůÖmÇŚt{x¤o2yd[DZXa·y$’e™UTp2sÓ­Rń=ľŁoâŤ]˛Ó'Ôa˛Žę á¶t(Źkě €cÁç˝s?đŹjŇé2ę~žW—]žűĘł˝Ý[٦âpę „nçŻŮÂkˇ˙c˙iůóţŃöO ŰÉçů˙óËĘĆíţŘéĎNh_h§L¸˝y§‹ěó-Ľ¶ň[Hł¬­Ť‰ĺcq-‘Śśń\Lľń5Ţ›§Ţ^.«rşv©$ÖöŹz‘ßW‹g2Łó`7r§Ôëá‹™´ÍfĺĽ3y0şžŰ÷úŁ=ä±ÄIŢĚ+©bPo9"€;˝Ä:çÚV×ĎŽ{W =˝Ě ±22¬ÁĐ×%ŻřúîĘoZZÚ:KŁiéqo4–ϱś‡'q´Ö­fÔţÜu5ŇŘÇöµiŁ–ĺHĚ%ź”ü¸ IŕÖ?Št-jćóĆpÚirÜG¬é1Em:IQ$k (Ŕ° ťă÷ źIń–‘«^ 8Ţâ)Úqąµ’š1ŚĽlŕQ‘ÓÔ”šwŤ´MRöŢÚŢK.‹ Yĺµ’8®H>[° Üx<‘‘Yţ&đĺi+yvë¦_ÚM8#÷-*F©Ćr~ééé\öáŤ@Oáű;ÍWGÓdŤçžďWi-PƤ…„¶O@T €;ëłčMˇ´O q]ę‘Ű\<˝"’3öűšŻˇx‚űÄÚęvF(<1dŠi÷—¬ ®~ä`‚yoaMń營đh¦ÉníaŐâžî6 /”© $‚y#ŽůŞxgQŇő=SJŇâ?đŽë¶ónĘ?ł®YH,ŞH;9ŔÎv€6´˙čšťäöň\rXZÍ-¬‘Ĺr@$Ý€VŕÁä ŚŐĽU¤ÍŁŘę±Ü3ZßL–ö˙»mÍ#6Đ»qA9éž•Âč>ÔŢłĽŃ5t}6HŢyîőv’Ń j@hPHKdô@jŢŤŁÍ˙ 6úŔmJ™őHTí¸ą@6ŰoďŘ×A@Óř˙Jľ¶ěËɢ1\EÜËa+DI™chĂ`Ķ:ńśö5fďâ‡ě®nˇš[˛¶sys%ś­łdŢ8]Ş9îk? 
jÉđŇĎJűÉ«-ĂĹ˝xA}ć–Îq÷9ëźĆźyáÍVOřîĹ,ÉşÔîMëűŐxĐ)Îp2A뎔Ńęž2Ńô‹ém.$ąy-ăܵ˝¬’­˛Ť#(!AÁ<öç§5ˇoŞ[ęzµśżčÓŰ ¤d?t®ŕJőéÚ¸[™µ ˙Ĺße°‹RŽę4žYî%ű‹uRł†`ÁpPŮń]„íĺo…š-¸Cć¶Ť <G;Ňíě4¸î'ążż»Óâ˝U˛°•Úda÷Â(%FA8=2qL¸ńőŁęއNŠk»-gĚ>|vŇ6Đ ŕ o˝źşÎ:ŐOxSÓ5MkËC[řbÚĆF.§lĘŮdŕöőéďT4­[ŇbđŚíĄK1°ľżűLQÉčŇy$Řü°a ŕô ˇ|qˇ¶ -D× ÇŮEß٤ű1›8ňĸۜń×ă9âłőĎÚ^ťm¬ÇeĺÝöť»Ö;)^4‘!]ÔaG~˝5ÉXx+RµÓ đíΑ«Ýî†n[We°h„»ÄžX“pl`ěŮ÷ąÎ9®ť4 I|/ă»O˛q©ÜŢ=˘î_އ…UsĆHÇ8 čľ3[}OţÖšú˙T»„ÜůşT«"GĐ“…Î@c÷»fµźĆšŇě/Ḛć=@°´ŠŢÝä–bżxpŰśŽőÍjw˙N†KM^]14¸˘Xô›¨ qp3¸Jĺ•¶ăn śjʇ`ŚgŇx#I—M±żš{ 줽»iü»›łq1]Şˇ¤bĚ˝ s@´ďZI?ŰRŽ[+M"ě@%{yeÚŽ\ł(䂸<ěé&Óő«©­ 0^B‚G¶»·x$z0W+‘ŚŽüW¨x{\{ýt[éŤ!Ő¶łhí*î„k1g9VůŚgÖć™o©jţ8O\éSé––ÚsŮÇËĆe™ŢEbHF`v2rIé@}Q@Q@Q@Q@Q@Q@Q@Ý«ZÍĽŤ¦±¸‰]ăeQęH­+ŻőcëTťÖ8ŮÝ‚Ş‚I=…yőׂő ˙„şN…$0ŤJĹ —ěňľcwŚ‚cf^ÄddzÓ4 mń5ŤäµŃ µWi§¸™e•ś®‹c',ÝGjíŕÖô˦łX/a‘Żbi­‚·úÔË/¨ťOck©ŮE{c:\[J7G,g*ĂÚ€<źĂOâMWáhZ"´WÂćŢ=E®EO+«\ďÜ2ŘńĎZéaҵxŠňóLҤŐě/í­˘eŠxă–!O,d9«.9 Ž•ŘéÚu¦“b–V,Ń–+çł=}I'ń«TćßđŚë1x~ęKÝOŐdÔőWżżŇÝÁ)(UX¤bzíS“Śó‚+5Ľ­ËáOŘéúmĹ…•ôGaĄ\ެ¬Ž.Ŕ–*€ńĆîݸŻ[˘€8Ť[AÔ®|k~ëdŇéz¶Źýź-ŇJ€Ű02rTXæk*çLń^©ŕ¨|q˘-żî˘´źTűLm”„č ď,U~éQ‚zצQ@Ö…¤]ŘxżÄ÷ÓDÖőíMłîxHB·ČÁő®bËD•ţ,ÝŘ#+hör˙mÁÎË©SË }9H>µé•›ŁxJđôĂĄYGl’ż™&ŇIvő$’Oô  *(˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (¤ Hę=(hŞÖš…µě—ŰČ]­¤1J6‘µ‡nzýE—Ń^Ip‘¬€ŰĘb}čW$zzŹzłEW¶˝¶ĽiÖŢe‘ Ĺ Âè5b€ (˘€ (˘€+[Yýšâćo´\Kç¸m’ľĺŹŚaGaVj´vb=B{ż>v2ŞŻ”Ď×Ôv'˝Y Š( ©Ăýźp/aň'šЉWˇţ%Í>öţŰN€Mw/—u@pNXśĹM1@ĄbŤ#RKŠ$ő4ú(˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€.ÁţĄçRTp©_ÇůÔ”QEQEQEQ\Š%mOĹŤ¦[ÇŞŢ=­’Ë-˝¶˘l`‡{6$ył§ŚĎSX^Ôo|Oká7TŐ/­Í•äňIŰĆ÷rE0ŤĘ…Xá c‚3Śšő™#IbxäPČŕ«)zŠeµ´6v°ÚŰD‘A ,qĆ ŠĐ^sËďi~˙„‹PşŇ‘o]Ą[—ŠYĄŠD•HvŘŽAÉÇ9ĹQżşĽ“[Ň|7§ęWú˝‚›Ćr5µ–Y#uIś|Ďĺ†9ÁÉîN(ÖhŻ%W×î´x´ďíšHu™bKí–IîaX·yĺ>mčÄžy źUWş†‹o¦XÜę+um«I Γ¨j†)ć-ţLwQ’Ě2°$ä‚x Y˘ą_^‹ť*öŐż´’{+Ç‚h5„Ň@ŘVŘ$ďP`’O<×#sćI˘xă\jQßi7×?c zâ;}ŠËÎÖ N0Ŕä P©]Mśâá7Ŕco1JîĘ㑎üUMŰLµĐě×GµKm=ăĂDc·͒¤ ÎNyĎZámăľŐĽGâkëÍGR€éÖÖňŰŮĂvńĹŹmąň óĎcÇSÔŐ8‚ĽâćÖďí–Z“ëZ˝ő†™—‚=Q¬âY›2Ľˇ±%[ ČP:V_‡ŚšőĎĂťCPľĽšä‹ĺ2-äź8ťą €ŮqóąP±Ő=KT´Ň-VćöSM,p† [çvŁŹV`+Čt˝GÄwz—‰‹Ď ô×Ëľâ}d‹|¶›łýŃĆT nĎ9®ăâ{ÍŃíâNşŤ‘Ž2ŰC·Ú#ŔĎlž3@•äw:îŻof’úéuéµlu8ţŃĺ­’´BĂś¬hP®Ů1“żq9[OńÝë:­ÍΚeŇţŃo šĂ]OÂżČá‰Ü¨ÝŹs@ŁEycxŽűĹşv©«ŘjßŮözLp’R‘%ěŰ]÷ăş Š RäŠŐđlóYxžçHľ¶Ő¬nä˛ ms©řUö™F%Ô’Ŕ8ń@őQ@QÓ4m7E†HtË(-#–C,‹Üç©>¦ŻQ@Q@ú—…4 bőo5Îęá@_2X@}@ô9­p€´PEPEPEPEPEPEPEPEPEPEPEPEP7_ęÇÖ˛µ?ů^×˙ĐMjÝ«ZĎą‡íłA»oŚ™Ćq‘Šój3Ă€,Ëň.4yžLĆ eV•čz~‚ş‰5}]gÝýŁşy;0#ň•‡\óťŢ®Y~ę)ᛟ ÇâA‰ "ĄóŔr[kH[ 2yůA#ŚŠ}ć§âMgĆ‘éV«—htoŢCjł0v‘×hÝŽĆÝă­2OjZnâ[k§YőÍ:óě¶ź o8Źł6ŃÇńsţᮋNđ×Ř|@šł^yŽ4¸´ó—´Ś[~rzîéú×1%•—‰ţ-A{df{}&#ý Ll±˝ĚlË ňć]ň6FG€=Ń'ŠĘîf\,j%”(ŘNLššŠ(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘ŠŽs*ŰĘĐ"ĽÁIEc€[~´ Ţ2j1Ú}šrŻHg űµÁëM·¶˛´š{îĎ›#düäż•Mng’Î3sĹ; ó[pVÇ őUmçd‰4Äq4g-Ç~\Pµą…‘H»WďôŁí0ů{üĹŰś~>•LÚÎčĂć •V|śź˝éNű,…YŚgqppe;¸g>ĽĐ¸Ľ˘Ąâ ‡9%GSę}ę˝˝˛é¶÷ ćÜÎŢsć1v䪏O@*kd‘!ħ-’}ńďďSPwI{g Ň$’ p˛.Öő=Vž[´ĽµHm–Kw-çĘdÁŚĆ|šł@Q@ÖŮlî®ďL÷yű?tĎ”BĹíśóęjXî’IL`0ažHă#¨ĎµTű*XA,BYfóĺyI¸ťťČ€c€*k4]üˇVD\ä ŚźÇ4rŠ*˝őÄÖ¶RĎoj÷R ĘÂŚoÄĐcĽŽ{ë‹?*PĐfgŚ„mÜŤ§żJµH¤•Ť¤ŽGĄ-QEQEQEQEQEQEQEQEQEQEQE]ýJţ?Τ¨ŕ˙RżŹó©(˘Š(˘Š(˘Š(+Sđ·¬ÝÇu©iv·Sƻ匷9Ú}F{)·Đn´Čôé´›FłŠC$pů@,nI%—t’OOSZôPEÇ…´­* .m"ͬmÎa‡Ęc>«Ž‡“Čő4\x[AşŇ ŇćŇ,ÚĆÜć< 3ę¸čy<Žy5ŻEdIám]=!ô‹3§ÄŰă·ň€ToďFäóדMo ř}ô…Ň[Głűżš°yC˙ľ?Ú÷ë[4PM7K°ŃěÖÓN´†ÖÝIaHdő'ÔźZçtĎiPjz†Ł¨ŘŮ^]ϨÉy ­-śmSž¤HôĎÖŃ@†źf˛ÝJ-˘]€.hĚ  ŁwŻTOŁi’ZYÚ=…»[Ů4okF ÂČ0…GbOJ˝Eej~Đő›¸îµ-.Öęx×bĽ±‚vç;OŞç±â‘ü1ˇÉoii6žUśćâŮ< 2ÜY}I‹ĄÜÉw$ú}´­yŠäĽ`ůČ3…lýŕ2zŐKoYč:}ÚřoO°ł»”d3Ćvł›Čů^+jŠÁđ÷†`Ň<;&™v°]˝Ô’Ďz| #šIXłü§?/8Đ µ¤xoEĐL‡JÓm­P´I†`:zŕvjQ@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Q@Ý«Z©VîżŐŹ­T Š( Š( Š( Š( Š( Š( Š( Š( Š( Š( «\ý·í6ßfyŹÚ<ĚîŰŽ6ăŚçÖ¬Őoôďí<ćßěON|ß7?–Ü~9  4QEQEQEGpfŇ›p†}‡Ë÷KcŚűf’ŰíÖ/µĹĆÁć‰Ű»ă<ă5-V±űwßÚgó·¶>Ď»nÜń׾:ĐšŽâą¶–,D(JDZ©*µĺŚĆ;~`•fM®Wć3ŽŁŻVHÄĹb•%TórĹŹL–ö'ëV¬B‹`UTeŽJç Śóô¨n\fB­s Ą•ŹÓ‘WPÔšuVíżlşűG‘ölŻŮög~1ónĎzb›uޤ Ů *ČZ,ŕuSžĆ’ň]ČD`ɱżxNzt÷Ĺ7íÎBc,Şyc`Î9'©>‚ŻU+H¸Hń±śsÜóŰĄ] Š( Š( Š( Š( Š( Š( Š( Š( Š( Š( Š(  
°©_ÇůÔ•ęWńţu%QEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQECuţ¬}jĄ[ş˙V>µR€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ ­‹ďí2soö'Ď›ćgňŰŹÇ5f«]­ëImöI!T8‘I%1Č\wé@h˘Š(˘Š(˘Š;qU¬>Űö(˙´|ʵsżěůŮÔăç¦+Öüu§řŞ{(Z1c‰ąŹs.>ţ9î? ôK»{űXî­eY`ezšŞEif÷Í©Âç’!•\Pă®:×ă?ßh^*µK7 p4-÷_q<C€0}ë¨đţ٦Ëáď,íÚÎĘ5f1ş-ŹQśň(n«}şÝµ&ÓňŢx‹Í#aŰ·8ëÓŻjĺ4?6şŢŢÚż ±I&ĐđîäĎĚQřŹJí(­µĄľ›h¶–1G Ĺ#ÎIÉýMUXěců©$¬ŤţŻ'9\~µˇ,)2íqśrŕę Â" ółł–cÉ  (˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š»ú•üťIQÁţĄçRPEPEPEPEPEPEPEPEPEPEPEPEPEPEPEPEPEPEPEPEPEPEPEPEPEPEPEPEP7_ęÇÖŞU»ŻőcëU(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(®ë–v Ł©cMűD>W›çGĺç· ~tćdSuÜ<Ä÷,?ž iĐS\B®ÓFľë?JxŁ*TRßt?JŐ˘€2ĚŃ,˘#" E,3ůP&‰Ą1 Č:¨aźĘµ(  DŠŮď$űt—ä2˛ĽĽˇŽ€Š}ŽĄi¨Ř­í´Á aťÇĺÇÔ•ąGJĘűD>W›çGĺç· ~t5Ä(Ší4aî±aô­Z()çŠ2ˇĺE-÷A`3ô¨î®„Kłl— „¤!†ç8ŕď[4P–č·ű[]ëłÇuzě]­•˛ žNîäűtú×y Ű„ňŕh‚Ć1µÂě:VµćC‘kľ*żÖµIŮ$b„8ů€ącŮxéßé×GĆňóD‹JŃ óZé¶Gőč; zf»Ę©|ŞD›I•§kqůgň 7Đ~­¤Ń]ęw®Ó! ±Ű1P¤µ×ňÇÖ˝ Š(˘Š(˘Š(˘Š(¦I4PăÍ•=70§Ó­Ůę©_śĆÄ7 Ęä~Łň ^xŁ*TRßt?JS4K(Č‚CŃK ţU©Ee‰˘iLBD2Şgň¤Iâ°IQŠýŕ}kVŠĘ[]ÖhĘ/Ţ`ĂëGÚ!ňĽß:?/8ݸcó­Z()®!DWiŁßu‹ Ąń äV·2Ű<ĐC Bń±VĆeŚŹJŢĐ5űik`d ˝˘’9¤‘H§ ާŁ^iáű?¶…â+ż ë6±”Ö/Śv3Y†.ÂVČŕžŮ\fŞËuđ‡ăŃ%ąąŹZÖŹö©ą¸ű4˛ÎÁÚH¤u»,ęĐß4í4WŚ_Ĺ©hş‹í#†ßI´ţÁyĆź¦×- %\¨(du* m¶źiáíGÁ·šĚó^js®K\4źn„ÂÎŇ8$‚A ŔöÎ:q@™Ex¨·ľIăwľą_€×lűCîYÄňvgsňlĆ1Ú­xŽĘćm[_Őç´Ŭ ¦GłÔÚŢóH+–§ä8ĺÇ99ç4é—Úíľšu˝‚ęK Qu-ŰE™~l…#’Ŕ.HÇqZL—ńĎvH×>„fĽ‹Ä×kVľ1“Ěž[Gđ­µÜV*rDĚŔ< Öť¦‘a©řŻMđí÷t‹m ;»k?=ÂM+ČCąç-´c“ŤÔéÔW‹L˛ŢZŰh"ţńô¸Lzíoéš·U/™@€2ä™0§Đín,ţt Q@Q@Q@R3*)f!T ’N´űw"ńSnAŤŽďLăőý*$t‘Ń•‘†C)Č"Ą·2}¬?vQ‹|ŚZMkVBŃ/uk¤‘ŕł…¦‘b±U8źĆ¨jŢ-Óô‹+ ¤ŽââăP*¶–Včy‰đ¤€I$ëUľ#É6ńýć˙ĐMqz4WŢ ×ô­wÄ—_Űë6±Y›ŃѦÉŐ#_H3ýá“Ö€=iIdV*T‘’§ÜR×’ęßř‡Ç~%·ąŇS‹O0ĹoşłÚ dhoUU9,I;úŤ¸íO±Ő/tM3Â>*Őu$ą·hĺÓu¦2Fńł1†\đ €Ç%Íz˝T‚ôϨ]Ú}’ć1młČ€G.ŕOČsÎ1ŔÁŻ*eά<š´÷h|Aw}y Néű·ť"Č …¨¤}}j?¤šm—Žě,.n-á´M"+mł1h†ĺIĎ©ďŢ€=’Šó=GN_řĆÔxynÝč÷ŇK Îň‰ĺFcbś¶XŚő9¬Yôí.Ű ×mu+“Şj†žg¸ű[–Ľg‘LŠă8 ścŤ¸ő f˘Š(˘Š(˘Š(…Ć϶¶3ćykź¦[Ö›N¸eűk(_śF¤·¨Ë`~‡ó¦ĐEPŘ?ÔŻăüęJŽő+ř˙:’€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€!ş˙V>µR­Ý«Z©@Q@Q@Q@Q@Q@Q@Q@Q@DŚČ#8męOÓpĎéšŇ¬Ň…ŮmżĽCźŁŹÇĄiPk->ÓNŽHěíă%•¦pťŽYŹą5NO č“[_[IĄÚĽňy×Q´`¬˛|Źďp9ëĹsWŤÖĂMÖďĽAu#C'Ó 1Ă’¨\(ÉúňO˝]—Ç¶Ż§ëM†ŁĄ§ÚˇgwjČî‡!\ ňąó‘Ň€$ÔĽ§/„5˝Bł¶˛źR¶xŚ­ź™Š ·,@Ďľ*ö‡á A‘.lôËX/|ˇÍsîű žp1šĘÓ|wđľ…y}k{&§©[«%śÄË3ÝW˛sťÄ‚*Ôž?Đ ĐßUťîbŠ+Ąłž„ĐĚqň˛uĎ ńśäc4př7ÇWţŐ:5§Ű|Ď;ĚŮ˙-?żŽ›żÚĆhÔĽáÍcP7Ú†ŹkqrŔw_ő€t:6=ó\ŤďŚ5-kĆ:•c»ŁÚĎ“NNQ+•tPpv Éɢ·#ř‹ŁK$¶ú—ö}ÄâŢ-HÚ°¶w-´aúŕ·±ŹzčGÓe’ćI,`fş€[O”Ěg}Wćn=ë>_xnm2ÓN}ŘÚÚ6č-Ç-µł‘’y皯ŞxëKŇŻn퍾ˇuö {-Ą«I¨#wÎGű<2@§^řßL¶ą[kH/µIL rë§[™Ľ¸›•f=FHOĄiĹ iZYZC§[Goc(šÚ4ډŔ#pżĚÜűšÉ×t ˙x‡JBÄhš}Â_ó4ňNˇ‚®1µTÎs“Śb·4ÍNĎYÓmő Ö{[„ß‹ĆGĐňbCV袊(¬Ín[Č­ űÍ+NŞDŤ€'>¸§UoKQdĂű §úâ€3ďžň8ŘòďPVFÚç“ő˘ůď#MŚ1Ë.ődm .y?ZłEVľ{ČŕSc rË˝AYh žO֋缎60Ç,»Ô‘¶€ąäýi׊Ďi(@Kc€;ŐYł;ĚÁ%”Aź,ó†$đzŹQ@/žň8ŮC˛ďPVFÚç“ő˘ůď#M”1Ë.ődm .y?Z¦cĆË!!Î"bťqś˙:ĺGąX€[hhK. 
é·9Ôrůď#MŚ1Ë.ődm .y?Z­®˝Ňh÷Ň(är¤0vŔ ¸ýqW Ýöx÷.ĆÚ2ąÎ)졔«TŚzđĎ Ýxš€ĐĹÔ‹ź™wEřçő⽟ĂŇk*ľ­mĽe†MŔśŚ;wîjÚ"F#UDQ€Ş0Kn„Ţ+îŔ°Űë’Ľţź­Oygo¨YMgw Mm2’7WSÔŽóK°Ô4ÇÓní!žÉĐ#A"ĺ ŽŘ•Uń=äúw„ő›ŰY<»‹{ć‰đ×XŘÁ䵇oă›[-7D·ľŽöóVľÓc»Ú[_•wrIç€5u?xwXx_PŇmçxcٰ;¶á$•ö9GĹ^»×tË_ÚŧÁ »F/$XŃ•‚D mçn2HÇ`k3Yř‹hÚNˇ¤[]J.µXěn#{Vó!ĂâHĘäbNŔVľˇăŤ:Âň{UłÔ®äµŤd»ű%«H-C ĂĚÇCŽvŚśv  Ůtű9îm.%¶Ť¦´,ÖîW‰R§o¦T‘Uî4*ěŢ‹$7žYąÜąó|ł”ÝëŽŐ‘©xűEÓŢĆ8ÖóP’ţŰíV±Ř[4Í,y€=Ž~‚ąÝ+ĆłËâ/jzµćĄeŁéN"KG˛UŹîÇ÷›Ě…ź…ˇßÜéÖ÷1ŢĐ^ĂÇÁPZ řÝŚúŕ~Uç¶ßŻçŐ4ůµ ?Ă–ßf»ŽîâţÂ6Ý´m¸ …BÇą€-†9ĹuzgŚluWű2[=GN»0´ńÇnbócR2ś‘ĆFAÁéUěľ h××v‘ÇúZŢËäÚ_KjËopüáUĎ® qšę¨®Qľ!h«;.řéë?ŮŰSÍöQ&í¸ó=7|»±·=ë« Š( Š(   Më&Ţjwzä·§ëM§Üűc?v#Rľ[?Ň™@Q@`˙RżŹó©*8?ÔŻăüęJ(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š(˘Š†ëýXúŐJ·uţ¬}jĄQEQEQEQEQEQEQEQE5•ƶŹ1}ĂçZuŢ^có3·ĚLc×pÇëŠÓ .ŐţõR­Ý«Z©@Q@Q@Q@Q@Q@Q@Q@Q@Ę®0ĘzšiŠ6”JcC čĺFGăO˘€"Ťe2ĐHz¸Q“řĐĹb‘"–űĹT ýiôPk)"ĹŁ}ĺ ?Z<|Ż+ĘŹËÎvmĎҤ˘€#h!xŐ(Ůî©P@úRĽ1HTĽHĹ~éeJ}Ăm(”ƆAŃĘŚŹĆk)”F‚CŐÂŚźĆźE1!Š2Ĺ"E-÷ЍúŇ,¤l‹jŤ÷”(ýjJ(?"+ĘňŁňóť›F3ôˇ …ăThŁd_şĄAéRQ@ xb©x‘ŠýŇĘ>”Ł2ů¦52ăÇ?ť>Š(˘Š(˘Š(˘Š(˘Š)6ŤÁ°7‡¸Ą˘€Ĺb‘"–űĹT ýiR6EŠ5FűĘ~µ%‘•ĺyQůyÎÍŁúPĐBńŞ4Q˛/ÝR ô©( <1HTĽHĹ~éeJ Q´˘SG*2?}Ák)”F‚CŐÂŚźĆ„†(ˉ·Ţ* gëO˘€#X!HŮ(Őď(PúŃäCĺy^T~^słhĆ~•%A ƨŃFČżuJ‚Ň•áŠBĄâF+÷K(8úSč ŁiD¦42ŽTd~4ŁYL˘4®dţ4ú(‰q–1Ć[®Ő4ú( Š(  °©_ÇůÔ•ęWńţu%QEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQECuţ¬}jĄ[ş˙V>µR€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€ (˘€.ÁţĄçRTp©_ÇůÔ”QEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQE„ ­'–źÜ_ĘťE7ËOî/ĺG–źÜ_ĘťE7ËOî/ĺG–źÜ_ĘťE7ËOî/ĺG–źÜ_ĘťE7ËOî/ĺG–źÜ_ĘťE7ËOî/ĺG–źÜ_ĘťE7ËOî/ĺG–źÜ_ĘťE7ËOî/ĺG–źÜ_ĘťE7ËOî/ĺG–źÜ_ĘťE7ËOî/ĺG–źÜ_ĘťE7ËOî/ĺG–źÜ_ĘťE7ËOî/ĺG–źÜ_ĘťE7ËOî/ĺG–źÜ_ĘťE7ËOî/ĺG–źÜ_ĘťE7ËOî/ĺG–źÜ_ĘťE7ËOî/ĺG–źÜ_ĘťE7ËOî/ĺG–źÜ_ĘťE7ËOî/ĺG–źÜ_ĘťE7ËOî/ĺG–źÜ_ĘťE7ËOî/ĺG–źÜ_ĘťE7ËOî/ĺG–źÜ_ĘťE7ËOî/ĺG–źÜ_ĘťE7ËOî/ĺG–źÜ_ĘťE7ËOî/ĺG–źÜ_ĘťE7ËOî/ĺG–źÜ_ĘťE7ËOî/ĺG–źÜ_ĘťE7ËOî/ĺG–źÜ_ĘťE7ËOî/ĺG–źÜ_ĘťE7ËOî/ĺG–źÜ_ĘťE7ËOî/ĺG–źÜ_ĘťE7ËOî/ĺG–źÜ_ĘťE KEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQEQE˙Ůpython-tuf-5.1.0/docs/repository-library-design.md000066400000000000000000000266501470074210500222450ustar00rootroot00000000000000# Python-tuf repository API proposal: _minimal repository abstraction_ This is an attachment to ADR 10: _Repository library design built on top of Metadata API_, and documents the design proposal in Dec 2021. ## Design principles Primary goals of this repository library design are 1. Support full range of repository implementations: from command line “repository editing” tools to production repositories like PyPI 2. Provide canonical solutions for the difficult repository problems but avoid making implementation decisions 3. Keep python-tuf maintenance burden in mind: less is more Why does this design look so different from both legacy python-tuf code and other implementations? * Most existing implementations are focused on a specific use case (typically a command line application): this is a valid design choice but severely limits goal #1 * The problem space contains many application decisions. Many implementations solve this by creating functions with 15 arguments: this design tries to find another way (#2) * The Metadata API makes modifying individual pieces of metadata simpler. 
  This, combined with good repository API design, should enable more variance in where things are implemented: the repository library does not have to implement every little detail, as we can safely let specific implementations handle things (see goal #3)
* This variance means we can start by implementing a minimal design: as experience from implementations is collected, we can then move implementation details into the library (goals #2, #3)

## Design

### Application and library components

![Design: Application and library components](repository-library-design-ownership.jpg)

The design expects a fully functional repository application to contain code at three levels:

* Repository library (abstract classes that are part of python-tuf)
  * The Repository abstract class provides an ergonomic abstract metadata editing API for all code levels to use. It also provides implementations for some core edit actions like _snapshot update_.
  * A small amount of related functionality is also provided (a private key management API, maybe repository validation).
  * It is a very small library: possibly a few hundred lines of code.
* Concrete Repository implementation (typically part of application code, implements the interfaces provided by the repository API in python-tuf)
  * Contains the “application level” decisions that the Repository abstraction requires to operate; examples of application decisions include:
    * _When should “targets” metadata next expire when it is edited?_
    * _What is the current “targets” metadata version? Where do we load it from?_
    * _Where to store current “targets” after editing? Should the previous version be deleted from storage?_
* Actual application
  * Uses the Repository API to do the repository actions it needs to do

For context, here's a trivial example showing what “ergonomic editing” means -- this key-adding code could be in the application (or later, if common patterns are found, in the python-tuf library):

```python
with repository.edit("targets") as targets:
    # adds a key for role1 (as an example, arbitrary edits are allowed)
    targets.add_key(key, "role1")
```

This code loads current targets metadata for editing, adds the key to a role, and handles version and expiry bumps before persisting the new targets version.

The reason for the context manager style is that it manages two things simultaneously:

* It hides the complexity of loading and persisting metadata, and of updating expiry and versions, from the editing code (by putting that complexity in the repository implementation that is defined in python-tuf but implemented by the application)
* It still allows completely arbitrary edits on the metadata in question: the library does not need to anticipate what the application wants to do, and on the other hand the library can still provide e.g. snapshot functionality without knowing about the application decisions mentioned in the previous point

Other designs do not seem to manage both of these.

### How the components are used

![Design: How components are used](repository-library-design-usage.jpg)

The core idea here is that because editing is ergonomic enough, when new functionality (like “developer uploads new targets”) is added, _it can be added at any level_: the application might add a `handle_new_target_files()` method that adds a bunch of targets into the metadata, but one of the previous layers could offer that as a helper function as well: code in both cases would look similar, as both would use the common editing interface (see the sketch below).
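To make that concrete, here is a minimal sketch of such a helper. This is hypothetical: the `repository` object and its `edit()` are the proposed API above, `TargetFile` is from the existing Metadata API, and the function itself is not part of the proposal:

```python
from tuf.api.metadata import TargetFile

def handle_new_target_files(repository, target_paths):
    """Add the given local files as targets (hypothetical helper sketch)."""
    with repository.edit("targets") as targets:
        for path in target_paths:
            # from_file() measures the file and computes its hashes
            targets.targets[path] = TargetFile.from_file(path, path)
```

Whether this code ends up in the application or in a library layer, it is the same few lines written against the same editing interface.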
The proposed design is purposefully spartan in that the library provides very few high-level actions (the prototype only provided _sign_ and _snapshot_): everything else is left to the implementer at this point. As we gain experience of common usage patterns, we can start providing other features as well.

There are a few additional items worth mentioning:

* Private key management: the Repository API should come with a “keyring abstraction” -- a way for the application to provide roles’ private keys for the Repository to use. Some implementations could be provided as well.
* Validating repository state: the design is very much focused on enabling efficient editing of individual metadata. Implementations are also likely to be interested in validating (after some edits) that the repository is correct according to the client workflow and that it contains the expected changes. The Repository API should provide some validation, but we should recognise that validation may be implementation specific.
* Improved metadata editing: there are a small number of improvements that could be made to metadata editing. These do not necessarily need to be part of the repository API: they could be part of the Metadata API as well.

It would make sense for python-tuf to ship with at least one concrete Repository implementation: possibly a repo.py look-alike. This implementation should not be part of the library but an example.

## Details

This section includes links to a Proof of Concept implementation in [repository-editor-for-tuf](https://github.com/vmware-labs/repository-editor-for-tuf/): it should not be seen as the exact proposed API but as a prototype of the ideas. The ideas in this document map to POC components like this:

| Concept | repository-editor-for-tuf implementation |
|-|-|
| Repository API | [librepo/repo.py](https://github.com/vmware-labs/repository-editor-for-tuf/blob/main/tufrepo/librepo/repo.py), [librepo/keys.py](https://github.com/vmware-labs/repository-editor-for-tuf/blob/main/tufrepo/librepo/repo.py) |
| Example of repository implementation | [git_repo.py](https://github.com/vmware-labs/repository-editor-for-tuf/blob/main/tufrepo/git_repo.py) |
| Application code | [cli.py (command line app)](https://github.com/vmware-labs/repository-editor-for-tuf/blob/main/tufrepo/cli.py), [keys_impl.py (keyring implementation)](https://github.com/vmware-labs/repository-editor-for-tuf/blob/main/tufrepo/keys_impl.py) |
| Repository validation | [verifier.py (very rough, not intended for python-tuf)](https://github.com/vmware-labs/repository-editor-for-tuf/blob/main/tufrepo/verifier.py) |
| Improved Metadata editing | [helpers.py](https://github.com/vmware-labs/repository-editor-for-tuf/blob/main/tufrepo/helpers.py) |

### Repository API

Repository itself is a minimal abstract class: the value of this class is in defining the abstract method signatures (most importantly `_load()`, `_save()` and `edit()`) that enable ergonomic metadata editing. The Repository class in this proposal includes concrete implementations only for the following:

* `sign()` -- signing without editing the metadata payload
* `snapshot()` -- updates snapshot and timestamp metadata based on the given input. Note that a concrete Repository implementation could provide an easier-to-use snapshot that does not require input (see the example in git_repo.py)

More concrete method implementations (see cli.py for examples) could be added to Repository itself, but none seem essential at this point.
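As an illustration, a provided `snapshot()` could be built purely from the abstract primitives that are enumerated below. This is a rough sketch only: the exact signature (here, a `{rolename: version}` input) and the version handling are assumptions rather than part of the proposal; `MetaFile` is from the existing Metadata API:

```python
from tuf.api.metadata import MetaFile

def snapshot(self, role_versions: dict) -> None:
    """Sketch: record the given role versions in snapshot,
    then point timestamp at the new snapshot version."""
    with self.edit("snapshot") as snapshot:
        for rolename, version in role_versions.items():
            snapshot.meta[f"{rolename}.json"] = MetaFile(version=version)
    # edit() has bumped and persisted the snapshot version by now
    new_version = self._load("snapshot").signed.version
    with self.edit("timestamp") as timestamp:
        timestamp.snapshot_meta = MetaFile(version=new_version)
```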
The current prototype API defines five abstract methods that take care of access to metadata storage, expiry updates, version updates and signing. These must be implemented in the concrete implementation (a sketch of the resulting class follows the list):

* **keyring()**: A property that returns the private key mapping that should be used for signing.
* **_load()**: Loads metadata from storage or cache. Is used by edit() and sign().
* **_save()**: Signs and persists metadata in cache/storage. Is used by edit() and sign().
* **edit()**: The ContextManager that enables ergonomic metadata editing by handling expiry and version number management.
* **init_role()**: Initializes new metadata, handling expiry and version number. (_init_role is in a way a special case of edit and should potentially be integrated there._)
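Put together, the abstract class could look roughly like the following sketch. The method names follow the list above, but the type annotations are assumptions layered on the Metadata API and securesystemslib, not part of the proposal:

```python
from abc import ABC, abstractmethod
from typing import ContextManager, Dict, List

from securesystemslib.signer import Signer
from tuf.api.metadata import Metadata, Signed

class Repository(ABC):
    @property
    @abstractmethod
    def keyring(self) -> Dict[str, List[Signer]]:
        """Return the signers to use for each role name."""

    @abstractmethod
    def _load(self, role: str) -> Metadata:
        """Load current metadata for role from storage or cache."""

    @abstractmethod
    def _save(self, role: str, md: Metadata) -> None:
        """Sign metadata with keys from keyring and persist it."""

    @abstractmethod
    def edit(self, role: str) -> ContextManager[Signed]:
        """Yield role for editing; bump version and expiry before saving."""

    @abstractmethod
    def init_role(self, role: str) -> None:
        """Create new metadata for role with initial version and expiry."""
```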
The API requires a “Keyring” abstraction that the repository code can use to look up a set of signers for a specific role. Specific implementations of Keyring could include a file-based keyring for testing, an env-var keyring for CI use, etc. Some implementations should be provided in the python-tuf code base, and more could be implemented in applications.

_Prototype status: Prototype Repository and Keyring abstractions exist in librepo/repo.py._

### Example concrete Repository implementation

The design decisions that the included example `GitRepository` makes are not important but provide an example of what is possible:

* Metadata versions are stored in files in git, with filenames that allow serving the metadata directory as-is over HTTP
* Version bumps are made based on git status (so edits in the staging area only bump the version once)
* The “current version” when loading metadata is decided based on filenames on disk
* Files are removed once they are no longer part of the snapshot (to keep the directory uncluttered)
* Expiry times are decided based on an application-specific metadata field
* Private keys can be stored in a file or in environment variables (for CI use)

Note that the GitRepository implementation is significantly larger than the Repository interface -- but all of the complexity in GitRepository is really related to the design decisions made there.

_Prototype status: The GitRepository example exists in git_repo.py._

### Validating repository state

This is mostly undesigned, but something built on top of TrustedMetadataSet (currently an ngclient component) might work as a way to easily check specific aspects like:

* Is top-level metadata valid according to the client workflow?
* Is a role included in the snapshot and the delegation tree?

It's likely that different implementations will have different needs though: a command line app for small repos might want to validate by loading all metadata into memory, but a server application hosting tens of thousands of pieces of metadata is unlikely to do so.

_Prototype status: A very rough implementation exists in verifier.py: this is unlikely to be very useful._

### Improved metadata editing

Currently the identified improvement areas are:

* Metadata initialization: this could potentially be improved by adding default argument values to Metadata API constructors
* Modifying and looking up data about roles in delegating metadata (root/targets): they do similar things, but root and targets do not have identical APIs (a helper smoothing this over is sketched below). This may be a very specific use case and not interesting for some applications.

_Prototype status: Some potential improvements have been collected in helpers.py_
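As an example of the kind of helper the last bullet refers to (a hypothetical sketch in the spirit of helpers.py, not a proposed API), the root/targets difference could be hidden like this:

```python
from tuf.api.metadata import Root, Signed, Targets

def set_threshold(delegator: Signed, rolename: str, threshold: int) -> None:
    """Set a delegated role's signature threshold, whichever
    delegating metadata type is used (hypothetical helper)."""
    if isinstance(delegator, Root):
        delegator.roles[rolename].threshold = threshold
    elif isinstance(delegator, Targets) and delegator.delegations and delegator.delegations.roles:
        delegator.delegations.roles[rolename].threshold = threshold
    else:
        raise ValueError(f"{rolename} is not delegated by this metadata")
```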
python-tuf-5.1.0/docs/tuf-horizontal-white.png
[binary PNG data omitted]
python-tuf-5.1.0/docs/tuf-icon-200.png
[binary PNG data omitted]
python-tuf-5.1.0/docs/tuf-icon-32.png
[binary PNG data omitted]
python-tuf-5.1.0/examples/000077500000000000000000000000001470074210500154505ustar00rootroot00000000000000python-tuf-5.1.0/examples/README.md000066400000000000000000000002151470074210500167250ustar00rootroot00000000000000# Usage examples * [repository](repository) * [client](client) * [uploader tool](uploader) * [Low-level Metadata API examples](manual_repo)
python-tuf-5.1.0/examples/client/000077500000000000000000000000001470074210500167265ustar00rootroot00000000000000python-tuf-5.1.0/examples/client/README.md000066400000000000000000000025461470074210500202140ustar00rootroot00000000000000# TUF Client Example TUF Client Example, using ``python-tuf``. This TUF Client Example implements the following actions: - Client Initialization - Target file download The client can be used against any TUF repository that serves metadata and targets under the same URL (in _/metadata/_ and _/targets/_ directories, respectively). The used TUF repository can be set with `--url` (default repository is "http://127.0.0.1:8001" which is also the default for the repository example). ### Usage with the repository example In one terminal, run the repository example and leave it running: ```console examples/repository/repo ``` In another terminal, run the client: ```console # initialize the client with Trust-On-First-Use ./client tofu # Then download example files from the repository: ./client download file1.txt ``` Note that unlike normal repositories, the example repository only exists in memory and is re-generated from scratch at every startup: This means your client needs to run `tofu` every time you restart the repository application. ### Usage with a repository on the internet ```console # On first use only, initialize the client with Trust-On-First-Use ./client --url https://jku.github.io/tuf-demo tofu # Then download example files from the repository: ./client --url https://jku.github.io/tuf-demo download demo/succinctly-delegated-1.txt ```
python-tuf-5.1.0/examples/client/client000077500000000000000000000113621470074210500201350ustar00rootroot00000000000000#!/usr/bin/env python """TUF Client Example""" # Copyright 2012 - 2017, New York University and the TUF contributors # SPDX-License-Identifier: MIT OR Apache-2.0 import argparse import logging import os import sys import traceback from hashlib import sha256 from pathlib import Path from typing import Optional from urllib import request from tuf.api.exceptions import DownloadError, RepositoryError from tuf.ngclient import Updater # constants DOWNLOAD_DIR = "./downloads" CLIENT_EXAMPLE_DIR = os.path.dirname(os.path.abspath(__file__)) def build_metadata_dir(base_url: str) -> str: """build a unique and reproducible directory name for the repository url""" name = sha256(base_url.encode()).hexdigest()[:8] # TODO: Make this not windows hostile?
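# (The directory name is the first 8 hex digits of sha256(base_url), so each # repository URL gets its own metadata cache directory.)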
return f"{Path.home()}/.local/share/tuf-example/{name}" def init_tofu(base_url: str) -> bool: """Initialize local trusted metadata (Trust-On-First-Use) and create a directory for downloads""" metadata_dir = build_metadata_dir(base_url) if not os.path.isdir(metadata_dir): os.makedirs(metadata_dir) root_url = f"{base_url}/metadata/1.root.json" try: request.urlretrieve(root_url, f"{metadata_dir}/root.json") except OSError: print(f"Failed to download initial root from {root_url}") return False print(f"Trust-on-First-Use: Initialized new root in {metadata_dir}") return True def download(base_url: str, target: str) -> bool: """ Download the target file using ``ngclient`` Updater. The Updater refreshes the top-level metadata, get the target information, verifies if the target is already cached, and in case it is not cached, downloads the target file. Returns: A boolean indicating if process was successful """ metadata_dir = build_metadata_dir(base_url) if not os.path.isfile(f"{metadata_dir}/root.json"): print( "Trusted local root not found. Use 'tofu' command to " "Trust-On-First-Use or copy trusted root metadata to " f"{metadata_dir}/root.json" ) return False print(f"Using trusted root in {metadata_dir}") if not os.path.isdir(DOWNLOAD_DIR): os.mkdir(DOWNLOAD_DIR) try: updater = Updater( metadata_dir=metadata_dir, metadata_base_url=f"{base_url}/metadata/", target_base_url=f"{base_url}/targets/", target_dir=DOWNLOAD_DIR, ) updater.refresh() info = updater.get_targetinfo(target) if info is None: print(f"Target {target} not found") return True path = updater.find_cached_target(info) if path: print(f"Target is available in {path}") return True path = updater.download_target(info) print(f"Target downloaded and available in {path}") except (OSError, RepositoryError, DownloadError) as e: print(f"Failed to download target {target}: {e}") if logging.root.level < logging.ERROR: traceback.print_exc() return False return True def main() -> None: """Main TUF Client Example function""" client_args = argparse.ArgumentParser(description="TUF Client Example") # Global arguments client_args.add_argument( "-v", "--verbose", help="Output verbosity level (-v, -vv, ...)", action="count", default=0, ) client_args.add_argument( "-u", "--url", help="Base repository URL", default="http://127.0.0.1:8001", ) # Sub commands sub_command = client_args.add_subparsers(dest="sub_command") # Trust-On-First-Use sub_command.add_parser( "tofu", help="Initialize client with Trust-On-First-Use", ) # Download download_parser = sub_command.add_parser( "download", help="Download a target file", ) download_parser.add_argument( "target", metavar="TARGET", help="Target file", ) command_args = client_args.parse_args() if command_args.verbose == 0: loglevel = logging.ERROR elif command_args.verbose == 1: loglevel = logging.WARNING elif command_args.verbose == 2: loglevel = logging.INFO else: loglevel = logging.DEBUG logging.basicConfig(level=loglevel) # initialize the TUF Client Example infrastructure if command_args.sub_command == "tofu": if not init_tofu(command_args.url): return "Failed to initialize local repository" elif command_args.sub_command == "download": if not download(command_args.url, command_args.target): return f"Failed to download {command_args.target}" else: client_args.print_help() if __name__ == "__main__": sys.exit(main()) 
python-tuf-5.1.0/examples/manual_repo/000077500000000000000000000000001470074210500177525ustar00rootroot00000000000000python-tuf-5.1.0/examples/manual_repo/basic_repo.py000066400000000000000000000342471470074210500224440ustar00rootroot00000000000000""" A TUF repository example using the low-level TUF Metadata API. The example code in this file demonstrates how to *manually* create and maintain repository metadata using the low-level Metadata API. It implements similar functionality to that of the deprecated legacy 'repository_tool' and 'repository_lib'. (see ADR-0010 for details about repository library design) Contents: * creation of top-level metadata * target file handling * consistent snapshots * key management * top-level delegation and signing thresholds * target delegation * in-band and out-of-band metadata signing * writing and reading metadata files * root key rotation NOTE: Metadata files will be written to a 'tmp*'-directory in CWD. """ import os import tempfile from datetime import datetime, timedelta, timezone from pathlib import Path from typing import Dict from securesystemslib.signer import CryptoSigner, Signer from tuf.api.metadata import ( SPECIFICATION_VERSION, DelegatedRole, Delegations, Metadata, MetaFile, Root, Snapshot, TargetFile, Targets, Timestamp, ) from tuf.api.serialization.json import JSONSerializer def _in(days: float) -> datetime: """Adds 'days' to now and returns datetime object w/o microseconds.""" return datetime.now(timezone.utc).replace(microsecond=0) + timedelta( days=days ) # Create top-level metadata # ========================= # Every TUF repository has at least four roles, i.e. the top-level roles # 'targets', 'snapshot', 'timestamp' and 'root'. Below we will discuss their # purpose, show how to create the corresponding metadata, and how to use them # to provide integrity, consistency and freshness for the files TUF aims to # protect, i.e. target files. # Common fields # ------------- # All roles have the same metadata container format, for which the metadata API # provides a generic 'Metadata' class. This class has two fields, one for # cryptographic signatures, i.e. 'signatures', and one for the payload over # which signatures are generated, i.e. 'signed'. The payload must be an # instance of either 'Targets', 'Snapshot', 'Timestamp' or 'Root' class. Common # fields in all of these 'Signed' classes are: # # spec_version -- The supported TUF specification version number. # version -- The metadata version number. # expires -- The metadata expiry date. # # The 'version', which is incremented on each metadata change, is used to # reference metadata from within other metadata, and thus allows for repository # consistency in addition to protecting against rollback attacks. # # The date the metadata 'expires' protects against freeze attacks and allows # for implicit key revocation. Choosing an appropriate expiration interval # depends on the volatility of a role and how easy it is to re-sign them. # Highly volatile roles (timestamp, snapshot, targets), usually have shorter # expiration intervals, whereas roles that change less and might use offline # keys (root, delegating targets) may have longer expiration intervals. SPEC_VERSION = ".".join(SPECIFICATION_VERSION) # Define containers for role objects and cryptographic keys created below. This # allows us to sign and write metadata in a batch more easily. 
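# (For orientation: each metadata file written below serializes a 'Metadata'
# envelope of roughly this shape, with field values elided:
#
#   {
#     "signatures": [ { "keyid": "...", "sig": "..." } ],
#     "signed": { "_type": "targets", "spec_version": "...", "version": 1,
#                 "expires": "...", ... }
#   }
# )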
roles: Dict[str, Metadata] = {} signers: Dict[str, Signer] = {} # Targets (integrity) # ------------------- # The targets role guarantees integrity for the files that TUF aims to protect, # i.e. target files. It does so by listing the relevant target files, along # with their hash and length. roles["targets"] = Metadata(Targets(expires=_in(7))) # For the purpose of this example we use the top-level targets role to protect # the integrity of this very example script. The metadata entry contains the # hash and length of this file at the local path. In addition, it specifies the # 'target path', which a client uses to locate the target file relative to a # configured mirror base URL. # # |----base artifact URL---||-------target path-------| # e.g. tuf-examples.org/artifacts/manual_repo/basic_repo.py local_path = Path(__file__).resolve() target_path = f"{local_path.parts[-2]}/{local_path.parts[-1]}" target_file_info = TargetFile.from_file(target_path, str(local_path)) roles["targets"].signed.targets[target_path] = target_file_info # Snapshot (consistency) # ---------------------- # The snapshot role guarantees consistency of the entire repository. It does so # by listing all available targets metadata files at their latest version. This # becomes relevant when there are multiple targets metadata files in a # repository and we want to protect the client against mix-and-match attacks. roles["snapshot"] = Metadata(Snapshot(expires=_in(7))) # Timestamp (freshness) # --------------------- # The timestamp role guarantees freshness of the repository metadata. It does # so by listing the latest snapshot (which in turn lists all the latest # targets) metadata. A short expiration interval requires the repository to # regularly issue new timestamp metadata and thus protects the client against # freeze attacks. # # Note that snapshot and timestamp use the same generic wireline metadata # format. But given that timestamp metadata always has only one entry in its # 'meta' field, i.e. for the latest snapshot file, the timestamp object # provides the shortcut 'snapshot_meta'. roles["timestamp"] = Metadata(Timestamp(expires=_in(1))) # Root (root of trust) # -------------------- # The root role serves as root of trust for all top-level roles, including # itself. It does so by mapping cryptographic keys to roles, i.e. the keys that # are authorized to sign any top-level role metadata, and signing thresholds, # i.e. how many authorized keys are required for a given role (see 'roles' # field). This is called top-level delegation. # # In addition, root provides all public keys to verify these signatures (see # 'keys' field), and a configuration parameter that describes whether a # repository uses consistent snapshots (see section 'Persist metadata' below # for more details). # Create root metadata object roles["root"] = Metadata(Root(expires=_in(365))) # For this example, we generate one 'ecdsa' key pair for each top-level role # using python-tuf's in-house crypto library. # See https://github.com/secure-systems-lab/securesystemslib for more details # about key handling, and don't forget to password-encrypt your private keys! for name in ["targets", "snapshot", "timestamp", "root"]: signers[name] = CryptoSigner.generate_ecdsa() roles["root"].signed.add_key(signers[name].public_key, name) # NOTE: We only need the public part to populate root, so it is possible to use # out-of-band mechanisms to generate key pairs and only expose the public part # to whoever maintains the root role.
As a matter of fact, the very purpose of # signature thresholds is to avoid having private keys all in one place. # Signature thresholds # -------------------- # Given the importance of the root role, it is highly recommended to require a # threshold of multiple keys to sign root metadata. For this example we # generate another root key (you can pretend it's out-of-band) and increase the # required signature threshold. another_root_signer = CryptoSigner.generate_ecdsa() roles["root"].signed.add_key(another_root_signer.public_key, "root") roles["root"].signed.roles["root"].threshold = 2 # Sign top-level metadata (in-band) # ================================= # In this example we have access to all top-level signing keys, so we can use # them to create and add a signature for each role metadata. for name in ["targets", "snapshot", "timestamp", "root"]: roles[name].sign(signers[name]) # Persist metadata (consistent snapshot) # ====================================== # It is time to publish the first set of metadata for a client to safely # download the target file that we have registered for this example repository. # # For the purpose of this example we will follow the consistent snapshot naming # convention for all metadata. This means that each metadata file must be # prefixed with its version number, except for timestamp. The naming convention # also affects the target files, but we don't cover this in the example. See # the TUF specification for more details: # https://theupdateframework.github.io/specification/latest/#writing-consistent-snapshots # # Also note that the TUF specification does not mandate a wireline format. In # this demo we use a non-compact JSON format and store all metadata in a # temporary directory at CWD for review. PRETTY = JSONSerializer(compact=False) TMP_DIR = tempfile.mkdtemp(dir=os.getcwd()) for name in ["root", "targets", "snapshot"]: filename = f"{roles[name].signed.version}.{roles[name].signed.type}.json" path = os.path.join(TMP_DIR, filename) roles[name].to_file(path, serializer=PRETTY) roles["timestamp"].to_file( os.path.join(TMP_DIR, "timestamp.json"), serializer=PRETTY ) # Threshold signing (out-of-band) # =============================== # As mentioned above, using signature thresholds usually entails that not all # signing keys for a given role are in the same place. Let's briefly pretend # this is the case for the second root key we registered above, and we are now # on that key owner's computer. All the owner has to do is read the metadata # file, sign it, and write it back to the same file, and this can be repeated # until the threshold is satisfied. root_path = os.path.join(TMP_DIR, "1.root.json") root = Metadata.from_file(root_path) root.sign(another_root_signer, append=True) root.to_file(root_path, serializer=PRETTY) # Targets delegation # ================== # Similar to how the root role delegates responsibilities about integrity, # consistency and freshness to the corresponding top-level roles, a targets # role may further delegate its responsibility for target files (or a subset # thereof) to other targets roles. This allows creation of a granular trust # hierarchy, and further reduces the impact of a single role compromise. # # In this example the top-level targets role trusts a new "python-scripts" # targets role to provide integrity for any target file that ends with ".py".
delegatee_name = "python-scripts" signers[delegatee_name] = CryptoSigner.generate_ecdsa() # Delegatee # --------- # Create a new targets role, akin to how we created top-level targets above, and # add target file info from above according to the delegatee's responsibility. roles[delegatee_name] = Metadata[Targets]( signed=Targets( version=1, spec_version=SPEC_VERSION, expires=_in(7), targets={target_path: target_file_info}, ), signatures={}, ) # Delegator # --------- # Akin to top-level delegation, the delegator expresses its trust in the # delegatee by authorizing a threshold of cryptographic keys to provide # signatures for the delegatee metadata. It also provides the corresponding # public key store. # The delegation info defined by the delegator further requires the provision # of a unique delegatee name and constraints about the target files the # delegatee is responsible for, e.g. a list of path patterns. For details about # all configuration parameters see # https://theupdateframework.github.io/specification/latest/#delegations delegatee_key = signers[delegatee_name].public_key roles["targets"].signed.delegations = Delegations( keys={delegatee_key.keyid: delegatee_key}, roles={ delegatee_name: DelegatedRole( name=delegatee_name, keyids=[delegatee_key.keyid], threshold=1, terminating=True, paths=["manual_repo/*.py"], ), }, ) # Remove target file info from top-level targets (delegatee is now responsible) del roles["targets"].signed.targets[target_path] # Increase expiry (delegators should be less volatile) roles["targets"].signed.expires = _in(365) # Snapshot + Timestamp + Sign + Persist # ------------------------------------- # In order to publish a new consistent set of metadata, we need to update # dependent roles (snapshot, timestamp) accordingly, bumping versions of all # changed metadata. # Bump targets version roles["targets"].signed.version += 1 # Update snapshot to account for changed and new targets metadata roles["snapshot"].signed.meta["targets.json"].version = roles[ "targets" ].signed.version roles["snapshot"].signed.meta[f"{delegatee_name}.json"] = MetaFile(version=1) roles["snapshot"].signed.version += 1 # Update timestamp to account for changed snapshot metadata roles["timestamp"].signed.snapshot_meta.version = roles[ "snapshot" ].signed.version roles["timestamp"].signed.version += 1 # Sign and write metadata for all changed roles, i.e. all but root for role_name in ["targets", "python-scripts", "snapshot", "timestamp"]: roles[role_name].sign(signers[role_name]) # Prefix all but timestamp with version number (see consistent snapshot) filename = f"{role_name}.json" if role_name != "timestamp": filename = f"{roles[role_name].signed.version}.{filename}" roles[role_name].to_file(os.path.join(TMP_DIR, filename), serializer=PRETTY) # Root key rotation (recover from a compromise / key loss) # ======================================================== # TUF makes it easy to recover from a key compromise in-band. Given the trust # hierarchy through top-level and targets delegation you can easily # replace compromised or lost keys for any role using the delegating role, even # for the root role. # However, since root authorizes its own keys, it always has to be signed with # both the threshold of keys from the previous version and the threshold of # keys from the new version. This establishes a trusted line of continuity. # # In this example we will replace a root key, and sign a new version of root # with the threshold of old and new keys. 
Since one of the previous root keys # remains in place, it can be used to count towards the old and new threshold. new_root_signer = CryptoSigner.generate_ecdsa() roles["root"].signed.revoke_key(signers["root"].public_key.keyid, "root") roles["root"].signed.add_key(new_root_signer.public_key, "root") roles["root"].signed.version += 1 roles["root"].signatures.clear() for signer in [signers["root"], another_root_signer, new_root_signer]: roles["root"].sign(signer, append=True) roles["root"].to_file( os.path.join(TMP_DIR, f"{roles['root'].signed.version}.root.json"), serializer=PRETTY, ) python-tuf-5.1.0/examples/manual_repo/hashed_bin_delegation.py000066400000000000000000000210021470074210500245760ustar00rootroot00000000000000""" A TUF hash bin delegation example using the low-level TUF Metadata API. The example code in this file demonstrates how to *manually* perform hash bin delegation using the low-level Metadata API. It implements similar functionality to that of the deprecated legacy 'repository_tool' and 'repository_lib'. (see ADR-0010 for details about repository library design) Contents: - Re-usable hash bin delegation helpers - Basic hash bin delegation example See 'basic_repo.py' for a more comprehensive TUF metadata API example. NOTE: Metadata files will be written to a 'tmp*'-directory in CWD. """ import hashlib import os import tempfile from datetime import datetime, timedelta, timezone from pathlib import Path from typing import Dict, Iterator, List, Tuple from securesystemslib.signer import CryptoSigner, Signer from tuf.api.metadata import ( DelegatedRole, Delegations, Metadata, TargetFile, Targets, ) from tuf.api.serialization.json import JSONSerializer def _in(days: float) -> datetime: """Adds 'days' to now and returns datetime object w/o microseconds.""" return datetime.now(timezone.utc).replace(microsecond=0) + timedelta( days=days ) roles: Dict[str, Metadata[Targets]] = {} signers: Dict[str, Signer] = {} # Hash bin delegation # =================== # Hash bin delegation allows distributing a large number of target files over # multiple delegated targets metadata. The consequence is smaller metadata # files and thus a lower network overhead for repository-client communication. # # The assignment of target files to targets metadata is done automatically, # based on the hash of the target file name. More precisely, only a prefix of # the target file name hash is needed to assign it to the correct hash bin. # # The number of bins is the only number that needs to be configured. Everything # else is derived using the mathematical operations shown below. # # The right number of bins depends on the expected number of target files in a # repository. For the purpose of this example we choose ... NUMBER_OF_BINS = 32 # ..., which determines the length of any hash prefix # considered for bin assignment (PREFIX_LEN), how many hash prefixes are # covered by all bins (NUMBER_OF_PREFIXES), and how many prefixes are covered # by each individual bin (BIN_SIZE): # # The prefix length is the number of digits in the hexadecimal representation # (see 'x' in Python Format Specification) of the number of bins minus one # (counting starts at zero), i.e. ... PREFIX_LEN = len(f"{(NUMBER_OF_BINS - 1):x}") # ... 2. # # Compared to decimal, hexadecimal numbers can express higher numbers with # fewer digits and thus further decrease metadata sizes. With the above prefix # length of 2 we can represent at most ... NUMBER_OF_PREFIXES = 16**PREFIX_LEN # ... 256 prefixes, i.e. 00, 01, ..., ff.
# # If the number of bins is a power of two, hash prefixes are evenly distributed # over all bins, which allows calculating the uniform size of ... BIN_SIZE = NUMBER_OF_PREFIXES // NUMBER_OF_BINS # ... 8, where each bin is # responsible for a range of 8 prefixes, i.e. 00-07, 08-0f, ..., f8-ff. # Helpers # ------- def _bin_name(low: int, high: int) -> str: """Generates a bin name according to the hash prefixes the bin serves. The name is either a single hash prefix for bin size 1, or a range of hash prefixes otherwise. The prefix length is needed to zero-left-pad the hex representation of the hash prefix for uniform bin name lengths. """ if low == high: return f"{low:0{PREFIX_LEN}x}" return f"{low:0{PREFIX_LEN}x}-{high:0{PREFIX_LEN}x}" def generate_hash_bins() -> Iterator[Tuple[str, List[str]]]: """Returns generator for bin names and hash prefixes per bin.""" # Iterate over the total number of hash prefixes in 'bin size'-steps to # generate bin names and a list of hash prefixes served by each bin. for low in range(0, NUMBER_OF_PREFIXES, BIN_SIZE): high = low + BIN_SIZE - 1 bin_name = _bin_name(low, high) hash_prefixes = [] for prefix in range(low, low + BIN_SIZE): hash_prefixes.append(f"{prefix:0{PREFIX_LEN}x}") yield bin_name, hash_prefixes def find_hash_bin(path: str) -> str: """Returns name of bin for target file based on the target path hash.""" # Generate hash digest of passed target path and take its prefix, given the # global prefix length for the given number of bins. hasher = hashlib.sha256() hasher.update(path.encode("utf-8")) target_name_hash = hasher.hexdigest() prefix = int(target_name_hash[:PREFIX_LEN], 16) # Find lower and upper bounds for hash prefix given its numerical value and # the general bin size for the given number of bins. low = prefix - (prefix % BIN_SIZE) high = low + BIN_SIZE - 1 return _bin_name(low, high) # Keys # ---- # Given that the primary concern of hash bin delegation is to reduce network # overhead, it is acceptable to re-use one signing key for all delegated # targets roles (bin-n). However, we do use a different key for the delegating # targets role (bins). Considering the high responsibility but also low # volatility of the bins role, it is recommended to require signature # thresholds and keep the keys offline in a real-world scenario. # NOTE: See "Targets delegation" and "Signature thresholds" paragraphs in # 'basic_repo.py' for more details. for name in ["bin-n", "bins"]: signers[name] = CryptoSigner.generate_ecdsa() # Targets roles # ------------- # NOTE: See "Targets" and "Targets delegation" paragraphs in 'basic_repo.py' # example for more details about the Targets object. # Create preliminary delegating targets role (bins) and add public key for # delegated targets (bin_n) to key store. Delegation details are updated below. roles["bins"] = Metadata(Targets(expires=_in(365))) bin_n_key = signers["bin-n"].public_key roles["bins"].signed.delegations = Delegations( keys={bin_n_key.keyid: bin_n_key}, roles={}, ) # The hash bin generator yields an ordered list of incremental hash bin names # (ranges), plus the hash prefixes each bin is responsible for, e.g.: # # bin_n_name: 00-07 bin_n_hash_prefixes: 00 01 02 03 04 05 06 07 # 08-0f 08 09 0a 0b 0c 0d 0e 0f # 10-17 10 11 12 13 14 15 16 17 # ... ...
# f8-ff f8 f9 fa fb fc fd fe ff assert roles["bins"].signed.delegations.roles is not None for bin_n_name, bin_n_hash_prefixes in generate_hash_bins(): # Update delegating targets role (bins) with delegation details for each # delegated targets role (bin_n). roles["bins"].signed.delegations.roles[bin_n_name] = DelegatedRole( name=bin_n_name, keyids=[signers["bin-n"].public_key.keyid], threshold=1, terminating=False, path_hash_prefixes=bin_n_hash_prefixes, ) # Create delegated targets roles (bin_n) roles[bin_n_name] = Metadata(Targets(expires=_in(7))) # Add target file # --------------- # For the purpose of this example we will protect the integrity of this very # example script by adding its file info to the corresponding bin metadata. # NOTE: See "Targets" paragraph in 'basic_repo.py' example for more details # about adding target file infos to targets metadata. local_path = Path(__file__).resolve() target_path = f"{local_path.parts[-2]}/{local_path.parts[-1]}" target_file_info = TargetFile.from_file(target_path, str(local_path)) # The right bin for a target file is determined by the 'target_path' hash, e.g.: # # target_path: 'repo_example/hashed_bin_delegation.py' # target_path (hash digest): '85e1a6c06305bd9c1e15c7ae565fd16ea304bfc...' # # --> considered hash prefix '85', falls into bin '80-87' bin_for_target = find_hash_bin(target_path) roles[bin_for_target].signed.targets[target_path] = target_file_info # Sign and persist # ---------------- # Sign all metadata and write to temporary directory at CWD for review using # versioned file names. Most notably see '1.bins.json' and '1.80-87.json'. # NOTE: See "Persist metadata" paragraph in 'basic_repo.py' example for more # details about serialization formats and metadata file name conventions. PRETTY = JSONSerializer(compact=False) TMP_DIR = tempfile.mkdtemp(dir=os.getcwd()) for role_name, role in roles.items(): signer = signers["bins"] if role_name == "bins" else signers["bin-n"] role.sign(signer) filename = f"1.{role_name}.json" filepath = os.path.join(TMP_DIR, filename) role.to_file(filepath, serializer=PRETTY) python-tuf-5.1.0/examples/manual_repo/succinct_hash_bin_delegations.py000066400000000000000000000145541470074210500263610ustar00rootroot00000000000000# Copyright New York University and the TUF contributors # SPDX-License-Identifier: MIT OR Apache-2.0 """ A TUF succinct hash bin delegation example using the low-level TUF Metadata API. The example code in this file demonstrates how to perform succinct hash bin delegation using the low-level Metadata API. Succinct hash bin delegation achieves a similar result as using a standard hash bin delegation, but the delegating metadata is smaller, resulting in fewer bytes to transfer and parse. See 'basic_repo.py' for a more comprehensive TUF metadata API example. For a comprehensive explanation of succinct hash bin delegation and the difference between succinct and standard hash bin delegation read: https://github.com/theupdateframework/taps/blob/master/tap15.md NOTE: Metadata files will be written to a 'tmp*'-directory in CWD. 
""" import math import os import tempfile from datetime import datetime, timedelta, timezone from pathlib import Path from typing import Dict from securesystemslib.signer import CryptoSigner from tuf.api.metadata import ( Delegations, Key, Metadata, SuccinctRoles, TargetFile, Targets, ) from tuf.api.serialization.json import JSONSerializer # Succinct hash bin delegation # ============================ # Succinct hash bin delegation aims to distribute a large number of target files # over multiple delegated targets metadata roles (bins). The consequence is # smaller metadata files and thus a lower network overhead for repository-client # communication. # # The assignment of target files to a target's metadata is done automatically, # based on the byte digest of the target file name. # # The number of bins, name prefix for all bins and key threshold are all # attributes that need to be configured. # Number of bins, bit length and bin number computation # ----------------------------------------------------- # Determining the correct number of bins is dependent on the expected number of # target files in a repository. For the purpose of this example we choose: NUMBER_OF_BINS = 32 # # The number of bins will determine the number of bits in a target path # considered in assigning the target to a bin. BIT_LENGTH = int(math.log2(NUMBER_OF_BINS)) # Delegated role (bin) name format # -------------------------------- # Each bin has a name in the format of f"{NAME_PREFIX}-{bin_number}". # # Name prefix is the common prefix of all delegated target roles (bins). # For our example it will be: NAME_PREFIX = "delegated_bin" # # The suffix "bin_number" is a zero-padded hexadecimal number of that # particular bin. # Keys and threshold # ------------------ # Succinct hash bin delegation uses the same key(s) to sign all bins. This is # acceptable because the primary concern of this type of delegation is to reduce # network overhead. For the purpose of this example only one key is required. THRESHOLD = 1 # Create one signing key for all bins, and one for the delegating targets role. bins_signer = CryptoSigner.generate_ecdsa() bins_key = bins_signer.public_key targets_signer = CryptoSigner.generate_ecdsa() # Delegating targets role # ----------------------- # Akin to regular targets delegation, the delegating role ships the public keys # of the delegated roles. However, instead of providing individual delegation # information about each role, one single `SuccinctRoles` object is used to # provide the information for all delegated roles (bins). # NOTE: See "Targets" and "Targets delegation" paragraphs in 'basic_repo.py' # example for more details about the Targets object. expiration_date = datetime.now(timezone.utc).replace(microsecond=0) + timedelta( days=7 ) targets = Metadata(Targets(expires=expiration_date)) succinct_roles = SuccinctRoles( keyids=[bins_key.keyid], threshold=THRESHOLD, bit_length=BIT_LENGTH, name_prefix=NAME_PREFIX, ) delegations_keys_info: Dict[str, Key] = {} delegations_keys_info[bins_key.keyid] = bins_key targets.signed.delegations = Delegations( delegations_keys_info, roles=None, succinct_roles=succinct_roles ) # Delegated targets roles (bins) # ------------------------------ # We can use the SuccinctRoles object from the delegating role above to iterate # over all bin names in the delegation and create the corresponding metadata. 
assert targets.signed.delegations.succinct_roles is not None  # make mypy happy delegated_bins: Dict[str, Metadata[Targets]] = {} for delegated_bin_name in targets.signed.delegations.succinct_roles.get_roles(): delegated_bins[delegated_bin_name] = Metadata( Targets(expires=expiration_date) ) # Add target file inside a delegated role (bin) # --------------------------------------------- # For the purpose of this example we will protect the integrity of this # example script by adding its file info to the corresponding bin metadata. # NOTE: See "Targets" paragraph in 'basic_repo.py' example for more details # about adding target file infos to targets metadata. local_path = Path(__file__).resolve() target_path = f"{local_path.parts[-2]}/{local_path.parts[-1]}" target_file_info = TargetFile.from_file(target_path, str(local_path)) # We don't know yet in which delegated role (bin) our target belongs. # With SuccinctRoles.get_role_for_target() we can get the name of the delegated # role (bin) responsible for that target_path. target_bin = targets.signed.delegations.succinct_roles.get_role_for_target( target_path ) # In our example with NUMBER_OF_BINS = 32 and the current file as target_path # the target_bin is "delegated_bin-0d". # Now we can add the current target to the bin responsible for it. delegated_bins[target_bin].signed.targets[target_path] = target_file_info # Sign and persist # ---------------- # Sign all metadata and write to a temporary directory at CWD for review using # versioned file names. Most notably see '1.targets.json' and # '1.delegated_bin-0d.json'. # NOTE: See "Persist metadata" paragraph in 'basic_repo.py' example for more # details about serialization formats and metadata file name convention. PRETTY = JSONSerializer(compact=False) TMP_DIR = tempfile.mkdtemp(dir=os.getcwd()) targets.sign(targets_signer) targets.to_file(os.path.join(TMP_DIR, "1.targets.json"), serializer=PRETTY) for bin_name, bin_target_role in delegated_bins.items(): file_name = f"1.{bin_name}.json" file_path = os.path.join(TMP_DIR, file_name) bin_target_role.sign(bins_signer, append=True) bin_target_role.to_file(file_path, serializer=PRETTY) python-tuf-5.1.0/examples/repository/000077500000000000000000000000001470074210500176675ustar00rootroot00000000000000python-tuf-5.1.0/examples/repository/README.md000066400000000000000000000020351470074210500211460ustar00rootroot00000000000000# TUF Repository Application Example :warning: This example uses the repository module which is not considered part of the python-tuf stable API quite yet. This TUF Repository Application Example has the following features: - Initializes a completely new repository on startup - Stores everything (metadata, targets, signing keys) in-memory - Serves metadata and targets on localhost (default port 8001) - Simulates a live repository by automatically adding a new target file every 10 seconds. - Exposes a small API for the [uploader tool example](../uploader/). API POST endpoints are: - `/api/role/<ROLE>`: For uploading new delegated targets metadata. Payload is the new version of ROLE's metadata - `/api/delegation/<ROLE>`: For modifying or creating a delegation for ROLE. Payload is a dict with one keyid:Key pair ### Usage ```console ./repo ``` Your repository is now running and is accessible on localhost, see e.g. http://127.0.0.1:8001/metadata/1.root.json. The [client example](../client/README.md) uses this address by default.
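For a quick smoke test you can also fetch metadata directly with any HTTP client (assuming the default port; a role name without a version prefix returns the latest version):

```console
curl http://127.0.0.1:8001/metadata/timestamp.json
```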
python-tuf-5.1.0/examples/repository/_simplerepo.py000066400000000000000000000207221470074210500225620ustar00rootroot00000000000000# Copyright 2021-2022 python-tuf contributors # SPDX-License-Identifier: MIT OR Apache-2.0 """Simple example of using the repository library to build a repository""" import copy import json import logging from collections import defaultdict from datetime import datetime, timedelta, timezone from typing import Dict, List, Union from securesystemslib.signer import CryptoSigner, Key, Signer from tuf.api.exceptions import RepositoryError from tuf.api.metadata import ( DelegatedRole, Delegations, Metadata, MetaFile, Root, RootVerificationResult, Signed, Snapshot, TargetFile, Targets, Timestamp, VerificationResult, ) from tuf.repository import Repository logger = logging.getLogger(__name__) _signed_init = { Root.type: Root, Snapshot.type: Snapshot, Targets.type: Targets, Timestamp.type: Timestamp, } class SimpleRepository(Repository): """Very simple in-memory repository implementation This repository keeps the metadata for all versions of all roles in memory. It also keeps all target content in memory. Attributes: role_cache: Every historical metadata version of every role in this repository. Keys are role names and values are lists of Metadata signer_cache: All signers available to the repository. Keys are role names, values are lists of signers target_cache: All target files served by the repository. Keys are target paths and values are file contents as bytes. """ expiry_period = timedelta(days=1) def __init__(self) -> None: # all versions of all metadata self.role_cache: Dict[str, List[Metadata]] = defaultdict(list) # all current keys self.signer_cache: Dict[str, List[Signer]] = defaultdict(list) # all target content self.target_cache: Dict[str, bytes] = {} # version cache for snapshot and all targets, updated in close(). 
# The 'defaultdict(lambda: ...)' trick allows close() to easily modify # the version without always creating a new MetaFile self._snapshot_info = MetaFile(1) self._targets_infos: Dict[str, MetaFile] = defaultdict( lambda: MetaFile(1) ) # setup a basic repository, generate signing key per top-level role with self.edit_root() as root: for role in ["root", "timestamp", "snapshot", "targets"]: signer = CryptoSigner.generate_ecdsa() self.signer_cache[role].append(signer) root.add_key(signer.public_key, role) for role in ["timestamp", "snapshot", "targets"]: with self.edit(role): pass @property def targets_infos(self) -> Dict[str, MetaFile]: return self._targets_infos @property def snapshot_info(self) -> MetaFile: return self._snapshot_info def _get_verification_result( self, role: str, md: Metadata ) -> Union[VerificationResult, RootVerificationResult]: """Verify roles metadata using the existing repository metadata""" if role == Root.type: assert isinstance(md.signed, Root) root = self.root() previous = root if root.version > 0 else None return md.signed.get_root_verification_result( previous, md.signed_bytes, md.signatures ) if role in [Timestamp.type, Snapshot.type, Targets.type]: delegator: Signed = self.root() else: delegator = self.targets() return delegator.get_verification_result( role, md.signed_bytes, md.signatures ) def open(self, role: str) -> Metadata: """Return current Metadata for role from 'storage' (or create a new one) """ if role not in self.role_cache: signed_init = _signed_init.get(role, Targets) md = Metadata(signed_init()) # this makes version bumping in close() simpler md.signed.version = 0 return md # return latest metadata from storage (but don't return a reference) return copy.deepcopy(self.role_cache[role][-1]) def close(self, role: str, md: Metadata) -> None: """Store a version of metadata. 
Handle version bumps, expiry, signing""" md.signed.version += 1 md.signed.expires = datetime.now(timezone.utc) + self.expiry_period md.signatures.clear() for signer in self.signer_cache[role]: md.sign(signer, append=True) # Double check that we only write verified metadata vr = self._get_verification_result(role, md) if not vr: raise ValueError(f"Role {role} failed to verify") keyids = [keyid[:7] for keyid in vr.signed] verify_str = f"verified with keys [{', '.join(keyids)}]" logger.debug("Role %s v%d: %s", role, md.signed.version, verify_str) # store new metadata version, update version caches self.role_cache[role].append(md) if role == "snapshot": self._snapshot_info.version = md.signed.version elif role not in ["root", "timestamp"]: self._targets_infos[f"{role}.json"].version = md.signed.version def add_target(self, path: str, content: str) -> None: """Add a target to top-level targets metadata""" data = bytes(content, "utf-8") # add content to cache for serving to clients self.target_cache[path] = data # add a target in the targets metadata with self.edit_targets() as targets: targets.targets[path] = TargetFile.from_data(path, data) # update snapshot, timestamp self.do_snapshot() self.do_timestamp() def submit_delegation(self, rolename: str, data: bytes) -> bool: """Add a delegation to an (offline signed) delegated targets metadata""" try: logger.debug("Processing new delegation to role %s", rolename) keyid, keydict = next(iter(json.loads(data).items())) key = Key.from_dict(keyid, keydict) # add delegation and key role = DelegatedRole(rolename, [], 1, True, [f"{rolename}/*"]) with self.edit_targets() as targets: if targets.delegations is None: targets.delegations = Delegations({}, {}) if targets.delegations.roles is None: targets.delegations.roles = {} targets.delegations.roles[rolename] = role targets.add_key(key, rolename) except (RepositoryError, json.JSONDecodeError) as e: logger.info("Failed to add delegation for %s: %s", rolename, e) return False # update snapshot, timestamp self.do_snapshot() self.do_timestamp() return True def submit_role(self, role: str, data: bytes) -> bool: """Add a new version of a delegated role's metadata""" try: logger.debug("Processing new version for role %s", role) if role in ["root", "snapshot", "timestamp", "targets"]: raise ValueError("Only delegated targets are accepted") md = Metadata.from_bytes(data) for targetpath in md.signed.targets: if not targetpath.startswith(f"{role}/"): raise ValueError(f"targets allowed under {role}/ only") if md.signed.version != self.targets(role).version + 1: raise ValueError(f"Invalid version {md.signed.version}") except (RepositoryError, ValueError) as e: logger.info("Failed to add new version for %s: %s", role, e) return False # Check that we only write verified metadata vr = self._get_verification_result(role, md) if not vr: logger.info("Role %s failed to verify", role) return False keyids = [keyid[:7] for keyid in vr.signed] verify_str = f"verified with keys [{', '.join(keyids)}]" logger.debug("Role %s v%d: %s", role, md.signed.version, verify_str) # Checks passed: Add new delegated role version self.role_cache[role].append(md) self._targets_infos[f"{role}.json"].version = md.signed.version # To keep it simple, target content is generated from targetpath for targetpath in md.signed.targets: self.target_cache[targetpath] = bytes(f"{targetpath}", "utf-8") # update snapshot, timestamp self.do_snapshot() self.do_timestamp() return True
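# A minimal usage sketch (an illustration only, not part of the repository
# application itself):
#
#   from _simplerepo import SimpleRepository
#
#   repo = SimpleRepository()
#   repo.add_target("file1.txt", "hello")
#   assert "file1.txt" in repo.target_cache
#
# Each add_target() call ends with do_snapshot() and do_timestamp() (inherited
# from the Repository base class), so every change publishes a complete,
# consistent metadata set.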
python-tuf-5.1.0/examples/repository/repo000077500000000000000000000106041470074210500205630ustar00rootroot00000000000000#!/usr/bin/env python # Copyright 2021-2022 python-tuf contributors # SPDX-License-Identifier: MIT OR Apache-2.0 """Simple repository example application The application stores metadata and targets in memory, and serves them via http. Nothing is persisted on disk or loaded from disk. The application simulates a live repository by adding new target files periodically. """ import argparse import logging import sys from datetime import datetime from http.server import BaseHTTPRequestHandler, ThreadingHTTPServer from time import time from typing import Dict, List from _simplerepo import SimpleRepository from tuf.api.serialization.json import JSONSerializer logger = logging.getLogger(__name__) class ReqHandler(BaseHTTPRequestHandler): """HTTP handler for the repository example application Serves metadata, targets and a small upload API using a SimpleRepository """ def do_POST(self): """Handle POST requests, aka the 'uploader API'""" content_len = int(self.headers.get("content-length", 0)) data = self.rfile.read(content_len) if self.path.startswith("/api/delegation/"): role = self.path[len("/api/delegation/") :] if not self.server.repo.submit_delegation(role, data): return self.send_error(400, f"Failed to delegate to {role}") elif self.path.startswith("/api/role/"): role = self.path[len("/api/role/") :] if not self.server.repo.submit_role(role, data): return self.send_error(400, f"Failed to submit role {role}") else: return self.send_error(404) self.send_response(200) self.end_headers() def do_GET(self): """Handle GET: metadata and target files""" data = None if self.path.startswith("/metadata/") and self.path.endswith(".json"): data = self.get_metadata( self.path[len("/metadata/") : -len(".json")] ) elif self.path.startswith("/targets/"): data = self.get_target(self.path[len("/targets/") :]) if data is None: self.send_error(404) else: self.send_response(200) self.send_header("Content-length", len(data)) self.end_headers() self.wfile.write(data) def get_metadata(self, ver_and_role: str): repo = self.server.repo ver_str, sep, role = ver_and_role.rpartition(".") if sep == "": # 0 will lead to list lookup with -1, meaning latest version ver = 0 else: ver = int(ver_str) if role not in repo.role_cache or ver > len(repo.role_cache[role]): return None # return metadata return repo.role_cache[role][ver - 1].to_bytes(JSONSerializer()) def get_target(self, targetpath: str): repo = self.server.repo # unimplement the dumb hashing scheme # TODO: maybe use hashed paths as the target_cache key dir, sep, hashname = targetpath.rpartition("/") _, _, name = hashname.partition(".") target = f"{dir}{sep}{name}" if target not in repo.target_cache: return None # send the target content return repo.target_cache[target] class RepositoryServer(ThreadingHTTPServer): def __init__(self, port: int): super().__init__(("127.0.0.1", port), ReqHandler) self.timeout = 1 self.repo = SimpleRepository() def main(argv: List[str]) -> None: """Example repository server""" parser = argparse.ArgumentParser() parser.add_argument("-v", "--verbose", action="count") parser.add_argument("-p", "--port", type=int, default=8001) args, _ = parser.parse_known_args(argv) level = logging.DEBUG if args.verbose else logging.INFO logging.basicConfig(level=level) server = RepositoryServer(args.port) last_change = 0 counter = 0 logger.info( f"Now serving. 
Root v1 at http://127.0.0.1:{server.server_port}/metadata/1.root.json" ) while True: # Simulate a live repository: Add a new target file every few seconds if time() - last_change > 10: last_change = int(time()) counter += 1 content = str(datetime.fromtimestamp(last_change)) server.repo.add_target(f"file{str(counter)}.txt", content) server.handle_request() if __name__ == "__main__": main(sys.argv) python-tuf-5.1.0/examples/uploader/000077500000000000000000000000001470074210500172635ustar00rootroot00000000000000python-tuf-5.1.0/examples/uploader/README.md000066400000000000000000000032721470074210500205460ustar00rootroot00000000000000# TUF Uploader Tool Example :warning: This example uses the repository module which is not considered part of the python-tuf stable API quite yet. This is an example maintainer tool: It makes it possible to add delegations to a remote repository, and then to upload delegated metadata to the repository. Features: - Initialization (much like the [client example](../client/)) - Claim delegation: this uses "unsafe repository API" in the sense that the uploader sends repository unsigned data. This operation can be compared to claiming a project name on PyPI.org - Add targetfile: Here uploader uses signing keys that were added to the delegation in the previous step to create a new version of the delegated metadata. The repository will verify signatures on this metadata. The used TUF repository can be set with `--url` (default repository is "http://127.0.0.1:8001" which is also the default for the repository example). In practice the uploader tool is only useful with the repository example. ### Usage with the repository example In one terminal, run the [repository example](../repository/) and leave it running: ```console examples/repository/repo ``` In another terminal, run uploader: ```console # Initialize with Trust-On-First-Use ./uploader tofu # Then claim a delegation for yourself (this also creates a new signing key): ./uploader add-delegation myrole # Then add a new downloadable target file to your delegated role (to keep the # example simple, the target file content is always the targetpath): ./uploader add-target myrole myrole/mytargetfile ``` At this point "myrole/mytargetfile" is downloadable from the repository with the [client example](../client/). python-tuf-5.1.0/examples/uploader/_localrepo.py000066400000000000000000000117111470074210500217550ustar00rootroot00000000000000# Copyright 2021-2022 python-tuf contributors # SPDX-License-Identifier: MIT OR Apache-2.0 """A Repository implementation for maintainer and developer tools""" import contextlib import copy import json import logging import os from datetime import datetime, timedelta, timezone from typing import Dict import requests from securesystemslib.signer import CryptoSigner, Signer from tuf.api.exceptions import RepositoryError from tuf.api.metadata import Metadata, MetaFile, TargetFile, Targets from tuf.api.serialization.json import JSONSerializer from tuf.ngclient import Updater from tuf.repository import Repository logger = logging.getLogger(__name__) class LocalRepository(Repository): """A repository implementation that fetches data from a remote repository This implementation fetches metadata from a remote repository, potentially creates new versions of metadata, and submits to the remote repository. 

    ngclient Updater is used to fetch metadata from the remote server: this
    is good because we want to make sure the metadata we modify is verified,
    but also bad because we need some hacks to access the Updater's metadata.
    """

    expiry_period = timedelta(days=1)

    def __init__(self, metadata_dir: str, key_dir: str, base_url: str):
        self.key_dir = key_dir
        if not os.path.isdir(self.key_dir):
            os.makedirs(self.key_dir)

        self.base_url = base_url

        self.updater = Updater(
            metadata_dir=metadata_dir,
            metadata_base_url=f"{base_url}/metadata/",
        )
        self.updater.refresh()

    @property
    def targets_infos(self) -> Dict[str, MetaFile]:
        raise NotImplementedError  # we never call snapshot

    @property
    def snapshot_info(self) -> MetaFile:
        raise NotImplementedError  # we never call timestamp

    def open(self, role: str) -> Metadata:
        """Return cached (or fetched) metadata"""

        # if there is a metadata version fetched from remote, use that
        # HACK: access Updater internals
        trusted_set = self.updater._trusted_set  # noqa: SLF001
        if role in trusted_set:
            # NOTE: The original signature wrapper (Metadata) was verified and
            # discarded upon inclusion in the trusted set. It is safe to use
            # a fresh wrapper. `close` will override existing signatures anyway.
            return Metadata(copy.deepcopy(trusted_set[role]))

        # otherwise we're creating metadata from scratch
        md = Metadata(Targets())
        # this makes version bumping in close() simpler
        md.signed.version = 0
        return md

    def close(self, role_name: str, md: Metadata) -> None:
        """Store a version of metadata. Handle version bumps, expiry, signing"""
        targets = self.targets()
        role = targets.get_delegated_role(role_name)
        public_key = targets.get_key(role.keyids[0])
        uri = f"file2:{self.key_dir}/{role_name}"

        signer = Signer.from_priv_key_uri(uri, public_key)

        md.signed.version += 1
        md.signed.expires = datetime.now(timezone.utc) + self.expiry_period

        md.sign(signer, append=False)

        # Upload using "api/role"
        uri = f"{self.base_url}/api/role/{role_name}"
        r = requests.post(uri, data=md.to_bytes(JSONSerializer()), timeout=5)
        r.raise_for_status()

    def add_target(self, role: str, targetpath: str) -> bool:
        """Add target to the role's metadata and submit a new metadata version"""

        # HACK: make sure we have the role's metadata in updater._trusted_set
        # (or that we're publishing the first version)
        # HACK: Assume RepositoryError is because we're just publishing version
        # 1 (so the role's metadata does not exist on the server yet)
        with contextlib.suppress(RepositoryError):
            self.updater.get_targetinfo(targetpath)

        data = bytes(targetpath, "utf-8")
        targetfile = TargetFile.from_data(targetpath, data)
        try:
            with self.edit_targets(role) as delegated:
                delegated.targets[targetpath] = targetfile
        except Exception as e:  # noqa: BLE001
            print(f"Failed to submit new {role} with added target: {e}")
            return False

        print(f"Uploaded role {role} v{delegated.version}")
        return True

    def add_delegation(self, role: str) -> bool:
        """Use the (unauthenticated) delegation adding API endpoint"""
        signer = CryptoSigner.generate_ecdsa()

        data = {signer.public_key.keyid: signer.public_key.to_dict()}
        url = f"{self.base_url}/api/delegation/{role}"
        r = requests.post(url, data=json.dumps(data), timeout=5)
        if r.status_code != 200:
            print(f"delegation failed with {r}")
            return False

        # Store the private key using rolename as filename
        with open(f"{self.key_dir}/{role}", "wb") as f:
            f.write(signer.private_bytes)

        print(f"Uploaded new delegation, stored key in {self.key_dir}/{role}")
        return True
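The `add_delegation()` / `close()` pair above round-trips signing keys through securesystemslib's `file2:` private-key URI scheme: the raw private bytes are written to a file named after the role, and later reloaded to sign new metadata versions. A minimal, self-contained sketch of that round-trip (the `myrole` name, the temporary directory, and the `b"payload"` bytes are illustrative only, not part of the example app):

```python
import os
import tempfile

from securesystemslib.signer import CryptoSigner, Signer

# Generate an in-memory ecdsa keypair, as add_delegation() does
signer = CryptoSigner.generate_ecdsa()
public_key = signer.public_key

# Persist the private key, using the role name as the file name
key_path = os.path.join(tempfile.mkdtemp(), "myrole")
with open(key_path, "wb") as f:
    f.write(signer.private_bytes)

# Later, reload a signer through the "file2:" URI scheme, as close() does
reloaded = Signer.from_priv_key_uri(f"file2:{key_path}", public_key)

# Sign and verify: verify_signature() raises UnverifiedSignatureError
# if the signature does not match the public key
sig = reloaded.sign(b"payload")
public_key.verify_signature(sig, b"payload")
print(f"round-trip OK, key stored in {key_path}")
```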
python-tuf-5.1.0/examples/uploader/uploader000077500000000000000000000102461470074210500210270ustar00rootroot00000000000000
#!/usr/bin/env python
# Copyright 2021-2022 python-tuf contributors
# SPDX-License-Identifier: MIT OR Apache-2.0

"""Simple uploader tool example

Uploader is a maintainer application that communicates with the repository
example. Uploader controls offline signing keys and produces signed metadata
that it sends to the repository application so that the metadata can be added
to the repository.
"""

import argparse
import logging
import os
import sys
from hashlib import sha256
from pathlib import Path
from typing import List, Optional
from urllib import request

from _localrepo import LocalRepository

logger = logging.getLogger(__name__)


def build_metadata_dir(base_url: str) -> str:
    """Build a unique and reproducible metadata dirname for the repo url"""
    name = sha256(base_url.encode()).hexdigest()[:8]
    # TODO: Make this not windows hostile?
    return f"{Path.home()}/.local/share/tuf-upload-example/{name}"


def build_key_dir(base_url: str) -> str:
    """Build a unique and reproducible private key dir for the repository url"""
    name = sha256(base_url.encode()).hexdigest()[:8]
    # TODO: Make this not windows hostile?
    return f"{Path.home()}/.config/tuf-upload-example/{name}"


def init_tofu(base_url: str) -> bool:
    """Initialize local trusted metadata (Trust-On-First-Use)"""
    metadata_dir = build_metadata_dir(base_url)
    if not os.path.isdir(metadata_dir):
        os.makedirs(metadata_dir)

    root_url = f"{base_url}/metadata/1.root.json"
    try:
        request.urlretrieve(root_url, f"{metadata_dir}/root.json")
    except OSError:
        print(f"Failed to download initial root from {root_url}")
        return False

    print(f"Trust-on-First-Use: Initialized new root in {metadata_dir}")
    return True


def init(base_url: str) -> Optional[LocalRepository]:
    """Initialize a LocalRepository: local root.json must already exist"""
    metadata_dir = build_metadata_dir(base_url)
    keydir = build_key_dir(base_url)

    if not os.path.isfile(f"{metadata_dir}/root.json"):
        print(
            "Trusted local root not found. Use 'tofu' command to "
            "Trust-On-First-Use or copy trusted root metadata to "
            f"{metadata_dir}/root.json"
        )
        return None

    print(f"Using trusted root in {metadata_dir}")
    return LocalRepository(metadata_dir, keydir, base_url)


def main(argv: List[str]) -> Optional[str]:
    """Example uploader tool"""
    parser = argparse.ArgumentParser()
    parser.add_argument("-v", "--verbose", action="count", default=0)
    parser.add_argument(
        "-u",
        "--url",
        help="Base repository URL",
        default="http://127.0.0.1:8001",
    )
    subparsers = parser.add_subparsers(dest="sub_command")

    subparsers.add_parser(
        "tofu",
        help="Initialize client with Trust-On-First-Use",
    )

    add_delegation_cmd = subparsers.add_parser(
        "add-delegation",
        help="Create a delegation and signing key",
    )
    add_delegation_cmd.add_argument("rolename")

    add_target_cmd = subparsers.add_parser(
        "add-target",
        help="Add a target to a delegated role",
    )
    add_target_cmd.add_argument("rolename")
    add_target_cmd.add_argument("targetpath")

    args = parser.parse_args()

    if args.verbose == 0:
        loglevel = logging.ERROR
    elif args.verbose == 1:
        loglevel = logging.WARNING
    elif args.verbose == 2:
        loglevel = logging.INFO
    else:
        loglevel = logging.DEBUG
    logging.basicConfig(level=loglevel)

    if args.sub_command == "tofu":
        if not init_tofu(args.url):
            return "Failed to initialize local repository"
    elif args.sub_command == "add-delegation":
        repo = init(args.url)
        if not repo:
            return "Failed to initialize"
        if not repo.add_delegation(args.rolename):
            return "Failed to add delegation"
    elif args.sub_command == "add-target":
        repo = init(args.url)
        if not repo:
            return "Failed to initialize"
        if not repo.add_target(args.rolename, args.targetpath):
            return "Failed to add target"
    else:
        parser.print_help()


if __name__ == "__main__":
    sys.exit(main(sys.argv))
python-tuf-5.1.0/pyproject.toml000066400000000000000000000104331470074210500165470ustar00rootroot00000000000000
[build-system]
# Dependabot cannot do `build-system.requires` (dependabot/dependabot-core#8465)
# workaround to get reproducibility and auto-updates:
#   PIP_CONSTRAINT=requirements/build.txt python3 -m build ...
requires = ["hatchling"] build-backend = "hatchling.build" [project] name = "tuf" description = "A secure updater framework for Python" readme = "README.md" license = { text = "MIT OR Apache-2.0" } requires-python = ">=3.8" authors = [ { email = "theupdateframework@googlegroups.com" }, ] keywords = [ "authentication", "compromise", "key", "revocation", "secure", "update", "updater", ] classifiers = [ "Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: Apache Software License", "License :: OSI Approved :: MIT License", "Natural Language :: English", "Operating System :: MacOS :: MacOS X", "Operating System :: Microsoft :: Windows", "Operating System :: POSIX", "Operating System :: POSIX :: Linux", "Programming Language :: Python", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: Implementation :: CPython", "Topic :: Security", "Topic :: Software Development", ] dependencies = [ "requests>=2.19.1", "securesystemslib~=1.0", ] dynamic = ["version"] [project.urls] Documentation = "https://theupdateframework.readthedocs.io/en/stable/" Homepage = "https://www.updateframework.com" Issues = "https://github.com/theupdateframework/python-tuf/issues" Source = "https://github.com/theupdateframework/python-tuf" [tool.hatch.version] path = "tuf/__init__.py" [tool.hatch.build.targets.sdist] include = [ "/docs", "/examples", "/tests", "/tuf", "/requirements", "/tox.ini", "/setup.py", ] [tool.hatch.build.targets.wheel] # The testing phase changes the current working directory to `tests` but the test scripts import # from `tests` so the root directory must be added to Python's path for editable installations dev-mode-dirs = ["."] # Ruff section # Read more here: https://docs.astral.sh/ruff/linter/#rule-selection [tool.ruff] line-length=80 [tool.ruff.lint] select = ["ALL"] ignore = [ # Rulesets we do not use at this moment "COM", "EM", "FA", "FIX", "FBT", "PERF", "PT", "PTH", "TD", "TRY", # Individual rules that have been disabled "ANN101", "ANN102", # nonsense, deprecated in ruff "D400", "D415", "D213", "D205", "D202", "D107", "D407", "D413", "D212", "D104", "D406", "D105", "D411", "D401", "D200", "D203", "ISC001", # incompatible with ruff formatter "PLR0913", "PLR2004", ] [tool.ruff.lint.per-file-ignores] "tests/*" = [ "D", # pydocstyle: no docstrings required for tests "E501", # line-too-long: embedded test data in "fmt: off" blocks is ok "ERA001", # commented code is fine in tests "RUF012", # ruff: mutable-class-default "S", # bandit: Not running bandit on tests "SLF001", # private member access is ok in tests "T201", # print is ok in tests ] "examples/*/*" = [ "D", # pydocstyle: no docstrings required for examples "ERA001", # commented code is fine in examples "INP001", # implicit package is fine in examples "S", # bandit: Not running bandit on examples "T201", # print is ok in examples ] "verify_release" = [ "ERA001", # commented code is fine here "S603", # bandit: this flags all uses of subprocess.run as vulnerable "T201", # print is ok in verify_release ] ".github/scripts/*" = [ "T201", # print is ok in conformance client ] [tool.ruff.lint.flake8-annotations] mypy-init-return = true # mypy section # Read more here: https://mypy.readthedocs.io/en/stable/config_file.html#using-a-pyproject-toml-file [tool.mypy] 
warn_unused_configs = "True" warn_redundant_casts = "True" warn_unused_ignores = "True" warn_unreachable = "True" strict_equality = "True" disallow_untyped_defs = "True" disallow_untyped_calls = "True" show_error_codes = "True" disable_error_code = ["attr-defined"] [[tool.mypy.overrides]] module = [ "requests.*", "securesystemslib.*", ] ignore_missing_imports = "True" python-tuf-5.1.0/requirements/000077500000000000000000000000001470074210500163555ustar00rootroot00000000000000python-tuf-5.1.0/requirements/build.txt000066400000000000000000000002271470074210500202160ustar00rootroot00000000000000# The build and tox versions specified here are also used as constraints # during CI and CD Github workflows build==1.2.2 tox==4.1.2 hatchling==1.25.0 python-tuf-5.1.0/requirements/dev.txt000066400000000000000000000004431470074210500176750ustar00rootroot00000000000000# Install tuf in editable mode and requirements for local testing with tox, # and also for running test suite or individual tests manually. # The build and tox versions specified here are also used as constraints # during CI and CD Github workflows -r build.txt -r test.txt -r lint.txt -e . python-tuf-5.1.0/requirements/docs.txt000066400000000000000000000003011470074210500200400ustar00rootroot00000000000000# Install documentation build requirements # pinned tuf runtime dependencies (should auto-update and -trigger ci/cd) -r pinned.txt # install sphinx and its extensions sphinx sphinx-rtd-theme python-tuf-5.1.0/requirements/lint.txt000066400000000000000000000004351470074210500200660ustar00rootroot00000000000000# Requirements needed in tox lint environment # pinned tuf runtime dependencies -r pinned.txt # Lint tools # (We are not so interested in the specific versions of the tools: the versions # are pinned to prevent unexpected linting failures when tools update) ruff==0.6.8 mypy==1.11.2 python-tuf-5.1.0/requirements/main.txt000066400000000000000000000006111470074210500200400ustar00rootroot00000000000000# TUF runtime requirements plus securesystemslib with extra dependencies # # This file together with 'pip-compile' is used to generate a pinned # requirements file with all immediate and transitive dependencies. # # 'pinned.txt' is updated on GitHub with Dependabot, which # triggers CI/CD builds to automatically test against updated dependencies. 
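#
# A hypothetical illustration of the pip-compile step described above (the
# exact flags used by the project's own automation are not shown here):
#
#   pip-compile --output-file pinned.txt main.txt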
#
securesystemslib[crypto, pynacl]
requests
python-tuf-5.1.0/requirements/pinned.txt000066400000000000000000000006301470074210500203720ustar00rootroot00000000000000
certifi==2024.8.30  # via requests
cffi==1.17.1  # via cryptography, pynacl
charset-normalizer==3.3.2  # via requests
cryptography==43.0.1  # via securesystemslib
idna==3.10  # via requests
pycparser==2.22  # via cffi
pynacl==1.5.0  # via securesystemslib
requests==2.32.3
securesystemslib[crypto,pynacl]==1.1.0
urllib3==2.2.3  # via requests
python-tuf-5.1.0/requirements/test.txt000066400000000000000000000002601470074210500200730ustar00rootroot00000000000000
# Requirements needed in tox test environments

# pinned tuf runtime dependencies (should auto-update and -trigger ci/cd)
-r pinned.txt

# coverage measurement
coverage==7.6.1
python-tuf-5.1.0/tests/000077500000000000000000000000001470074210500147745ustar00rootroot00000000000000python-tuf-5.1.0/tests/.coveragerc000066400000000000000000000002621470074210500171150ustar00rootroot00000000000000
[run]
branch = True
omit =
    */tests/*
    */site-packages/*

[report]
exclude_lines =
    pragma: no cover
    def __str__
    if __name__ == .__main__.:
    @abstractmethod
python-tuf-5.1.0/tests/__init__.py000066400000000000000000000000001470074210500170730ustar00rootroot00000000000000python-tuf-5.1.0/tests/aggregate_tests.py000077500000000000000000000016101470074210500205170ustar00rootroot00000000000000
#!/usr/bin/env python

# Copyright 2013 - 2017, New York University and the TUF contributors
# SPDX-License-Identifier: MIT OR Apache-2.0

"""
  aggregate_tests.py

  Konstantin Andrianov.
  Zane Fisher.

  January 26, 2013.

  August 2013.
  Modified previous behavior that explicitly imported individual unit tests.
  -Zane Fisher

  See LICENSE-MIT OR LICENSE for licensing information.

  Run all the unit tests from every .py file beginning with "test_" in
  'tuf/tests'.
""" import sys import unittest if __name__ == "__main__": suite = unittest.TestLoader().discover(".") all_tests_passed = ( unittest.TextTestRunner(verbosity=1, buffer=True) .run(suite) .wasSuccessful() ) if not all_tests_passed: sys.exit(1) else: sys.exit(0) python-tuf-5.1.0/tests/generated_data/000077500000000000000000000000001470074210500177235ustar00rootroot00000000000000python-tuf-5.1.0/tests/generated_data/__init__.py000066400000000000000000000000001470074210500220220ustar00rootroot00000000000000python-tuf-5.1.0/tests/generated_data/ed25519_metadata/000077500000000000000000000000001470074210500225615ustar00rootroot00000000000000python-tuf-5.1.0/tests/generated_data/ed25519_metadata/root_with_ed25519.json000066400000000000000000000035701470074210500265550ustar00rootroot00000000000000{ "signatures": [ { "keyid": "5822582e7072996c1eef1cec24b61115d364987faa486659fe3d3dce8dae2aba", "sig": "06fc2b26d10afae02689c96dee96ff40e702734accec6f3e69671dec0a59e0763bd7cb7d5ffa4b9e87441c4c98e798ce97cb462e7075e38ad9bc1d0d0c657309" } ], "signed": { "_type": "root", "consistent_snapshot": true, "expires": "2050-01-01T00:00:00Z", "keys": { "09d440e3725cec247dcb8703b646a87dd2a4d75343e8095c036c32795eefe3b9": { "keytype": "ed25519", "keyval": { "public": "250f9ae3d1d3d5c419a73cfb4a470c01de1d5d3d61a3825416b5f5d6b88f4a30" }, "scheme": "ed25519" }, "2be5c21e3614f9f178fb49c4a34d0c18ffac30abd14ced917c60a52c8d8094b7": { "keytype": "ed25519", "keyval": { "public": "0e6738fc1ac6fb4de680b4be99ecbcd99b030f3963f291277eef67bb9bd123e9" }, "scheme": "ed25519" }, "3458204ed467519c19a5316eb278b5608472a1bbf15850ebfb462d5315e4f86d": { "keytype": "ed25519", "keyval": { "public": "82380623abb9666d4bf274b1a02577469445a972e5650d270101faa5107b19c8" }, "scheme": "ed25519" }, "5822582e7072996c1eef1cec24b61115d364987faa486659fe3d3dce8dae2aba": { "keytype": "ed25519", "keyval": { "public": "b11d2ff132c033a657318c74c39526476c56de7556c776f11070842dbc4ac14c" }, "scheme": "ed25519" } }, "roles": { "root": { "keyids": [ "5822582e7072996c1eef1cec24b61115d364987faa486659fe3d3dce8dae2aba" ], "threshold": 1 }, "snapshot": { "keyids": [ "3458204ed467519c19a5316eb278b5608472a1bbf15850ebfb462d5315e4f86d" ], "threshold": 1 }, "targets": { "keyids": [ "2be5c21e3614f9f178fb49c4a34d0c18ffac30abd14ced917c60a52c8d8094b7" ], "threshold": 1 }, "timestamp": { "keyids": [ "09d440e3725cec247dcb8703b646a87dd2a4d75343e8095c036c32795eefe3b9" ], "threshold": 1 } }, "spec_version": "1.0.31", "version": 1 } }python-tuf-5.1.0/tests/generated_data/ed25519_metadata/snapshot_with_ed25519.json000066400000000000000000000006601470074210500274260ustar00rootroot00000000000000{ "signatures": [ { "keyid": "3458204ed467519c19a5316eb278b5608472a1bbf15850ebfb462d5315e4f86d", "sig": "bab356be0a82b85b9529aa4625cbd7b8e03b71d1a0fb5d3242f6e8377f102bcf60cc1b8c2a566fd5618c5f5ee3fc07745e84920d26e5514ad455868d7899ae03" } ], "signed": { "_type": "snapshot", "expires": "2050-01-01T00:00:00Z", "meta": { "targets.json": { "version": 1 } }, "spec_version": "1.0.31", "version": 1 } }python-tuf-5.1.0/tests/generated_data/ed25519_metadata/targets_with_ed25519.json000066400000000000000000000006041470074210500272360ustar00rootroot00000000000000{ "signatures": [ { "keyid": "2be5c21e3614f9f178fb49c4a34d0c18ffac30abd14ced917c60a52c8d8094b7", "sig": "9e47f85b3edc79b7215bfee1291da46655deca0b6de99cb3968293218f3329855e57c1523120a50e3a2a8cc50aa9e886f4f74d902d28f43559f294681152f30b" } ], "signed": { "_type": "targets", "expires": "2050-01-01T00:00:00Z", "spec_version": "1.0.31", "targets": {}, "version": 
1 } }
python-tuf-5.1.0/tests/generated_data/ed25519_metadata/timestamp_with_ed25519.json000066400000000000000000000006621470074210500275740ustar00rootroot00000000000000
{ "signatures": [ { "keyid": "09d440e3725cec247dcb8703b646a87dd2a4d75343e8095c036c32795eefe3b9", "sig": "f1b1921a5963485eb5f1cf83f1b44548420bdcced420a367f0c42b63c91950410287f6d062824941085361c3906bb44a365352e2971787a653443ff8df484007" } ], "signed": { "_type": "timestamp", "expires": "2050-01-01T00:00:00Z", "meta": { "snapshot.json": { "version": 1 } }, "spec_version": "1.0.31", "version": 1 } }
python-tuf-5.1.0/tests/generated_data/generate_md.py000066400000000000000000000100121470074210500225410ustar00rootroot00000000000000
"""Script for generating new metadata files."""

# Copyright New York University and the TUF contributors
# SPDX-License-Identifier: MIT OR Apache-2.0

import os
import sys
from datetime import datetime, timezone
from typing import List, Optional

from cryptography.hazmat.primitives.asymmetric.ed25519 import Ed25519PrivateKey
from securesystemslib.signer import CryptoSigner, Signer, SSlibKey

from tests import utils
from tuf.api.metadata import Metadata, Root, Snapshot, Targets, Timestamp
from tuf.api.serialization.json import JSONSerializer

# Hardcode keys and expiry time to achieve reproducibility.
public_values: List[str] = [
    "b11d2ff132c033a657318c74c39526476c56de7556c776f11070842dbc4ac14c",
    "250f9ae3d1d3d5c419a73cfb4a470c01de1d5d3d61a3825416b5f5d6b88f4a30",
    "82380623abb9666d4bf274b1a02577469445a972e5650d270101faa5107b19c8",
    "0e6738fc1ac6fb4de680b4be99ecbcd99b030f3963f291277eef67bb9bd123e9",
]
private_values: List[bytes] = [
    bytes.fromhex(
        "510e5e04d7a364af850533856eacdf65d30cc0f8803ecd5fdc0acc56ca2aa91c"
    ),
    bytes.fromhex(
        "e6645b00312c8a257782e3e61e85bafda4317ad072c52251ef933d480c387abd"
    ),
    bytes.fromhex(
        "cd13dd2180334b24c19b32aaf27f7e375a614d7ba0777220d5c2290bb2f9b868"
    ),
    bytes.fromhex(
        "7e2e751145d1b22f6e40d4ba2aa47158207acfd3c003f1cbd5a08141dfc22a15"
    ),
]
keyids: List[str] = [
    "5822582e7072996c1eef1cec24b61115d364987faa486659fe3d3dce8dae2aba",
    "09d440e3725cec247dcb8703b646a87dd2a4d75343e8095c036c32795eefe3b9",
    "3458204ed467519c19a5316eb278b5608472a1bbf15850ebfb462d5315e4f86d",
    "2be5c21e3614f9f178fb49c4a34d0c18ffac30abd14ced917c60a52c8d8094b7",
]

signers: List[Signer] = []
for index in range(len(keyids)):
    key = SSlibKey(
        keyids[index],
        "ed25519",
        "ed25519",
        {"public": public_values[index]},
    )
    private_key = Ed25519PrivateKey.from_private_bytes(private_values[index])
    signers.append(CryptoSigner(private_key, key))

EXPIRY = datetime(2050, 1, 1, tzinfo=timezone.utc)
OUT_DIR = "generated_data/ed25519_metadata"
if not os.path.exists(OUT_DIR):
    os.mkdir(OUT_DIR)

SERIALIZER = JSONSerializer()


def verify_generation(md: Metadata, path: str) -> None:
    """Verify that the newly generated file equals the locally stored one.

    Args:
        md: Newly generated metadata object.
        path: Path to the locally stored metadata file.
    """
    with open(path, "rb") as f:
        static_md_bytes = f.read()

    md_bytes = md.to_bytes(SERIALIZER)
    if static_md_bytes != md_bytes:
        raise ValueError(
            f"Generated data != local data at {path}. Generate a new "
            "metadata with 'python generated_data/generate_md.py'"
        )


def generate_all_files(
    dump: Optional[bool] = False, verify: Optional[bool] = False
) -> None:
    """Generate a new repository and optionally verify it.

    Args:
        dump: Whether to dump the newly generated files.
        verify: Whether to verify the newly generated files against the
            locally stored ones.
""" md_root = Metadata(Root(expires=EXPIRY)) md_timestamp = Metadata(Timestamp(expires=EXPIRY)) md_snapshot = Metadata(Snapshot(expires=EXPIRY)) md_targets = Metadata(Targets(expires=EXPIRY)) md_root.signed.add_key(signers[0].public_key, "root") md_root.signed.add_key(signers[1].public_key, "timestamp") md_root.signed.add_key(signers[2].public_key, "snapshot") md_root.signed.add_key(signers[3].public_key, "targets") for i, md in enumerate([md_root, md_timestamp, md_snapshot, md_targets]): assert isinstance(md, Metadata) md.sign(signers[i]) path = os.path.join(OUT_DIR, f"{md.signed.type}_with_ed25519.json") if verify: verify_generation(md, path) if dump: md.to_file(path, SERIALIZER) if __name__ == "__main__": utils.configure_test_logging(sys.argv) # To generate a new set of metadata files this script is supposed to be run # from the "tests" folder. generate_all_files(dump=True) python-tuf-5.1.0/tests/repository_data/000077500000000000000000000000001470074210500202045ustar00rootroot00000000000000python-tuf-5.1.0/tests/repository_data/README.md000066400000000000000000000030021470074210500214560ustar00rootroot00000000000000# Unit and integration testing ## Running the tests The unit and integration tests can be executed by invoking `tox` from any path under the project directory. ``` $ tox ``` Or by invoking `aggregate_tests.py` from the [tests](https://github.com/theupdateframework/python-tuf/tree/develop/tests) directory. ``` $ python3 aggregate_tests.py ``` Note: integration tests end in `_integration.py`. If you wish to run a particular unit test, navigate to the tests directory and run that specific unit test. For example: ``` $ python3 test_updater.py ``` It it also possible to run the test cases of a unit test. For instance: ``` $ python3 -m unittest test_updater.TestMultiRepoUpdater.test_get_one_valid_targetinfo ``` ## Setup The unit and integration tests operate on static metadata available in the [repository_data directory](https://github.com/theupdateframework/python-tuf/tree/develop/tests/repository_data/). Before running the tests, static metadata is first copied to temporary directories and modified, as needed, by the tests. The test modules typically spawn HTTP(S) servers that serve metadata and target files for the unit tests. The [map file](https://github.com/theupdateframework/python-tuf/tree/develop/tests/repository_data) specifies the location of the test repositories and other properties. For specific targets and metadata provided by the tests repositories, please inspect their [respective metadata](https://github.com/theupdateframework/python-tuf/tree/develop/tests/repository_data/repository). 
python-tuf-5.1.0/tests/repository_data/client/000077500000000000000000000000001470074210500214625ustar00rootroot00000000000000python-tuf-5.1.0/tests/repository_data/client/map.json000066400000000000000000000007011470074210500231300ustar00rootroot00000000000000{ "mapping": [ { "paths": [ "*1.txt" ], "repositories": [ "test_repository1", "test_repository2" ], "terminating": false, "threshold": 1 }, { "paths": [ "*3.txt" ], "repositories": [ "test_repository2" ], "terminating": true, "threshold": 1 } ], "repositories": { "test_repository1": [ "http://localhost:30001" ], "test_repository2": [ "http://localhost:30002" ] } } python-tuf-5.1.0/tests/repository_data/client/test_repository1/000077500000000000000000000000001470074210500250215ustar00rootroot00000000000000python-tuf-5.1.0/tests/repository_data/client/test_repository1/metadata/000077500000000000000000000000001470074210500266015ustar00rootroot00000000000000python-tuf-5.1.0/tests/repository_data/client/test_repository1/metadata/current/000077500000000000000000000000001470074210500302635ustar00rootroot00000000000000python-tuf-5.1.0/tests/repository_data/client/test_repository1/metadata/current/1.root.json000066400000000000000000000065041470074210500323050ustar00rootroot00000000000000{ "signatures": [ { "keyid": "4e777de0d275f9d28588dd9a1606cc748e548f9e22b6795b7cb3f63f98035fcb", "sig": "a337d6375fedd2eabfcd6c2ef6c8a9c3bb85dc5a857715f6a6bd41123e7670c4972d8548bcd7248154f3d864bf25f1823af59d74c459f41ea09a02db057ca1245612ebbdb97e782c501dc3e094f7fa8aa1402b03c6ed0635f565e2a26f9f543a89237e15a2faf0c267e2b34c3c38f2a43a28ddcdaf8308a12ead8c6dc47d1b762de313e9ddda8cc5bc25aea1b69d0e5b9199ca02f5dda48c3bff615fd12a7136d00634b9abc6e75c3256106c4d6f12e6c43f6195071355b2857bbe377ce028619b58837696b805040ce144b393d50a472531f430fadfb68d3081b6a8b5e49337e328c9a0a3f11e80b0bc8eb2dc6e78d1451dd857e6e6e6363c3fd14c590aa95e083c9bfc77724d78af86eb7a7ef635eeddaa353030c79f66b3ba9ea11fab456cfe896a826fdfb50a43cd444f762821aada9bcd7b022c0ee85b8768f960343d5a1d3d76374cc0ac9e12a500de0bf5d48569e5398cadadadab045931c398e3bcb6cec88af2437ba91959f956079cbed159fed3938016e6c3b5e446131f81cc5981" } ], "signed": { "_type": "root", "consistent_snapshot": false, "expires": "2030-01-01T00:00:00Z", "keys": { "4e777de0d275f9d28588dd9a1606cc748e548f9e22b6795b7cb3f63f98035fcb": { "keyid_hash_algorithms": [ "sha256", "sha512" ], "keytype": "rsa", "keyval": { "public": "-----BEGIN PUBLIC KEY-----\nMIIBojANBgkqhkiG9w0BAQEFAAOCAY8AMIIBigKCAYEA0GjPoVrjS9eCqzoQ8VRe\nPkC0cI6ktiEgqPfHESFzyxyjC490Cuy19nuxPcJuZfN64MC48oOkR+W2mq4pM51i\nxmdG5xjvNOBRkJ5wUCc8fDCltMUTBlqt9y5eLsf/4/EoBU+zC4SW1iPU++mCsity\nfQQ7U6LOn3EYCyrkH51hZ/dvKC4o9TPYMVxNecJ3CL1q02Q145JlyjBTuM3Xdqsa\nndTHoXSRPmmzgB/1dL/c4QjMnCowrKW06mFLq9RAYGIaJWfM/0CbrOJpVDkATmEc\nMdpGJYDfW/sRQvRdlHNPo24ZW7vkQUCqdRxvnTWkK5U81y7RtjLt1yskbWXBIbOV\nz94GXsgyzANyCT9qRjHXDDz2mkLq+9I2iKtEqaEePcWRu3H6RLahpM/TxFzw684Y\nR47weXdDecPNxWyiWiyMGStRFP4Cg9trcwAGnEm1w8R2ggmWphznCd5dXGhPNjfA\na82yNFY8ubnOUVJOf0nXGg3Edw9iY3xyjJb2+nrsk5f3AgMBAAE=\n-----END PUBLIC KEY-----" }, "scheme": "rsassa-pss-sha256" }, "59a4df8af818e9ed7abe0764c0b47b4240952aa0d179b5b78346c470ac30278d": { "keyid_hash_algorithms": [ "sha256", "sha512" ], "keytype": "ed25519", "keyval": { "public": "edcd0a32a07dce33f7c7873aaffbff36d20ea30787574ead335eefd337e4dacd" }, "scheme": "ed25519" }, "65171251a9aff5a8b3143a813481cb07f6e0de4eb197c767837fe4491b739093": { "keyid_hash_algorithms": [ "sha256", "sha512" ], "keytype": "ed25519", "keyval": { "public": "89f28bd4ede5ec3786ab923fd154f39588d20881903e69c7b08fb504c6750815" 
}, "scheme": "ed25519" }, "8a1c4a3ac2d515dec982ba9910c5fd79b91ae57f625b9cff25d06bf0a61c1758": { "keyid_hash_algorithms": [ "sha256", "sha512" ], "keytype": "ed25519", "keyval": { "public": "82ccf6ac47298ff43bfa0cd639868894e305a99c723ff0515ae2e9856eb5bbf4" }, "scheme": "ed25519" } }, "roles": { "root": { "keyids": [ "4e777de0d275f9d28588dd9a1606cc748e548f9e22b6795b7cb3f63f98035fcb" ], "threshold": 1 }, "snapshot": { "keyids": [ "59a4df8af818e9ed7abe0764c0b47b4240952aa0d179b5b78346c470ac30278d" ], "threshold": 1 }, "targets": { "keyids": [ "65171251a9aff5a8b3143a813481cb07f6e0de4eb197c767837fe4491b739093" ], "threshold": 1 }, "timestamp": { "keyids": [ "8a1c4a3ac2d515dec982ba9910c5fd79b91ae57f625b9cff25d06bf0a61c1758" ], "threshold": 1 } }, "spec_version": "1.0.0", "version": 1 } }python-tuf-5.1.0/tests/repository_data/client/test_repository1/metadata/current/role1.json000066400000000000000000000023441470074210500322030ustar00rootroot00000000000000{ "signatures": [ { "keyid": "c8022fa1e9b9cb239a6b362bbdffa9649e61ad2cb699d2e4bc4fdf7930a0e64a", "sig": "9408b46569e622a46f1d35d9fa3c10e17a9285631ced4f2c9c2bba2c2842413fcb796db4e81d6f988fc056c21c407fdc3c10441592cf1e837e088f2e2dfd5403" } ], "signed": { "_type": "targets", "delegations": { "keys": { "c8022fa1e9b9cb239a6b362bbdffa9649e61ad2cb699d2e4bc4fdf7930a0e64a": { "keyid_hash_algorithms": [ "sha256", "sha512" ], "keytype": "ed25519", "keyval": { "public": "fcf224e55fa226056adf113ef1eb3d55e308b75b321c8c8316999d8c4fd9e0d9" }, "scheme": "ed25519" } }, "roles": [ { "keyids": [ "c8022fa1e9b9cb239a6b362bbdffa9649e61ad2cb699d2e4bc4fdf7930a0e64a" ], "name": "role2", "paths": [], "terminating": false, "threshold": 1 } ] }, "expires": "2030-01-01T00:00:00Z", "spec_version": "1.0.0", "targets": { "file3.txt": { "hashes": { "sha256": "141f740f53781d1ca54b8a50af22cbf74e44c21a998fa2a8a05aaac2c002886b", "sha512": "ef5beafa16041bcdd2937140afebd485296cd54f7348ecd5a4d035c09759608de467a7ac0eb58753d0242df873c305e8bffad2454aa48f44480f15efae1cacd0" }, "length": 28 } }, "version": 1 } }python-tuf-5.1.0/tests/repository_data/client/test_repository1/metadata/current/role2.json000066400000000000000000000006711470074210500322050ustar00rootroot00000000000000{ "signatures": [ { "keyid": "c8022fa1e9b9cb239a6b362bbdffa9649e61ad2cb699d2e4bc4fdf7930a0e64a", "sig": "6c32f8cc2c642803a7b3b022ede0cf727e82964c1aa934571ef366bd5050ed02cfe3fdfe5477c08d0cbcc2dd17bb786d37ab1ce2b27e01ad79faf087594e0300" } ], "signed": { "_type": "targets", "delegations": { "keys": {}, "roles": [] }, "expires": "2030-01-01T00:00:00Z", "spec_version": "1.0.0", "targets": {}, "version": 1 } }python-tuf-5.1.0/tests/repository_data/client/test_repository1/metadata/current/root.json000066400000000000000000000065041470074210500321460ustar00rootroot00000000000000{ "signatures": [ { "keyid": "4e777de0d275f9d28588dd9a1606cc748e548f9e22b6795b7cb3f63f98035fcb", "sig": 
"a337d6375fedd2eabfcd6c2ef6c8a9c3bb85dc5a857715f6a6bd41123e7670c4972d8548bcd7248154f3d864bf25f1823af59d74c459f41ea09a02db057ca1245612ebbdb97e782c501dc3e094f7fa8aa1402b03c6ed0635f565e2a26f9f543a89237e15a2faf0c267e2b34c3c38f2a43a28ddcdaf8308a12ead8c6dc47d1b762de313e9ddda8cc5bc25aea1b69d0e5b9199ca02f5dda48c3bff615fd12a7136d00634b9abc6e75c3256106c4d6f12e6c43f6195071355b2857bbe377ce028619b58837696b805040ce144b393d50a472531f430fadfb68d3081b6a8b5e49337e328c9a0a3f11e80b0bc8eb2dc6e78d1451dd857e6e6e6363c3fd14c590aa95e083c9bfc77724d78af86eb7a7ef635eeddaa353030c79f66b3ba9ea11fab456cfe896a826fdfb50a43cd444f762821aada9bcd7b022c0ee85b8768f960343d5a1d3d76374cc0ac9e12a500de0bf5d48569e5398cadadadab045931c398e3bcb6cec88af2437ba91959f956079cbed159fed3938016e6c3b5e446131f81cc5981" } ], "signed": { "_type": "root", "consistent_snapshot": false, "expires": "2030-01-01T00:00:00Z", "keys": { "4e777de0d275f9d28588dd9a1606cc748e548f9e22b6795b7cb3f63f98035fcb": { "keyid_hash_algorithms": [ "sha256", "sha512" ], "keytype": "rsa", "keyval": { "public": "-----BEGIN PUBLIC KEY-----\nMIIBojANBgkqhkiG9w0BAQEFAAOCAY8AMIIBigKCAYEA0GjPoVrjS9eCqzoQ8VRe\nPkC0cI6ktiEgqPfHESFzyxyjC490Cuy19nuxPcJuZfN64MC48oOkR+W2mq4pM51i\nxmdG5xjvNOBRkJ5wUCc8fDCltMUTBlqt9y5eLsf/4/EoBU+zC4SW1iPU++mCsity\nfQQ7U6LOn3EYCyrkH51hZ/dvKC4o9TPYMVxNecJ3CL1q02Q145JlyjBTuM3Xdqsa\nndTHoXSRPmmzgB/1dL/c4QjMnCowrKW06mFLq9RAYGIaJWfM/0CbrOJpVDkATmEc\nMdpGJYDfW/sRQvRdlHNPo24ZW7vkQUCqdRxvnTWkK5U81y7RtjLt1yskbWXBIbOV\nz94GXsgyzANyCT9qRjHXDDz2mkLq+9I2iKtEqaEePcWRu3H6RLahpM/TxFzw684Y\nR47weXdDecPNxWyiWiyMGStRFP4Cg9trcwAGnEm1w8R2ggmWphznCd5dXGhPNjfA\na82yNFY8ubnOUVJOf0nXGg3Edw9iY3xyjJb2+nrsk5f3AgMBAAE=\n-----END PUBLIC KEY-----" }, "scheme": "rsassa-pss-sha256" }, "59a4df8af818e9ed7abe0764c0b47b4240952aa0d179b5b78346c470ac30278d": { "keyid_hash_algorithms": [ "sha256", "sha512" ], "keytype": "ed25519", "keyval": { "public": "edcd0a32a07dce33f7c7873aaffbff36d20ea30787574ead335eefd337e4dacd" }, "scheme": "ed25519" }, "65171251a9aff5a8b3143a813481cb07f6e0de4eb197c767837fe4491b739093": { "keyid_hash_algorithms": [ "sha256", "sha512" ], "keytype": "ed25519", "keyval": { "public": "89f28bd4ede5ec3786ab923fd154f39588d20881903e69c7b08fb504c6750815" }, "scheme": "ed25519" }, "8a1c4a3ac2d515dec982ba9910c5fd79b91ae57f625b9cff25d06bf0a61c1758": { "keyid_hash_algorithms": [ "sha256", "sha512" ], "keytype": "ed25519", "keyval": { "public": "82ccf6ac47298ff43bfa0cd639868894e305a99c723ff0515ae2e9856eb5bbf4" }, "scheme": "ed25519" } }, "roles": { "root": { "keyids": [ "4e777de0d275f9d28588dd9a1606cc748e548f9e22b6795b7cb3f63f98035fcb" ], "threshold": 1 }, "snapshot": { "keyids": [ "59a4df8af818e9ed7abe0764c0b47b4240952aa0d179b5b78346c470ac30278d" ], "threshold": 1 }, "targets": { "keyids": [ "65171251a9aff5a8b3143a813481cb07f6e0de4eb197c767837fe4491b739093" ], "threshold": 1 }, "timestamp": { "keyids": [ "8a1c4a3ac2d515dec982ba9910c5fd79b91ae57f625b9cff25d06bf0a61c1758" ], "threshold": 1 } }, "spec_version": "1.0.0", "version": 1 } }python-tuf-5.1.0/tests/repository_data/client/test_repository1/metadata/current/snapshot.json000066400000000000000000000010031470074210500330070ustar00rootroot00000000000000{ "signatures": [ { "keyid": "59a4df8af818e9ed7abe0764c0b47b4240952aa0d179b5b78346c470ac30278d", "sig": "085672c70dffe26610e58542ee552843633cfed973abdad94c56138dbf0cd991644f2d3f27e4dda3098e08ab676e7f52627b587947ae69db1012d59a6da18e0c" } ], "signed": { "_type": "snapshot", "expires": "2030-01-01T00:00:00Z", "meta": { "role1.json": { "version": 1 }, "role2.json": { "version": 1 }, 
"targets.json": { "version": 1 } }, "spec_version": "1.0.0", "version": 1 } }python-tuf-5.1.0/tests/repository_data/client/test_repository1/metadata/current/targets.json000066400000000000000000000031301470074210500326240ustar00rootroot00000000000000{ "signatures": [ { "keyid": "65171251a9aff5a8b3143a813481cb07f6e0de4eb197c767837fe4491b739093", "sig": "d65f8db0c1a8f0976552b9742bbb393f24a5fa5eaf145c37aee047236c79dd0b83cfbb8b49fa7803689dfe0031dcf22c4d006b593acac07d69093b9b81722c08" } ], "signed": { "_type": "targets", "delegations": { "keys": { "c8022fa1e9b9cb239a6b362bbdffa9649e61ad2cb699d2e4bc4fdf7930a0e64a": { "keyid_hash_algorithms": [ "sha256", "sha512" ], "keytype": "ed25519", "keyval": { "public": "fcf224e55fa226056adf113ef1eb3d55e308b75b321c8c8316999d8c4fd9e0d9" }, "scheme": "ed25519" } }, "roles": [ { "keyids": [ "c8022fa1e9b9cb239a6b362bbdffa9649e61ad2cb699d2e4bc4fdf7930a0e64a" ], "name": "role1", "paths": [ "file3.txt" ], "terminating": false, "threshold": 1 } ] }, "expires": "2030-01-01T00:00:00Z", "spec_version": "1.0.0", "targets": { "file1.txt": { "custom": { "file_permissions": "0644" }, "hashes": { "sha256": "65b8c67f51c993d898250f40aa57a317d854900b3a04895464313e48785440da", "sha512": "467430a68afae8e9f9c0771ea5d78bf0b3a0d79a2d3d3b40c69fde4dd42c461448aef76fcef4f5284931a1ffd0ac096d138ba3a0d6ca83fa8d7285a47a296f77" }, "length": 31 }, "file2.txt": { "hashes": { "sha256": "452ce8308500d83ef44248d8e6062359211992fd837ea9e370e561efb1a4ca99", "sha512": "052b49a21e03606b28942db69aa597530fe52d47ee3d748ba65afcd14b857738e36bc1714c4f4adde46c3e683548552fe5c96722e0e0da3acd9050c2524902d8" }, "length": 39 } }, "version": 1 } }python-tuf-5.1.0/tests/repository_data/client/test_repository1/metadata/current/timestamp.json000066400000000000000000000010551470074210500331620ustar00rootroot00000000000000{ "signatures": [ { "keyid": "8a1c4a3ac2d515dec982ba9910c5fd79b91ae57f625b9cff25d06bf0a61c1758", "sig": "de0e16920f87bf5500cc65736488ac17e09788cce808f6a4e85eb9e4e478a312b4c1a2d7723af56f7bfb1df533c67d8c93b6f49d39eabe7fae391a08e1f72f01" } ], "signed": { "_type": "timestamp", "expires": "2030-01-01T00:00:00Z", "meta": { "snapshot.json": { "hashes": { "sha256": "8f88e2ba48b412c3843e9bb26e1b6f8fc9e98aceb0fbaa97ba37b4c98717d7ab" }, "length": 515, "version": 1 } }, "spec_version": "1.0.0", "version": 1 } }python-tuf-5.1.0/tests/repository_data/client/test_repository1/metadata/previous/000077500000000000000000000000001470074210500304555ustar00rootroot00000000000000python-tuf-5.1.0/tests/repository_data/client/test_repository1/metadata/previous/1.root.json000066400000000000000000000065041470074210500324770ustar00rootroot00000000000000{ "signatures": [ { "keyid": "4e777de0d275f9d28588dd9a1606cc748e548f9e22b6795b7cb3f63f98035fcb", "sig": "a337d6375fedd2eabfcd6c2ef6c8a9c3bb85dc5a857715f6a6bd41123e7670c4972d8548bcd7248154f3d864bf25f1823af59d74c459f41ea09a02db057ca1245612ebbdb97e782c501dc3e094f7fa8aa1402b03c6ed0635f565e2a26f9f543a89237e15a2faf0c267e2b34c3c38f2a43a28ddcdaf8308a12ead8c6dc47d1b762de313e9ddda8cc5bc25aea1b69d0e5b9199ca02f5dda48c3bff615fd12a7136d00634b9abc6e75c3256106c4d6f12e6c43f6195071355b2857bbe377ce028619b58837696b805040ce144b393d50a472531f430fadfb68d3081b6a8b5e49337e328c9a0a3f11e80b0bc8eb2dc6e78d1451dd857e6e6e6363c3fd14c590aa95e083c9bfc77724d78af86eb7a7ef635eeddaa353030c79f66b3ba9ea11fab456cfe896a826fdfb50a43cd444f762821aada9bcd7b022c0ee85b8768f960343d5a1d3d76374cc0ac9e12a500de0bf5d48569e5398cadadadab045931c398e3bcb6cec88af2437ba91959f956079cbed159fed3938016e6c3b5e446131f81cc5981" } ], "signed": { 
"_type": "root", "consistent_snapshot": false, "expires": "2030-01-01T00:00:00Z", "keys": { "4e777de0d275f9d28588dd9a1606cc748e548f9e22b6795b7cb3f63f98035fcb": { "keyid_hash_algorithms": [ "sha256", "sha512" ], "keytype": "rsa", "keyval": { "public": "-----BEGIN PUBLIC KEY-----\nMIIBojANBgkqhkiG9w0BAQEFAAOCAY8AMIIBigKCAYEA0GjPoVrjS9eCqzoQ8VRe\nPkC0cI6ktiEgqPfHESFzyxyjC490Cuy19nuxPcJuZfN64MC48oOkR+W2mq4pM51i\nxmdG5xjvNOBRkJ5wUCc8fDCltMUTBlqt9y5eLsf/4/EoBU+zC4SW1iPU++mCsity\nfQQ7U6LOn3EYCyrkH51hZ/dvKC4o9TPYMVxNecJ3CL1q02Q145JlyjBTuM3Xdqsa\nndTHoXSRPmmzgB/1dL/c4QjMnCowrKW06mFLq9RAYGIaJWfM/0CbrOJpVDkATmEc\nMdpGJYDfW/sRQvRdlHNPo24ZW7vkQUCqdRxvnTWkK5U81y7RtjLt1yskbWXBIbOV\nz94GXsgyzANyCT9qRjHXDDz2mkLq+9I2iKtEqaEePcWRu3H6RLahpM/TxFzw684Y\nR47weXdDecPNxWyiWiyMGStRFP4Cg9trcwAGnEm1w8R2ggmWphznCd5dXGhPNjfA\na82yNFY8ubnOUVJOf0nXGg3Edw9iY3xyjJb2+nrsk5f3AgMBAAE=\n-----END PUBLIC KEY-----" }, "scheme": "rsassa-pss-sha256" }, "59a4df8af818e9ed7abe0764c0b47b4240952aa0d179b5b78346c470ac30278d": { "keyid_hash_algorithms": [ "sha256", "sha512" ], "keytype": "ed25519", "keyval": { "public": "edcd0a32a07dce33f7c7873aaffbff36d20ea30787574ead335eefd337e4dacd" }, "scheme": "ed25519" }, "65171251a9aff5a8b3143a813481cb07f6e0de4eb197c767837fe4491b739093": { "keyid_hash_algorithms": [ "sha256", "sha512" ], "keytype": "ed25519", "keyval": { "public": "89f28bd4ede5ec3786ab923fd154f39588d20881903e69c7b08fb504c6750815" }, "scheme": "ed25519" }, "8a1c4a3ac2d515dec982ba9910c5fd79b91ae57f625b9cff25d06bf0a61c1758": { "keyid_hash_algorithms": [ "sha256", "sha512" ], "keytype": "ed25519", "keyval": { "public": "82ccf6ac47298ff43bfa0cd639868894e305a99c723ff0515ae2e9856eb5bbf4" }, "scheme": "ed25519" } }, "roles": { "root": { "keyids": [ "4e777de0d275f9d28588dd9a1606cc748e548f9e22b6795b7cb3f63f98035fcb" ], "threshold": 1 }, "snapshot": { "keyids": [ "59a4df8af818e9ed7abe0764c0b47b4240952aa0d179b5b78346c470ac30278d" ], "threshold": 1 }, "targets": { "keyids": [ "65171251a9aff5a8b3143a813481cb07f6e0de4eb197c767837fe4491b739093" ], "threshold": 1 }, "timestamp": { "keyids": [ "8a1c4a3ac2d515dec982ba9910c5fd79b91ae57f625b9cff25d06bf0a61c1758" ], "threshold": 1 } }, "spec_version": "1.0.0", "version": 1 } }python-tuf-5.1.0/tests/repository_data/client/test_repository1/metadata/previous/role1.json000066400000000000000000000023441470074210500323750ustar00rootroot00000000000000{ "signatures": [ { "keyid": "c8022fa1e9b9cb239a6b362bbdffa9649e61ad2cb699d2e4bc4fdf7930a0e64a", "sig": "9408b46569e622a46f1d35d9fa3c10e17a9285631ced4f2c9c2bba2c2842413fcb796db4e81d6f988fc056c21c407fdc3c10441592cf1e837e088f2e2dfd5403" } ], "signed": { "_type": "targets", "delegations": { "keys": { "c8022fa1e9b9cb239a6b362bbdffa9649e61ad2cb699d2e4bc4fdf7930a0e64a": { "keyid_hash_algorithms": [ "sha256", "sha512" ], "keytype": "ed25519", "keyval": { "public": "fcf224e55fa226056adf113ef1eb3d55e308b75b321c8c8316999d8c4fd9e0d9" }, "scheme": "ed25519" } }, "roles": [ { "keyids": [ "c8022fa1e9b9cb239a6b362bbdffa9649e61ad2cb699d2e4bc4fdf7930a0e64a" ], "name": "role2", "paths": [], "terminating": false, "threshold": 1 } ] }, "expires": "2030-01-01T00:00:00Z", "spec_version": "1.0.0", "targets": { "file3.txt": { "hashes": { "sha256": "141f740f53781d1ca54b8a50af22cbf74e44c21a998fa2a8a05aaac2c002886b", "sha512": "ef5beafa16041bcdd2937140afebd485296cd54f7348ecd5a4d035c09759608de467a7ac0eb58753d0242df873c305e8bffad2454aa48f44480f15efae1cacd0" }, "length": 28 } }, "version": 1 } 
}python-tuf-5.1.0/tests/repository_data/client/test_repository1/metadata/previous/role2.json000066400000000000000000000006711470074210500323770ustar00rootroot00000000000000{ "signatures": [ { "keyid": "c8022fa1e9b9cb239a6b362bbdffa9649e61ad2cb699d2e4bc4fdf7930a0e64a", "sig": "6c32f8cc2c642803a7b3b022ede0cf727e82964c1aa934571ef366bd5050ed02cfe3fdfe5477c08d0cbcc2dd17bb786d37ab1ce2b27e01ad79faf087594e0300" } ], "signed": { "_type": "targets", "delegations": { "keys": {}, "roles": [] }, "expires": "2030-01-01T00:00:00Z", "spec_version": "1.0.0", "targets": {}, "version": 1 } }python-tuf-5.1.0/tests/repository_data/client/test_repository1/metadata/previous/root.json000066400000000000000000000065041470074210500323400ustar00rootroot00000000000000{ "signatures": [ { "keyid": "4e777de0d275f9d28588dd9a1606cc748e548f9e22b6795b7cb3f63f98035fcb", "sig": "a337d6375fedd2eabfcd6c2ef6c8a9c3bb85dc5a857715f6a6bd41123e7670c4972d8548bcd7248154f3d864bf25f1823af59d74c459f41ea09a02db057ca1245612ebbdb97e782c501dc3e094f7fa8aa1402b03c6ed0635f565e2a26f9f543a89237e15a2faf0c267e2b34c3c38f2a43a28ddcdaf8308a12ead8c6dc47d1b762de313e9ddda8cc5bc25aea1b69d0e5b9199ca02f5dda48c3bff615fd12a7136d00634b9abc6e75c3256106c4d6f12e6c43f6195071355b2857bbe377ce028619b58837696b805040ce144b393d50a472531f430fadfb68d3081b6a8b5e49337e328c9a0a3f11e80b0bc8eb2dc6e78d1451dd857e6e6e6363c3fd14c590aa95e083c9bfc77724d78af86eb7a7ef635eeddaa353030c79f66b3ba9ea11fab456cfe896a826fdfb50a43cd444f762821aada9bcd7b022c0ee85b8768f960343d5a1d3d76374cc0ac9e12a500de0bf5d48569e5398cadadadab045931c398e3bcb6cec88af2437ba91959f956079cbed159fed3938016e6c3b5e446131f81cc5981" } ], "signed": { "_type": "root", "consistent_snapshot": false, "expires": "2030-01-01T00:00:00Z", "keys": { "4e777de0d275f9d28588dd9a1606cc748e548f9e22b6795b7cb3f63f98035fcb": { "keyid_hash_algorithms": [ "sha256", "sha512" ], "keytype": "rsa", "keyval": { "public": "-----BEGIN PUBLIC KEY-----\nMIIBojANBgkqhkiG9w0BAQEFAAOCAY8AMIIBigKCAYEA0GjPoVrjS9eCqzoQ8VRe\nPkC0cI6ktiEgqPfHESFzyxyjC490Cuy19nuxPcJuZfN64MC48oOkR+W2mq4pM51i\nxmdG5xjvNOBRkJ5wUCc8fDCltMUTBlqt9y5eLsf/4/EoBU+zC4SW1iPU++mCsity\nfQQ7U6LOn3EYCyrkH51hZ/dvKC4o9TPYMVxNecJ3CL1q02Q145JlyjBTuM3Xdqsa\nndTHoXSRPmmzgB/1dL/c4QjMnCowrKW06mFLq9RAYGIaJWfM/0CbrOJpVDkATmEc\nMdpGJYDfW/sRQvRdlHNPo24ZW7vkQUCqdRxvnTWkK5U81y7RtjLt1yskbWXBIbOV\nz94GXsgyzANyCT9qRjHXDDz2mkLq+9I2iKtEqaEePcWRu3H6RLahpM/TxFzw684Y\nR47weXdDecPNxWyiWiyMGStRFP4Cg9trcwAGnEm1w8R2ggmWphznCd5dXGhPNjfA\na82yNFY8ubnOUVJOf0nXGg3Edw9iY3xyjJb2+nrsk5f3AgMBAAE=\n-----END PUBLIC KEY-----" }, "scheme": "rsassa-pss-sha256" }, "59a4df8af818e9ed7abe0764c0b47b4240952aa0d179b5b78346c470ac30278d": { "keyid_hash_algorithms": [ "sha256", "sha512" ], "keytype": "ed25519", "keyval": { "public": "edcd0a32a07dce33f7c7873aaffbff36d20ea30787574ead335eefd337e4dacd" }, "scheme": "ed25519" }, "65171251a9aff5a8b3143a813481cb07f6e0de4eb197c767837fe4491b739093": { "keyid_hash_algorithms": [ "sha256", "sha512" ], "keytype": "ed25519", "keyval": { "public": "89f28bd4ede5ec3786ab923fd154f39588d20881903e69c7b08fb504c6750815" }, "scheme": "ed25519" }, "8a1c4a3ac2d515dec982ba9910c5fd79b91ae57f625b9cff25d06bf0a61c1758": { "keyid_hash_algorithms": [ "sha256", "sha512" ], "keytype": "ed25519", "keyval": { "public": "82ccf6ac47298ff43bfa0cd639868894e305a99c723ff0515ae2e9856eb5bbf4" }, "scheme": "ed25519" } }, "roles": { "root": { "keyids": [ "4e777de0d275f9d28588dd9a1606cc748e548f9e22b6795b7cb3f63f98035fcb" ], "threshold": 1 }, "snapshot": { "keyids": [ "59a4df8af818e9ed7abe0764c0b47b4240952aa0d179b5b78346c470ac30278d" ], 
"threshold": 1 }, "targets": { "keyids": [ "65171251a9aff5a8b3143a813481cb07f6e0de4eb197c767837fe4491b739093" ], "threshold": 1 }, "timestamp": { "keyids": [ "8a1c4a3ac2d515dec982ba9910c5fd79b91ae57f625b9cff25d06bf0a61c1758" ], "threshold": 1 } }, "spec_version": "1.0.0", "version": 1 } }python-tuf-5.1.0/tests/repository_data/client/test_repository1/metadata/previous/snapshot.json000066400000000000000000000010031470074210500332010ustar00rootroot00000000000000{ "signatures": [ { "keyid": "59a4df8af818e9ed7abe0764c0b47b4240952aa0d179b5b78346c470ac30278d", "sig": "085672c70dffe26610e58542ee552843633cfed973abdad94c56138dbf0cd991644f2d3f27e4dda3098e08ab676e7f52627b587947ae69db1012d59a6da18e0c" } ], "signed": { "_type": "snapshot", "expires": "2030-01-01T00:00:00Z", "meta": { "role1.json": { "version": 1 }, "role2.json": { "version": 1 }, "targets.json": { "version": 1 } }, "spec_version": "1.0.0", "version": 1 } }python-tuf-5.1.0/tests/repository_data/client/test_repository1/metadata/previous/targets.json000066400000000000000000000031301470074210500330160ustar00rootroot00000000000000{ "signatures": [ { "keyid": "65171251a9aff5a8b3143a813481cb07f6e0de4eb197c767837fe4491b739093", "sig": "d65f8db0c1a8f0976552b9742bbb393f24a5fa5eaf145c37aee047236c79dd0b83cfbb8b49fa7803689dfe0031dcf22c4d006b593acac07d69093b9b81722c08" } ], "signed": { "_type": "targets", "delegations": { "keys": { "c8022fa1e9b9cb239a6b362bbdffa9649e61ad2cb699d2e4bc4fdf7930a0e64a": { "keyid_hash_algorithms": [ "sha256", "sha512" ], "keytype": "ed25519", "keyval": { "public": "fcf224e55fa226056adf113ef1eb3d55e308b75b321c8c8316999d8c4fd9e0d9" }, "scheme": "ed25519" } }, "roles": [ { "keyids": [ "c8022fa1e9b9cb239a6b362bbdffa9649e61ad2cb699d2e4bc4fdf7930a0e64a" ], "name": "role1", "paths": [ "file3.txt" ], "terminating": false, "threshold": 1 } ] }, "expires": "2030-01-01T00:00:00Z", "spec_version": "1.0.0", "targets": { "file1.txt": { "custom": { "file_permissions": "0644" }, "hashes": { "sha256": "65b8c67f51c993d898250f40aa57a317d854900b3a04895464313e48785440da", "sha512": "467430a68afae8e9f9c0771ea5d78bf0b3a0d79a2d3d3b40c69fde4dd42c461448aef76fcef4f5284931a1ffd0ac096d138ba3a0d6ca83fa8d7285a47a296f77" }, "length": 31 }, "file2.txt": { "hashes": { "sha256": "452ce8308500d83ef44248d8e6062359211992fd837ea9e370e561efb1a4ca99", "sha512": "052b49a21e03606b28942db69aa597530fe52d47ee3d748ba65afcd14b857738e36bc1714c4f4adde46c3e683548552fe5c96722e0e0da3acd9050c2524902d8" }, "length": 39 } }, "version": 1 } }python-tuf-5.1.0/tests/repository_data/client/test_repository1/metadata/previous/timestamp.json000066400000000000000000000010551470074210500333540ustar00rootroot00000000000000{ "signatures": [ { "keyid": "8a1c4a3ac2d515dec982ba9910c5fd79b91ae57f625b9cff25d06bf0a61c1758", "sig": "de0e16920f87bf5500cc65736488ac17e09788cce808f6a4e85eb9e4e478a312b4c1a2d7723af56f7bfb1df533c67d8c93b6f49d39eabe7fae391a08e1f72f01" } ], "signed": { "_type": "timestamp", "expires": "2030-01-01T00:00:00Z", "meta": { "snapshot.json": { "hashes": { "sha256": "8f88e2ba48b412c3843e9bb26e1b6f8fc9e98aceb0fbaa97ba37b4c98717d7ab" }, "length": 515, "version": 1 } }, "spec_version": "1.0.0", "version": 1 } 
}python-tuf-5.1.0/tests/repository_data/client/test_repository2/000077500000000000000000000000001470074210500250225ustar00rootroot00000000000000python-tuf-5.1.0/tests/repository_data/client/test_repository2/metadata/000077500000000000000000000000001470074210500266025ustar00rootroot00000000000000python-tuf-5.1.0/tests/repository_data/client/test_repository2/metadata/current/000077500000000000000000000000001470074210500302645ustar00rootroot00000000000000python-tuf-5.1.0/tests/repository_data/client/test_repository2/metadata/current/1.root.json000066400000000000000000000065041470074210500323060ustar00rootroot00000000000000{ "signatures": [ { "keyid": "4e777de0d275f9d28588dd9a1606cc748e548f9e22b6795b7cb3f63f98035fcb", "sig": "a337d6375fedd2eabfcd6c2ef6c8a9c3bb85dc5a857715f6a6bd41123e7670c4972d8548bcd7248154f3d864bf25f1823af59d74c459f41ea09a02db057ca1245612ebbdb97e782c501dc3e094f7fa8aa1402b03c6ed0635f565e2a26f9f543a89237e15a2faf0c267e2b34c3c38f2a43a28ddcdaf8308a12ead8c6dc47d1b762de313e9ddda8cc5bc25aea1b69d0e5b9199ca02f5dda48c3bff615fd12a7136d00634b9abc6e75c3256106c4d6f12e6c43f6195071355b2857bbe377ce028619b58837696b805040ce144b393d50a472531f430fadfb68d3081b6a8b5e49337e328c9a0a3f11e80b0bc8eb2dc6e78d1451dd857e6e6e6363c3fd14c590aa95e083c9bfc77724d78af86eb7a7ef635eeddaa353030c79f66b3ba9ea11fab456cfe896a826fdfb50a43cd444f762821aada9bcd7b022c0ee85b8768f960343d5a1d3d76374cc0ac9e12a500de0bf5d48569e5398cadadadab045931c398e3bcb6cec88af2437ba91959f956079cbed159fed3938016e6c3b5e446131f81cc5981" } ], "signed": { "_type": "root", "consistent_snapshot": false, "expires": "2030-01-01T00:00:00Z", "keys": { "4e777de0d275f9d28588dd9a1606cc748e548f9e22b6795b7cb3f63f98035fcb": { "keyid_hash_algorithms": [ "sha256", "sha512" ], "keytype": "rsa", "keyval": { "public": "-----BEGIN PUBLIC KEY-----\nMIIBojANBgkqhkiG9w0BAQEFAAOCAY8AMIIBigKCAYEA0GjPoVrjS9eCqzoQ8VRe\nPkC0cI6ktiEgqPfHESFzyxyjC490Cuy19nuxPcJuZfN64MC48oOkR+W2mq4pM51i\nxmdG5xjvNOBRkJ5wUCc8fDCltMUTBlqt9y5eLsf/4/EoBU+zC4SW1iPU++mCsity\nfQQ7U6LOn3EYCyrkH51hZ/dvKC4o9TPYMVxNecJ3CL1q02Q145JlyjBTuM3Xdqsa\nndTHoXSRPmmzgB/1dL/c4QjMnCowrKW06mFLq9RAYGIaJWfM/0CbrOJpVDkATmEc\nMdpGJYDfW/sRQvRdlHNPo24ZW7vkQUCqdRxvnTWkK5U81y7RtjLt1yskbWXBIbOV\nz94GXsgyzANyCT9qRjHXDDz2mkLq+9I2iKtEqaEePcWRu3H6RLahpM/TxFzw684Y\nR47weXdDecPNxWyiWiyMGStRFP4Cg9trcwAGnEm1w8R2ggmWphznCd5dXGhPNjfA\na82yNFY8ubnOUVJOf0nXGg3Edw9iY3xyjJb2+nrsk5f3AgMBAAE=\n-----END PUBLIC KEY-----" }, "scheme": "rsassa-pss-sha256" }, "59a4df8af818e9ed7abe0764c0b47b4240952aa0d179b5b78346c470ac30278d": { "keyid_hash_algorithms": [ "sha256", "sha512" ], "keytype": "ed25519", "keyval": { "public": "edcd0a32a07dce33f7c7873aaffbff36d20ea30787574ead335eefd337e4dacd" }, "scheme": "ed25519" }, "65171251a9aff5a8b3143a813481cb07f6e0de4eb197c767837fe4491b739093": { "keyid_hash_algorithms": [ "sha256", "sha512" ], "keytype": "ed25519", "keyval": { "public": "89f28bd4ede5ec3786ab923fd154f39588d20881903e69c7b08fb504c6750815" }, "scheme": "ed25519" }, "8a1c4a3ac2d515dec982ba9910c5fd79b91ae57f625b9cff25d06bf0a61c1758": { "keyid_hash_algorithms": [ "sha256", "sha512" ], "keytype": "ed25519", "keyval": { "public": "82ccf6ac47298ff43bfa0cd639868894e305a99c723ff0515ae2e9856eb5bbf4" }, "scheme": "ed25519" } }, "roles": { "root": { "keyids": [ "4e777de0d275f9d28588dd9a1606cc748e548f9e22b6795b7cb3f63f98035fcb" ], "threshold": 1 }, "snapshot": { "keyids": [ "59a4df8af818e9ed7abe0764c0b47b4240952aa0d179b5b78346c470ac30278d" ], "threshold": 1 }, "targets": { "keyids": [ "65171251a9aff5a8b3143a813481cb07f6e0de4eb197c767837fe4491b739093" ], 
"threshold": 1 }, "timestamp": { "keyids": [ "8a1c4a3ac2d515dec982ba9910c5fd79b91ae57f625b9cff25d06bf0a61c1758" ], "threshold": 1 } }, "spec_version": "1.0.0", "version": 1 } }python-tuf-5.1.0/tests/repository_data/client/test_repository2/metadata/current/role1.json000066400000000000000000000023441470074210500322040ustar00rootroot00000000000000{ "signatures": [ { "keyid": "c8022fa1e9b9cb239a6b362bbdffa9649e61ad2cb699d2e4bc4fdf7930a0e64a", "sig": "9408b46569e622a46f1d35d9fa3c10e17a9285631ced4f2c9c2bba2c2842413fcb796db4e81d6f988fc056c21c407fdc3c10441592cf1e837e088f2e2dfd5403" } ], "signed": { "_type": "targets", "delegations": { "keys": { "c8022fa1e9b9cb239a6b362bbdffa9649e61ad2cb699d2e4bc4fdf7930a0e64a": { "keyid_hash_algorithms": [ "sha256", "sha512" ], "keytype": "ed25519", "keyval": { "public": "fcf224e55fa226056adf113ef1eb3d55e308b75b321c8c8316999d8c4fd9e0d9" }, "scheme": "ed25519" } }, "roles": [ { "keyids": [ "c8022fa1e9b9cb239a6b362bbdffa9649e61ad2cb699d2e4bc4fdf7930a0e64a" ], "name": "role2", "paths": [], "terminating": false, "threshold": 1 } ] }, "expires": "2030-01-01T00:00:00Z", "spec_version": "1.0.0", "targets": { "file3.txt": { "hashes": { "sha256": "141f740f53781d1ca54b8a50af22cbf74e44c21a998fa2a8a05aaac2c002886b", "sha512": "ef5beafa16041bcdd2937140afebd485296cd54f7348ecd5a4d035c09759608de467a7ac0eb58753d0242df873c305e8bffad2454aa48f44480f15efae1cacd0" }, "length": 28 } }, "version": 1 } }python-tuf-5.1.0/tests/repository_data/client/test_repository2/metadata/current/role2.json000066400000000000000000000006711470074210500322060ustar00rootroot00000000000000{ "signatures": [ { "keyid": "c8022fa1e9b9cb239a6b362bbdffa9649e61ad2cb699d2e4bc4fdf7930a0e64a", "sig": "6c32f8cc2c642803a7b3b022ede0cf727e82964c1aa934571ef366bd5050ed02cfe3fdfe5477c08d0cbcc2dd17bb786d37ab1ce2b27e01ad79faf087594e0300" } ], "signed": { "_type": "targets", "delegations": { "keys": {}, "roles": [] }, "expires": "2030-01-01T00:00:00Z", "spec_version": "1.0.0", "targets": {}, "version": 1 } }python-tuf-5.1.0/tests/repository_data/client/test_repository2/metadata/current/root.json000066400000000000000000000065041470074210500321470ustar00rootroot00000000000000{ "signatures": [ { "keyid": "4e777de0d275f9d28588dd9a1606cc748e548f9e22b6795b7cb3f63f98035fcb", "sig": "a337d6375fedd2eabfcd6c2ef6c8a9c3bb85dc5a857715f6a6bd41123e7670c4972d8548bcd7248154f3d864bf25f1823af59d74c459f41ea09a02db057ca1245612ebbdb97e782c501dc3e094f7fa8aa1402b03c6ed0635f565e2a26f9f543a89237e15a2faf0c267e2b34c3c38f2a43a28ddcdaf8308a12ead8c6dc47d1b762de313e9ddda8cc5bc25aea1b69d0e5b9199ca02f5dda48c3bff615fd12a7136d00634b9abc6e75c3256106c4d6f12e6c43f6195071355b2857bbe377ce028619b58837696b805040ce144b393d50a472531f430fadfb68d3081b6a8b5e49337e328c9a0a3f11e80b0bc8eb2dc6e78d1451dd857e6e6e6363c3fd14c590aa95e083c9bfc77724d78af86eb7a7ef635eeddaa353030c79f66b3ba9ea11fab456cfe896a826fdfb50a43cd444f762821aada9bcd7b022c0ee85b8768f960343d5a1d3d76374cc0ac9e12a500de0bf5d48569e5398cadadadab045931c398e3bcb6cec88af2437ba91959f956079cbed159fed3938016e6c3b5e446131f81cc5981" } ], "signed": { "_type": "root", "consistent_snapshot": false, "expires": "2030-01-01T00:00:00Z", "keys": { "4e777de0d275f9d28588dd9a1606cc748e548f9e22b6795b7cb3f63f98035fcb": { "keyid_hash_algorithms": [ "sha256", "sha512" ], "keytype": "rsa", "keyval": { "public": "-----BEGIN PUBLIC 
KEY-----\nMIIBojANBgkqhkiG9w0BAQEFAAOCAY8AMIIBigKCAYEA0GjPoVrjS9eCqzoQ8VRe\nPkC0cI6ktiEgqPfHESFzyxyjC490Cuy19nuxPcJuZfN64MC48oOkR+W2mq4pM51i\nxmdG5xjvNOBRkJ5wUCc8fDCltMUTBlqt9y5eLsf/4/EoBU+zC4SW1iPU++mCsity\nfQQ7U6LOn3EYCyrkH51hZ/dvKC4o9TPYMVxNecJ3CL1q02Q145JlyjBTuM3Xdqsa\nndTHoXSRPmmzgB/1dL/c4QjMnCowrKW06mFLq9RAYGIaJWfM/0CbrOJpVDkATmEc\nMdpGJYDfW/sRQvRdlHNPo24ZW7vkQUCqdRxvnTWkK5U81y7RtjLt1yskbWXBIbOV\nz94GXsgyzANyCT9qRjHXDDz2mkLq+9I2iKtEqaEePcWRu3H6RLahpM/TxFzw684Y\nR47weXdDecPNxWyiWiyMGStRFP4Cg9trcwAGnEm1w8R2ggmWphznCd5dXGhPNjfA\na82yNFY8ubnOUVJOf0nXGg3Edw9iY3xyjJb2+nrsk5f3AgMBAAE=\n-----END PUBLIC KEY-----" }, "scheme": "rsassa-pss-sha256" }, "59a4df8af818e9ed7abe0764c0b47b4240952aa0d179b5b78346c470ac30278d": { "keyid_hash_algorithms": [ "sha256", "sha512" ], "keytype": "ed25519", "keyval": { "public": "edcd0a32a07dce33f7c7873aaffbff36d20ea30787574ead335eefd337e4dacd" }, "scheme": "ed25519" }, "65171251a9aff5a8b3143a813481cb07f6e0de4eb197c767837fe4491b739093": { "keyid_hash_algorithms": [ "sha256", "sha512" ], "keytype": "ed25519", "keyval": { "public": "89f28bd4ede5ec3786ab923fd154f39588d20881903e69c7b08fb504c6750815" }, "scheme": "ed25519" }, "8a1c4a3ac2d515dec982ba9910c5fd79b91ae57f625b9cff25d06bf0a61c1758": { "keyid_hash_algorithms": [ "sha256", "sha512" ], "keytype": "ed25519", "keyval": { "public": "82ccf6ac47298ff43bfa0cd639868894e305a99c723ff0515ae2e9856eb5bbf4" }, "scheme": "ed25519" } }, "roles": { "root": { "keyids": [ "4e777de0d275f9d28588dd9a1606cc748e548f9e22b6795b7cb3f63f98035fcb" ], "threshold": 1 }, "snapshot": { "keyids": [ "59a4df8af818e9ed7abe0764c0b47b4240952aa0d179b5b78346c470ac30278d" ], "threshold": 1 }, "targets": { "keyids": [ "65171251a9aff5a8b3143a813481cb07f6e0de4eb197c767837fe4491b739093" ], "threshold": 1 }, "timestamp": { "keyids": [ "8a1c4a3ac2d515dec982ba9910c5fd79b91ae57f625b9cff25d06bf0a61c1758" ], "threshold": 1 } }, "spec_version": "1.0.0", "version": 1 } }python-tuf-5.1.0/tests/repository_data/client/test_repository2/metadata/current/snapshot.json000066400000000000000000000010031470074210500330100ustar00rootroot00000000000000{ "signatures": [ { "keyid": "59a4df8af818e9ed7abe0764c0b47b4240952aa0d179b5b78346c470ac30278d", "sig": "085672c70dffe26610e58542ee552843633cfed973abdad94c56138dbf0cd991644f2d3f27e4dda3098e08ab676e7f52627b587947ae69db1012d59a6da18e0c" } ], "signed": { "_type": "snapshot", "expires": "2030-01-01T00:00:00Z", "meta": { "role1.json": { "version": 1 }, "role2.json": { "version": 1 }, "targets.json": { "version": 1 } }, "spec_version": "1.0.0", "version": 1 } }python-tuf-5.1.0/tests/repository_data/client/test_repository2/metadata/current/targets.json000066400000000000000000000031301470074210500326250ustar00rootroot00000000000000{ "signatures": [ { "keyid": "65171251a9aff5a8b3143a813481cb07f6e0de4eb197c767837fe4491b739093", "sig": "d65f8db0c1a8f0976552b9742bbb393f24a5fa5eaf145c37aee047236c79dd0b83cfbb8b49fa7803689dfe0031dcf22c4d006b593acac07d69093b9b81722c08" } ], "signed": { "_type": "targets", "delegations": { "keys": { "c8022fa1e9b9cb239a6b362bbdffa9649e61ad2cb699d2e4bc4fdf7930a0e64a": { "keyid_hash_algorithms": [ "sha256", "sha512" ], "keytype": "ed25519", "keyval": { "public": "fcf224e55fa226056adf113ef1eb3d55e308b75b321c8c8316999d8c4fd9e0d9" }, "scheme": "ed25519" } }, "roles": [ { "keyids": [ "c8022fa1e9b9cb239a6b362bbdffa9649e61ad2cb699d2e4bc4fdf7930a0e64a" ], "name": "role1", "paths": [ "file3.txt" ], "terminating": false, "threshold": 1 } ] }, "expires": "2030-01-01T00:00:00Z", "spec_version": "1.0.0", "targets": { 
"file1.txt": { "custom": { "file_permissions": "0644" }, "hashes": { "sha256": "65b8c67f51c993d898250f40aa57a317d854900b3a04895464313e48785440da", "sha512": "467430a68afae8e9f9c0771ea5d78bf0b3a0d79a2d3d3b40c69fde4dd42c461448aef76fcef4f5284931a1ffd0ac096d138ba3a0d6ca83fa8d7285a47a296f77" }, "length": 31 }, "file2.txt": { "hashes": { "sha256": "452ce8308500d83ef44248d8e6062359211992fd837ea9e370e561efb1a4ca99", "sha512": "052b49a21e03606b28942db69aa597530fe52d47ee3d748ba65afcd14b857738e36bc1714c4f4adde46c3e683548552fe5c96722e0e0da3acd9050c2524902d8" }, "length": 39 } }, "version": 1 } }python-tuf-5.1.0/tests/repository_data/client/test_repository2/metadata/current/timestamp.json000066400000000000000000000010551470074210500331630ustar00rootroot00000000000000{ "signatures": [ { "keyid": "8a1c4a3ac2d515dec982ba9910c5fd79b91ae57f625b9cff25d06bf0a61c1758", "sig": "de0e16920f87bf5500cc65736488ac17e09788cce808f6a4e85eb9e4e478a312b4c1a2d7723af56f7bfb1df533c67d8c93b6f49d39eabe7fae391a08e1f72f01" } ], "signed": { "_type": "timestamp", "expires": "2030-01-01T00:00:00Z", "meta": { "snapshot.json": { "hashes": { "sha256": "8f88e2ba48b412c3843e9bb26e1b6f8fc9e98aceb0fbaa97ba37b4c98717d7ab" }, "length": 515, "version": 1 } }, "spec_version": "1.0.0", "version": 1 } }python-tuf-5.1.0/tests/repository_data/client/test_repository2/metadata/previous/000077500000000000000000000000001470074210500304565ustar00rootroot00000000000000python-tuf-5.1.0/tests/repository_data/client/test_repository2/metadata/previous/1.root.json000066400000000000000000000065041470074210500325000ustar00rootroot00000000000000{ "signatures": [ { "keyid": "4e777de0d275f9d28588dd9a1606cc748e548f9e22b6795b7cb3f63f98035fcb", "sig": "a337d6375fedd2eabfcd6c2ef6c8a9c3bb85dc5a857715f6a6bd41123e7670c4972d8548bcd7248154f3d864bf25f1823af59d74c459f41ea09a02db057ca1245612ebbdb97e782c501dc3e094f7fa8aa1402b03c6ed0635f565e2a26f9f543a89237e15a2faf0c267e2b34c3c38f2a43a28ddcdaf8308a12ead8c6dc47d1b762de313e9ddda8cc5bc25aea1b69d0e5b9199ca02f5dda48c3bff615fd12a7136d00634b9abc6e75c3256106c4d6f12e6c43f6195071355b2857bbe377ce028619b58837696b805040ce144b393d50a472531f430fadfb68d3081b6a8b5e49337e328c9a0a3f11e80b0bc8eb2dc6e78d1451dd857e6e6e6363c3fd14c590aa95e083c9bfc77724d78af86eb7a7ef635eeddaa353030c79f66b3ba9ea11fab456cfe896a826fdfb50a43cd444f762821aada9bcd7b022c0ee85b8768f960343d5a1d3d76374cc0ac9e12a500de0bf5d48569e5398cadadadab045931c398e3bcb6cec88af2437ba91959f956079cbed159fed3938016e6c3b5e446131f81cc5981" } ], "signed": { "_type": "root", "consistent_snapshot": false, "expires": "2030-01-01T00:00:00Z", "keys": { "4e777de0d275f9d28588dd9a1606cc748e548f9e22b6795b7cb3f63f98035fcb": { "keyid_hash_algorithms": [ "sha256", "sha512" ], "keytype": "rsa", "keyval": { "public": "-----BEGIN PUBLIC KEY-----\nMIIBojANBgkqhkiG9w0BAQEFAAOCAY8AMIIBigKCAYEA0GjPoVrjS9eCqzoQ8VRe\nPkC0cI6ktiEgqPfHESFzyxyjC490Cuy19nuxPcJuZfN64MC48oOkR+W2mq4pM51i\nxmdG5xjvNOBRkJ5wUCc8fDCltMUTBlqt9y5eLsf/4/EoBU+zC4SW1iPU++mCsity\nfQQ7U6LOn3EYCyrkH51hZ/dvKC4o9TPYMVxNecJ3CL1q02Q145JlyjBTuM3Xdqsa\nndTHoXSRPmmzgB/1dL/c4QjMnCowrKW06mFLq9RAYGIaJWfM/0CbrOJpVDkATmEc\nMdpGJYDfW/sRQvRdlHNPo24ZW7vkQUCqdRxvnTWkK5U81y7RtjLt1yskbWXBIbOV\nz94GXsgyzANyCT9qRjHXDDz2mkLq+9I2iKtEqaEePcWRu3H6RLahpM/TxFzw684Y\nR47weXdDecPNxWyiWiyMGStRFP4Cg9trcwAGnEm1w8R2ggmWphznCd5dXGhPNjfA\na82yNFY8ubnOUVJOf0nXGg3Edw9iY3xyjJb2+nrsk5f3AgMBAAE=\n-----END PUBLIC KEY-----" }, "scheme": "rsassa-pss-sha256" }, "59a4df8af818e9ed7abe0764c0b47b4240952aa0d179b5b78346c470ac30278d": { "keyid_hash_algorithms": [ "sha256", "sha512" ], "keytype": 
"ed25519", "keyval": { "public": "edcd0a32a07dce33f7c7873aaffbff36d20ea30787574ead335eefd337e4dacd" }, "scheme": "ed25519" }, "65171251a9aff5a8b3143a813481cb07f6e0de4eb197c767837fe4491b739093": { "keyid_hash_algorithms": [ "sha256", "sha512" ], "keytype": "ed25519", "keyval": { "public": "89f28bd4ede5ec3786ab923fd154f39588d20881903e69c7b08fb504c6750815" }, "scheme": "ed25519" }, "8a1c4a3ac2d515dec982ba9910c5fd79b91ae57f625b9cff25d06bf0a61c1758": { "keyid_hash_algorithms": [ "sha256", "sha512" ], "keytype": "ed25519", "keyval": { "public": "82ccf6ac47298ff43bfa0cd639868894e305a99c723ff0515ae2e9856eb5bbf4" }, "scheme": "ed25519" } }, "roles": { "root": { "keyids": [ "4e777de0d275f9d28588dd9a1606cc748e548f9e22b6795b7cb3f63f98035fcb" ], "threshold": 1 }, "snapshot": { "keyids": [ "59a4df8af818e9ed7abe0764c0b47b4240952aa0d179b5b78346c470ac30278d" ], "threshold": 1 }, "targets": { "keyids": [ "65171251a9aff5a8b3143a813481cb07f6e0de4eb197c767837fe4491b739093" ], "threshold": 1 }, "timestamp": { "keyids": [ "8a1c4a3ac2d515dec982ba9910c5fd79b91ae57f625b9cff25d06bf0a61c1758" ], "threshold": 1 } }, "spec_version": "1.0.0", "version": 1 } }python-tuf-5.1.0/tests/repository_data/client/test_repository2/metadata/previous/role1.json000066400000000000000000000023441470074210500323760ustar00rootroot00000000000000{ "signatures": [ { "keyid": "c8022fa1e9b9cb239a6b362bbdffa9649e61ad2cb699d2e4bc4fdf7930a0e64a", "sig": "9408b46569e622a46f1d35d9fa3c10e17a9285631ced4f2c9c2bba2c2842413fcb796db4e81d6f988fc056c21c407fdc3c10441592cf1e837e088f2e2dfd5403" } ], "signed": { "_type": "targets", "delegations": { "keys": { "c8022fa1e9b9cb239a6b362bbdffa9649e61ad2cb699d2e4bc4fdf7930a0e64a": { "keyid_hash_algorithms": [ "sha256", "sha512" ], "keytype": "ed25519", "keyval": { "public": "fcf224e55fa226056adf113ef1eb3d55e308b75b321c8c8316999d8c4fd9e0d9" }, "scheme": "ed25519" } }, "roles": [ { "keyids": [ "c8022fa1e9b9cb239a6b362bbdffa9649e61ad2cb699d2e4bc4fdf7930a0e64a" ], "name": "role2", "paths": [], "terminating": false, "threshold": 1 } ] }, "expires": "2030-01-01T00:00:00Z", "spec_version": "1.0.0", "targets": { "file3.txt": { "hashes": { "sha256": "141f740f53781d1ca54b8a50af22cbf74e44c21a998fa2a8a05aaac2c002886b", "sha512": "ef5beafa16041bcdd2937140afebd485296cd54f7348ecd5a4d035c09759608de467a7ac0eb58753d0242df873c305e8bffad2454aa48f44480f15efae1cacd0" }, "length": 28 } }, "version": 1 } }python-tuf-5.1.0/tests/repository_data/client/test_repository2/metadata/previous/role2.json000066400000000000000000000006711470074210500324000ustar00rootroot00000000000000{ "signatures": [ { "keyid": "c8022fa1e9b9cb239a6b362bbdffa9649e61ad2cb699d2e4bc4fdf7930a0e64a", "sig": "6c32f8cc2c642803a7b3b022ede0cf727e82964c1aa934571ef366bd5050ed02cfe3fdfe5477c08d0cbcc2dd17bb786d37ab1ce2b27e01ad79faf087594e0300" } ], "signed": { "_type": "targets", "delegations": { "keys": {}, "roles": [] }, "expires": "2030-01-01T00:00:00Z", "spec_version": "1.0.0", "targets": {}, "version": 1 } }python-tuf-5.1.0/tests/repository_data/client/test_repository2/metadata/previous/root.json000066400000000000000000000065041470074210500323410ustar00rootroot00000000000000{ "signatures": [ { "keyid": "4e777de0d275f9d28588dd9a1606cc748e548f9e22b6795b7cb3f63f98035fcb", "sig": 
"a337d6375fedd2eabfcd6c2ef6c8a9c3bb85dc5a857715f6a6bd41123e7670c4972d8548bcd7248154f3d864bf25f1823af59d74c459f41ea09a02db057ca1245612ebbdb97e782c501dc3e094f7fa8aa1402b03c6ed0635f565e2a26f9f543a89237e15a2faf0c267e2b34c3c38f2a43a28ddcdaf8308a12ead8c6dc47d1b762de313e9ddda8cc5bc25aea1b69d0e5b9199ca02f5dda48c3bff615fd12a7136d00634b9abc6e75c3256106c4d6f12e6c43f6195071355b2857bbe377ce028619b58837696b805040ce144b393d50a472531f430fadfb68d3081b6a8b5e49337e328c9a0a3f11e80b0bc8eb2dc6e78d1451dd857e6e6e6363c3fd14c590aa95e083c9bfc77724d78af86eb7a7ef635eeddaa353030c79f66b3ba9ea11fab456cfe896a826fdfb50a43cd444f762821aada9bcd7b022c0ee85b8768f960343d5a1d3d76374cc0ac9e12a500de0bf5d48569e5398cadadadab045931c398e3bcb6cec88af2437ba91959f956079cbed159fed3938016e6c3b5e446131f81cc5981" } ], "signed": { "_type": "root", "consistent_snapshot": false, "expires": "2030-01-01T00:00:00Z", "keys": { "4e777de0d275f9d28588dd9a1606cc748e548f9e22b6795b7cb3f63f98035fcb": { "keyid_hash_algorithms": [ "sha256", "sha512" ], "keytype": "rsa", "keyval": { "public": "-----BEGIN PUBLIC KEY-----\nMIIBojANBgkqhkiG9w0BAQEFAAOCAY8AMIIBigKCAYEA0GjPoVrjS9eCqzoQ8VRe\nPkC0cI6ktiEgqPfHESFzyxyjC490Cuy19nuxPcJuZfN64MC48oOkR+W2mq4pM51i\nxmdG5xjvNOBRkJ5wUCc8fDCltMUTBlqt9y5eLsf/4/EoBU+zC4SW1iPU++mCsity\nfQQ7U6LOn3EYCyrkH51hZ/dvKC4o9TPYMVxNecJ3CL1q02Q145JlyjBTuM3Xdqsa\nndTHoXSRPmmzgB/1dL/c4QjMnCowrKW06mFLq9RAYGIaJWfM/0CbrOJpVDkATmEc\nMdpGJYDfW/sRQvRdlHNPo24ZW7vkQUCqdRxvnTWkK5U81y7RtjLt1yskbWXBIbOV\nz94GXsgyzANyCT9qRjHXDDz2mkLq+9I2iKtEqaEePcWRu3H6RLahpM/TxFzw684Y\nR47weXdDecPNxWyiWiyMGStRFP4Cg9trcwAGnEm1w8R2ggmWphznCd5dXGhPNjfA\na82yNFY8ubnOUVJOf0nXGg3Edw9iY3xyjJb2+nrsk5f3AgMBAAE=\n-----END PUBLIC KEY-----" }, "scheme": "rsassa-pss-sha256" }, "59a4df8af818e9ed7abe0764c0b47b4240952aa0d179b5b78346c470ac30278d": { "keyid_hash_algorithms": [ "sha256", "sha512" ], "keytype": "ed25519", "keyval": { "public": "edcd0a32a07dce33f7c7873aaffbff36d20ea30787574ead335eefd337e4dacd" }, "scheme": "ed25519" }, "65171251a9aff5a8b3143a813481cb07f6e0de4eb197c767837fe4491b739093": { "keyid_hash_algorithms": [ "sha256", "sha512" ], "keytype": "ed25519", "keyval": { "public": "89f28bd4ede5ec3786ab923fd154f39588d20881903e69c7b08fb504c6750815" }, "scheme": "ed25519" }, "8a1c4a3ac2d515dec982ba9910c5fd79b91ae57f625b9cff25d06bf0a61c1758": { "keyid_hash_algorithms": [ "sha256", "sha512" ], "keytype": "ed25519", "keyval": { "public": "82ccf6ac47298ff43bfa0cd639868894e305a99c723ff0515ae2e9856eb5bbf4" }, "scheme": "ed25519" } }, "roles": { "root": { "keyids": [ "4e777de0d275f9d28588dd9a1606cc748e548f9e22b6795b7cb3f63f98035fcb" ], "threshold": 1 }, "snapshot": { "keyids": [ "59a4df8af818e9ed7abe0764c0b47b4240952aa0d179b5b78346c470ac30278d" ], "threshold": 1 }, "targets": { "keyids": [ "65171251a9aff5a8b3143a813481cb07f6e0de4eb197c767837fe4491b739093" ], "threshold": 1 }, "timestamp": { "keyids": [ "8a1c4a3ac2d515dec982ba9910c5fd79b91ae57f625b9cff25d06bf0a61c1758" ], "threshold": 1 } }, "spec_version": "1.0.0", "version": 1 } }python-tuf-5.1.0/tests/repository_data/client/test_repository2/metadata/previous/snapshot.json000066400000000000000000000010031470074210500332020ustar00rootroot00000000000000{ "signatures": [ { "keyid": "59a4df8af818e9ed7abe0764c0b47b4240952aa0d179b5b78346c470ac30278d", "sig": "085672c70dffe26610e58542ee552843633cfed973abdad94c56138dbf0cd991644f2d3f27e4dda3098e08ab676e7f52627b587947ae69db1012d59a6da18e0c" } ], "signed": { "_type": "snapshot", "expires": "2030-01-01T00:00:00Z", "meta": { "role1.json": { "version": 1 }, "role2.json": { "version": 1 }, 
"targets.json": { "version": 1 } }, "spec_version": "1.0.0", "version": 1 } }python-tuf-5.1.0/tests/repository_data/client/test_repository2/metadata/previous/targets.json000066400000000000000000000031301470074210500330170ustar00rootroot00000000000000{ "signatures": [ { "keyid": "65171251a9aff5a8b3143a813481cb07f6e0de4eb197c767837fe4491b739093", "sig": "d65f8db0c1a8f0976552b9742bbb393f24a5fa5eaf145c37aee047236c79dd0b83cfbb8b49fa7803689dfe0031dcf22c4d006b593acac07d69093b9b81722c08" } ], "signed": { "_type": "targets", "delegations": { "keys": { "c8022fa1e9b9cb239a6b362bbdffa9649e61ad2cb699d2e4bc4fdf7930a0e64a": { "keyid_hash_algorithms": [ "sha256", "sha512" ], "keytype": "ed25519", "keyval": { "public": "fcf224e55fa226056adf113ef1eb3d55e308b75b321c8c8316999d8c4fd9e0d9" }, "scheme": "ed25519" } }, "roles": [ { "keyids": [ "c8022fa1e9b9cb239a6b362bbdffa9649e61ad2cb699d2e4bc4fdf7930a0e64a" ], "name": "role1", "paths": [ "file3.txt" ], "terminating": false, "threshold": 1 } ] }, "expires": "2030-01-01T00:00:00Z", "spec_version": "1.0.0", "targets": { "file1.txt": { "custom": { "file_permissions": "0644" }, "hashes": { "sha256": "65b8c67f51c993d898250f40aa57a317d854900b3a04895464313e48785440da", "sha512": "467430a68afae8e9f9c0771ea5d78bf0b3a0d79a2d3d3b40c69fde4dd42c461448aef76fcef4f5284931a1ffd0ac096d138ba3a0d6ca83fa8d7285a47a296f77" }, "length": 31 }, "file2.txt": { "hashes": { "sha256": "452ce8308500d83ef44248d8e6062359211992fd837ea9e370e561efb1a4ca99", "sha512": "052b49a21e03606b28942db69aa597530fe52d47ee3d748ba65afcd14b857738e36bc1714c4f4adde46c3e683548552fe5c96722e0e0da3acd9050c2524902d8" }, "length": 39 } }, "version": 1 } }python-tuf-5.1.0/tests/repository_data/client/test_repository2/metadata/previous/timestamp.json000066400000000000000000000010551470074210500333550ustar00rootroot00000000000000{ "signatures": [ { "keyid": "8a1c4a3ac2d515dec982ba9910c5fd79b91ae57f625b9cff25d06bf0a61c1758", "sig": "de0e16920f87bf5500cc65736488ac17e09788cce808f6a4e85eb9e4e478a312b4c1a2d7723af56f7bfb1df533c67d8c93b6f49d39eabe7fae391a08e1f72f01" } ], "signed": { "_type": "timestamp", "expires": "2030-01-01T00:00:00Z", "meta": { "snapshot.json": { "hashes": { "sha256": "8f88e2ba48b412c3843e9bb26e1b6f8fc9e98aceb0fbaa97ba37b4c98717d7ab" }, "length": 515, "version": 1 } }, "spec_version": "1.0.0", "version": 1 } }python-tuf-5.1.0/tests/repository_data/fishy_rolenames/000077500000000000000000000000001470074210500233735ustar00rootroot00000000000000python-tuf-5.1.0/tests/repository_data/fishy_rolenames/1.a.json000066400000000000000000000006041470074210500246450ustar00rootroot00000000000000{ "signatures": [ { "keyid": "f9b50dd62b5540788b5c5cde0842124b64fa467261bc349dd77de49568eed0ef", "sig": "a36aa69e0c35d8b5b9578bc656ce5d8a76ea05a2c814f59cc710a11f5e3fe6c7bcbef2bfba4812e3b2936f99e89f10862f6320c901e213f1343e79525474920a" } ], "signed": { "_type": "targets", "expires": "2050-10-22T11:21:56Z", "spec_version": "1.0.19", "targets": {}, "version": 1 } }python-tuf-5.1.0/tests/repository_data/fishy_rolenames/metadata/000077500000000000000000000000001470074210500251535ustar00rootroot00000000000000python-tuf-5.1.0/tests/repository_data/fishy_rolenames/metadata/1...json000066400000000000000000000006041470074210500263420ustar00rootroot00000000000000{ "signatures": [ { "keyid": "80a5bda93ec130c2fda8ce0c619d7b122b24cc2e0743afedf98a8e368d32019c", "sig": "8fff438c2347dd7c4fb94c43ec347bcd6b0e79521bd11d95121cb8cc25723efa38565a959a6123da0a2375a2093e53f13a5412df9e51397e06b313837d0d590c" } ], "signed": { "_type": "targets", "expires": 
"2050-10-22T11:21:56Z", "spec_version": "1.0.19", "targets": {}, "version": 1 } }python-tuf-5.1.0/tests/repository_data/fishy_rolenames/metadata/1.root.json000066400000000000000000000035701470074210500271750ustar00rootroot00000000000000{ "signatures": [ { "keyid": "72b70899257dc30b596af3a9fe141a924af821aff28ed58d1aea0db9f70a70f7", "sig": "53ae844137dd04abf9d3ed10380ba46fa2726f328963ffe006aa955804afa3b0d100bc59610c1584234a9598ab4b9af762b533174b8b8d8aaf2be8e413c1b304" } ], "signed": { "_type": "root", "consistent_snapshot": true, "expires": "2050-10-22T11:21:56Z", "keys": { "59a4df8af818e9ed7abe0764c0b47b4240952aa0d179b5b78346c470ac30278d": { "keytype": "ed25519", "keyval": { "public": "edcd0a32a07dce33f7c7873aaffbff36d20ea30787574ead335eefd337e4dacd" }, "scheme": "ed25519" }, "65171251a9aff5a8b3143a813481cb07f6e0de4eb197c767837fe4491b739093": { "keytype": "ed25519", "keyval": { "public": "89f28bd4ede5ec3786ab923fd154f39588d20881903e69c7b08fb504c6750815" }, "scheme": "ed25519" }, "72b70899257dc30b596af3a9fe141a924af821aff28ed58d1aea0db9f70a70f7": { "keytype": "ed25519", "keyval": { "public": "3ba219e69666298bce5d1d653a166346aef807c02e32a846aaefcb5190fddeb4" }, "scheme": "ed25519" }, "8a1c4a3ac2d515dec982ba9910c5fd79b91ae57f625b9cff25d06bf0a61c1758": { "keytype": "ed25519", "keyval": { "public": "82ccf6ac47298ff43bfa0cd639868894e305a99c723ff0515ae2e9856eb5bbf4" }, "scheme": "ed25519" } }, "roles": { "root": { "keyids": [ "72b70899257dc30b596af3a9fe141a924af821aff28ed58d1aea0db9f70a70f7" ], "threshold": 1 }, "snapshot": { "keyids": [ "59a4df8af818e9ed7abe0764c0b47b4240952aa0d179b5b78346c470ac30278d" ], "threshold": 1 }, "targets": { "keyids": [ "65171251a9aff5a8b3143a813481cb07f6e0de4eb197c767837fe4491b739093" ], "threshold": 1 }, "timestamp": { "keyids": [ "8a1c4a3ac2d515dec982ba9910c5fd79b91ae57f625b9cff25d06bf0a61c1758" ], "threshold": 1 } }, "spec_version": "1.0.19", "version": 1 } }python-tuf-5.1.0/tests/repository_data/fishy_rolenames/metadata/1.targets.json000066400000000000000000000034211470074210500276560ustar00rootroot00000000000000{ "signatures": [ { "keyid": "65171251a9aff5a8b3143a813481cb07f6e0de4eb197c767837fe4491b739093", "sig": "b390c5d9d5355b963e94dfa30ce04520c462fd869fad968d01f0a3b185db5895807b14435e725ff376adc793fd21ef8f01890ac722c94e9c05ab3797c4887101" } ], "signed": { "_type": "targets", "delegations": { "keys": { "426edf0d9fa383688c5b40b7b7d15a7cd11a991f12cc20da87f1b48dd6c036a1": { "keytype": "ed25519", "keyval": { "public": "d38eef769f6dee77b6d898dce548c0ea0f90add0072dc28a20769b6421552ec3" }, "scheme": "ed25519" }, "80a5bda93ec130c2fda8ce0c619d7b122b24cc2e0743afedf98a8e368d32019c": { "keytype": "ed25519", "keyval": { "public": "bb256c0b6d5226a5a9ae8377c0bf68e958fb668d063971f48638b9bae5251f3b" }, "scheme": "ed25519" }, "f9b50dd62b5540788b5c5cde0842124b64fa467261bc349dd77de49568eed0ef": { "keytype": "ed25519", "keyval": { "public": "da1b8586dc0cdd5fe0d8d428bde62dc63e06138f58cfc39770c424a4636f59f4" }, "scheme": "ed25519" } }, "roles": [ { "keyids": [ "f9b50dd62b5540788b5c5cde0842124b64fa467261bc349dd77de49568eed0ef" ], "name": "../a", "paths": [ "*" ], "terminating": false, "threshold": 1 }, { "keyids": [ "80a5bda93ec130c2fda8ce0c619d7b122b24cc2e0743afedf98a8e368d32019c" ], "name": ".", "paths": [ "*" ], "terminating": false, "threshold": 1 }, { "keyids": [ "426edf0d9fa383688c5b40b7b7d15a7cd11a991f12cc20da87f1b48dd6c036a1" ], "name": "\u00f6", "paths": [ "*" ], "terminating": false, "threshold": 1 } ] }, "expires": "2050-10-22T11:21:56Z", "spec_version": "1.0.19", 
"targets": {}, "version": 1 } }python-tuf-5.1.0/tests/repository_data/fishy_rolenames/metadata/1.ö.json000066400000000000000000000006041470074210500270550ustar00rootroot00000000000000{ "signatures": [ { "keyid": "426edf0d9fa383688c5b40b7b7d15a7cd11a991f12cc20da87f1b48dd6c036a1", "sig": "faada7f8c9a238955d5b27dbd88032a6c9068742cb114a66f97c730235a8033dd1ff0647f4bbc2b49210c33655a3d7755e754e245799683b3f4e00a59f3da006" } ], "signed": { "_type": "targets", "expires": "2050-10-22T11:21:56Z", "spec_version": "1.0.19", "targets": {}, "version": 1 } }python-tuf-5.1.0/tests/repository_data/fishy_rolenames/metadata/2.snapshot.json000066400000000000000000000010521470074210500300430ustar00rootroot00000000000000{ "signatures": [ { "keyid": "59a4df8af818e9ed7abe0764c0b47b4240952aa0d179b5b78346c470ac30278d", "sig": "5b00100e9cf1c083f8347371ab840cf60124780305124ed7a53fe31bf43473c90b1d2c802ed2f11f5057ba21e6b7a05118b1907f737d2e29c9692aa3345f9801" } ], "signed": { "_type": "snapshot", "expires": "2050-10-22T11:21:56Z", "meta": { "../a.json": { "version": 1 }, "..json": { "version": 1 }, "targets.json": { "version": 1 }, "\u00f6.json": { "version": 1 } }, "spec_version": "1.0.19", "version": 2 } }python-tuf-5.1.0/tests/repository_data/fishy_rolenames/metadata/timestamp.json000066400000000000000000000006621470074210500300550ustar00rootroot00000000000000{ "signatures": [ { "keyid": "8a1c4a3ac2d515dec982ba9910c5fd79b91ae57f625b9cff25d06bf0a61c1758", "sig": "f7003e848366c7e55f474df2c0c68471c44c68a87c0d3c1aa56f64778c91e9c8f22c3adc4dd9ec0535b6b4dc04783f7fa4ca992bed2445c7395a58acff152f0d" } ], "signed": { "_type": "timestamp", "expires": "2050-10-22T11:21:56Z", "meta": { "snapshot.json": { "version": 2 } }, "spec_version": "1.0.19", "version": 2 } }python-tuf-5.1.0/tests/repository_data/keystore/000077500000000000000000000000001470074210500220515ustar00rootroot00000000000000python-tuf-5.1.0/tests/repository_data/keystore/delegation_key000066400000000000000000000001671470074210500247630ustar00rootroot00000000000000-----BEGIN PRIVATE KEY----- MC4CAQAwBQYDK2VwBCIEIJ12nHk+mGJcC5/tw3PzDZq9gDr6NW/b4ezXfx5dSgsM -----END PRIVATE KEY----- python-tuf-5.1.0/tests/repository_data/keystore/root_key000066400000000000000000000046701470074210500236360ustar00rootroot00000000000000-----BEGIN PRIVATE KEY----- MIIG/wIBADANBgkqhkiG9w0BAQEFAASCBukwggblAgEAAoIBgQDQaM+hWuNL14Kr OhDxVF4+QLRwjqS2ISCo98cRIXPLHKMLj3QK7LX2e7E9wm5l83rgwLjyg6RH5baa rikznWLGZ0bnGO804FGQnnBQJzx8MKW0xRMGWq33Ll4ux//j8SgFT7MLhJbWI9T7 6YKyK3J9BDtTos6fcRgLKuQfnWFn928oLij1M9gxXE15wncIvWrTZDXjkmXKMFO4 zdd2qxqd1MehdJE+abOAH/V0v9zhCMycKjCspbTqYUur1EBgYholZ8z/QJus4mlU OQBOYRwx2kYlgN9b+xFC9F2Uc0+jbhlbu+RBQKp1HG+dNaQrlTzXLtG2Mu3XKyRt ZcEhs5XP3gZeyDLMA3IJP2pGMdcMPPaaQur70jaIq0SpoR49xZG7cfpEtqGkz9PE XPDrzhhHjvB5d0N5w83FbKJaLIwZK1EU/gKD22tzAAacSbXDxHaCCZamHOcJ3l1c aE82N8BrzbI0Vjy5uc5RUk5/SdcaDcR3D2JjfHKMlvb6euyTl/cCAwEAAQKCAYEA kQzxvb7RRd7n3h6a3iw3L6K/MzvEXdKutYtGbKDYw7vZqtkcDeJ0PuoWEQL67VBJ 7JWV44xF0ZiKwBuJJ5hZv/bvfTZ4flfFzR7I0rCMQ29kVW14cUq5m7kU6gBfFBmr Hg87cT/F76KewPnj8feVRnekhvBgWM5Qyqz+exaBTegD4HZIIWkFBk3UynLTgCy9 ZgVwEES7Pb7m9k+lr70k2EbY7oF/+W199iXII4rJw4HpTqN6nx7xzNMM5LnkWHDN uj+g9cCRCPS8BNXcbUmBNthVpaDU79NhHwoFFaYswAOeW1jKpssF9hf1cLpQyaLp jQqSEF5VMdygEOzuKijq5oJef5zyuSgqkBpvtuUFLkcz9RkJQk3lTpIO5QUy9sek iikGjucVay5f3N1iJOQi+D+qDAI7cIJTi9hIL/0Xrt0PmSbcAPTvTGP/05I/wyi6 VD4ClpQFgyZ7OiCiDuwOjv+/mWusN4+mxNyJqtr2b4YZNupRBmsmTvjXSWuqHiih AoHBAOnnLy9MbeN+WDkbteHs4XE09NR4D6yEbMpEXTzJygZj8DPmoEAn6ojfanKC NipHvJ0JX+uphzDJ3ZlAdYjZr1ny2VziQNBcfcmf3o1VVxW0KZ8WI4eRmsljFJka 
Av+YaLtI+nKvNQxPgD3mS5t/Y6p/kxnGOMIpjbUhKT4HP1u/DdyzIuC5Ur+KJxlJ pvauHXz0xx6bszNvMIiuddDG0AG8jwZuiZzYGBEsFmscWDgrG3Hk90ir1416m1+7 jpgIMQKBwQDkGRO7qXNSYtfsWL9UcFTnjb+OYwoKMTppMNb2u+aBZXkWjTJFdT0H aJp1lsfsFARsNWCq/4uRQute+CMbxDDlXP72jZB407FAWlQie7UWsnRy6/+WeHRM 5gSeRl9n8NSOmb/EH5bsV0sjkLt4VXD0FTeDnu2SwhqVNZ+qdWnbhKmwxxTd2dAA VoEEftohucYDdRfKrp+YbZn8Sa8Dfge9QiLgE28MrHhy/ZlRUlhiSg9Bl4CDFlpL sn0wFV56QKcCgcEAnBhETPRcgW1XwwTTJKrI6JvGp+RX0XGuiG2HK4Ie6JTZQEmw uB/rTNyMVU7AhwbIwKP493RzXAPbduKljWZ4tzZyCKKVTnfrGhsuknNZYoqRHDHS FC7/dVZB8MqDJb+4ZQQW32I9rLGBi82ct3EUOjxZFuJKDolcoHw44cREbB3cSmTh 6cbDij/QR/f3DLi1xSY1nB+cP778TLrgtSt4tS/44vnxrFIp/YvGikSoOxPJhQCg ZkcH2srv1bt9NciBAoHAU0JcE5oMwDvYOStD25yNQWBaVa0NEx9ZBOCQ9ssrnnvd sT+k4/mhZzzldJqvKxs7agwp1wEkfseAhs/ocNAyUOabIoAWBiSvhJ/0Kgoh1cEa BIDkcJZTTWaAtQ1W8efUjqDMgNhPDMHoaXkBFTGK422DMAYpDfLQJTrHpz7ofvpz vlVM5pYE+LqaqXtsP/dBsi1hm9gV5VvMY2y593pfdNPZSxWM6YFjDgZHmomGPYpu +zBD9pWILC1gyNZkABftAoHBANXJibUQ35zhQJEnvHXZ5WJqzJN6R5Zv4sQVd6+o NM5EotwNMvmjHcW+Q13M2UtaH2mFsUJuOnYCwqqNarJw5+LLEq+CgO0GaJ0wE4TU 1n2/11vAdKMUqvsj92YYq5Y+L/sue9PAYHUMvPsTG75u6fv3ZhJEfneNRqen3jco 3uxlzo/Yjv1fPO6RD9821dRwZDawaoLFidj/Gnqm9PKHux2papWnfP/dkWKLQwl2 Vu3D0GBOEF8YB2ae3BSVpM+T1Q== -----END PRIVATE KEY----- python-tuf-5.1.0/tests/repository_data/keystore/root_key2000066400000000000000000000014661470074210500237200ustar00rootroot0000000000000077c02ab5647ee765d5f6c5fc202a5b32@@@@100000@@@@7c73c1100fab52dc8695c1b955d31770ed6e53f1820d9020aeb6541c948573d9@@@@98280307ffa9c5f6ff1fea1a4b79d0ea@@@@f3342882b1cf842e3377ab4205c0ca8fab564cc55fa742f55b364a1ac597e93d8c56a9a6e6bbb6a812556077be44a1066ac6781a6ed34b86beaf3985f846f007dab31c46af562e921f03c1ea8d299f15324ab137aa426ee61d396a7e20191aa71a70b670775b2ad48f25de367fb48881c55e93f468c6e59402907e82985c27c94c715161c85c5c1904353ba33c3d129988029f03a2d7d00720118697baaf73a3c4e72f8e538b4323866fe525ddccfcfc6dd45598545f65cd7ab581f5172bc253416283a66621eb03dbabaf33923bb1963f9f8cbae6fd6a1c86736a8f80c8d1ba3cbc3f53b0123ba9b0bdd44f25b65033b19a50ee978d2687d6a2ee724515a20026d0213ced59cda9bfdf37c82c59e1356795fd603d85996f448a3c9357b32de2042997a1d27353ee3866c0ed5218d633e0b28991119d77d147354c7fa2de8a168d17efdfd5fa9a8e528bd47ede4ff697python-tuf-5.1.0/tests/repository_data/keystore/root_key3000066400000000000000000000026061470074210500237160ustar00rootroot00000000000000a3d266f446cb23c0248feed240a8a85a@@@@100000@@@@61ea41c73d4b1d8bd7566a9884a2fdb88c1d4e48550341e532768f98c8f4bd3c@@@@46b15764c50c934fcfc041a5fa207337@@@@d84b8c473d5f42d2bbceca28b0087c2c5908673b2a92eb8f1ca91dacc27c1cfac24c98d06191f6f54633dd428e9ca0987f183e8f34322a104dc38a0f4fefcc168f21e203e3abc5842f132df2dcb61d6b31dc19d0ecb50e898655f81e9b8a9730f2bff4c5ca4b6fc0b572a7e3672b6dc814ed127c964d960a57155c29eccf44824442d3c6761662ed2d8a1c48a3222d0f0cb1a58f543ccd852c247522595d987d95d1bf49dfdffaf33f18085460dac791d81347cc576a83c6ebca2625d26ddd294e74fa67f676a02d533b52fc9702237b2c898469a30753d98b091cd6aa713aa7b0c4c741684674084b27862e64adf4b1e88fa22cfcf6eeae8608dd818a4cba020058fa7271028ea9d9a7302c9e50e82972a82ac2080201c0fb9f2fb1cadfe97d62470414428227add1c40594f5135a8169d0d7d0889cb4a1949b015e65f5dc656204c58c463acc5b7872f4a078d0bc5a09a7795187e360e7b225892601aa9065086b24397f653d20e59a656ec86ef94e64d5baf16080f12a7f2461b92f99dfb5bf2e4dadec91cc72d8eede952449fd586c863734d84f31e036ecc96c55ab7baa9b049c20b8281a7c28f5ca42d9cfad6498f51ee907bfd9dc17e2a1bc9b69145ee82a86a90817394c01770581727889d3ba1791592c7ac2e74753485f1811cc4477078732873185240fc1572927d2fef210066bdf015471bd9d1683e8074b3fb6957246589dc62dea4843a17a7c734ae45ae20d31f0083a32d3310fae459f
e3fbf7c763e5e4ead4acd9b0233e45237f4465576e85ff707fe316488f329d5bc73596b104cc28b926d6b1f5a3d26a0a6ec534a3cbc54cab97f5cea51f17b8d7f1cc6c9977275c34ee4942dd3e22a19ae1e4252199226cc4fd60python-tuf-5.1.0/tests/repository_data/keystore/snapshot_key000066400000000000000000000001671470074210500245070ustar00rootroot00000000000000-----BEGIN PRIVATE KEY----- MC4CAQAwBQYDK2VwBCIEIIOSksDAfmq3o/kDq7QpZ3/Kg1bium+Svw5pvR2ZBhs6 -----END PRIVATE KEY----- python-tuf-5.1.0/tests/repository_data/keystore/targets_key000066400000000000000000000001671470074210500243210ustar00rootroot00000000000000-----BEGIN PRIVATE KEY----- MC4CAQAwBQYDK2VwBCIEIMKnhTXOqdJvhJ2bJd5dn80MvCykZTplwJ0SUpKiHfI5 -----END PRIVATE KEY----- python-tuf-5.1.0/tests/repository_data/keystore/timestamp_key000066400000000000000000000001671470074210500246530ustar00rootroot00000000000000-----BEGIN PRIVATE KEY----- MC4CAQAwBQYDK2VwBCIEIB5Zzk1MbB0e30cDCjV7H3c712RsaRJgLn5GgUvbSRzH -----END PRIVATE KEY----- python-tuf-5.1.0/tests/repository_data/repository/000077500000000000000000000000001470074210500224235ustar00rootroot00000000000000python-tuf-5.1.0/tests/repository_data/repository/metadata/000077500000000000000000000000001470074210500242035ustar00rootroot00000000000000python-tuf-5.1.0/tests/repository_data/repository/metadata/1.root.json000066400000000000000000000065041470074210500262250ustar00rootroot00000000000000{ "signatures": [ { "keyid": "4e777de0d275f9d28588dd9a1606cc748e548f9e22b6795b7cb3f63f98035fcb", "sig": "a337d6375fedd2eabfcd6c2ef6c8a9c3bb85dc5a857715f6a6bd41123e7670c4972d8548bcd7248154f3d864bf25f1823af59d74c459f41ea09a02db057ca1245612ebbdb97e782c501dc3e094f7fa8aa1402b03c6ed0635f565e2a26f9f543a89237e15a2faf0c267e2b34c3c38f2a43a28ddcdaf8308a12ead8c6dc47d1b762de313e9ddda8cc5bc25aea1b69d0e5b9199ca02f5dda48c3bff615fd12a7136d00634b9abc6e75c3256106c4d6f12e6c43f6195071355b2857bbe377ce028619b58837696b805040ce144b393d50a472531f430fadfb68d3081b6a8b5e49337e328c9a0a3f11e80b0bc8eb2dc6e78d1451dd857e6e6e6363c3fd14c590aa95e083c9bfc77724d78af86eb7a7ef635eeddaa353030c79f66b3ba9ea11fab456cfe896a826fdfb50a43cd444f762821aada9bcd7b022c0ee85b8768f960343d5a1d3d76374cc0ac9e12a500de0bf5d48569e5398cadadadab045931c398e3bcb6cec88af2437ba91959f956079cbed159fed3938016e6c3b5e446131f81cc5981" } ], "signed": { "_type": "root", "consistent_snapshot": false, "expires": "2030-01-01T00:00:00Z", "keys": { "4e777de0d275f9d28588dd9a1606cc748e548f9e22b6795b7cb3f63f98035fcb": { "keyid_hash_algorithms": [ "sha256", "sha512" ], "keytype": "rsa", "keyval": { "public": "-----BEGIN PUBLIC KEY-----\nMIIBojANBgkqhkiG9w0BAQEFAAOCAY8AMIIBigKCAYEA0GjPoVrjS9eCqzoQ8VRe\nPkC0cI6ktiEgqPfHESFzyxyjC490Cuy19nuxPcJuZfN64MC48oOkR+W2mq4pM51i\nxmdG5xjvNOBRkJ5wUCc8fDCltMUTBlqt9y5eLsf/4/EoBU+zC4SW1iPU++mCsity\nfQQ7U6LOn3EYCyrkH51hZ/dvKC4o9TPYMVxNecJ3CL1q02Q145JlyjBTuM3Xdqsa\nndTHoXSRPmmzgB/1dL/c4QjMnCowrKW06mFLq9RAYGIaJWfM/0CbrOJpVDkATmEc\nMdpGJYDfW/sRQvRdlHNPo24ZW7vkQUCqdRxvnTWkK5U81y7RtjLt1yskbWXBIbOV\nz94GXsgyzANyCT9qRjHXDDz2mkLq+9I2iKtEqaEePcWRu3H6RLahpM/TxFzw684Y\nR47weXdDecPNxWyiWiyMGStRFP4Cg9trcwAGnEm1w8R2ggmWphznCd5dXGhPNjfA\na82yNFY8ubnOUVJOf0nXGg3Edw9iY3xyjJb2+nrsk5f3AgMBAAE=\n-----END PUBLIC KEY-----" }, "scheme": "rsassa-pss-sha256" }, "59a4df8af818e9ed7abe0764c0b47b4240952aa0d179b5b78346c470ac30278d": { "keyid_hash_algorithms": [ "sha256", "sha512" ], "keytype": "ed25519", "keyval": { "public": "edcd0a32a07dce33f7c7873aaffbff36d20ea30787574ead335eefd337e4dacd" }, "scheme": "ed25519" }, "65171251a9aff5a8b3143a813481cb07f6e0de4eb197c767837fe4491b739093": { "keyid_hash_algorithms": [ "sha256", 
"sha512" ], "keytype": "ed25519", "keyval": { "public": "89f28bd4ede5ec3786ab923fd154f39588d20881903e69c7b08fb504c6750815" }, "scheme": "ed25519" }, "8a1c4a3ac2d515dec982ba9910c5fd79b91ae57f625b9cff25d06bf0a61c1758": { "keyid_hash_algorithms": [ "sha256", "sha512" ], "keytype": "ed25519", "keyval": { "public": "82ccf6ac47298ff43bfa0cd639868894e305a99c723ff0515ae2e9856eb5bbf4" }, "scheme": "ed25519" } }, "roles": { "root": { "keyids": [ "4e777de0d275f9d28588dd9a1606cc748e548f9e22b6795b7cb3f63f98035fcb" ], "threshold": 1 }, "snapshot": { "keyids": [ "59a4df8af818e9ed7abe0764c0b47b4240952aa0d179b5b78346c470ac30278d" ], "threshold": 1 }, "targets": { "keyids": [ "65171251a9aff5a8b3143a813481cb07f6e0de4eb197c767837fe4491b739093" ], "threshold": 1 }, "timestamp": { "keyids": [ "8a1c4a3ac2d515dec982ba9910c5fd79b91ae57f625b9cff25d06bf0a61c1758" ], "threshold": 1 } }, "spec_version": "1.0.0", "version": 1 } }python-tuf-5.1.0/tests/repository_data/repository/metadata/role1.json000066400000000000000000000023441470074210500261230ustar00rootroot00000000000000{ "signatures": [ { "keyid": "c8022fa1e9b9cb239a6b362bbdffa9649e61ad2cb699d2e4bc4fdf7930a0e64a", "sig": "9408b46569e622a46f1d35d9fa3c10e17a9285631ced4f2c9c2bba2c2842413fcb796db4e81d6f988fc056c21c407fdc3c10441592cf1e837e088f2e2dfd5403" } ], "signed": { "_type": "targets", "delegations": { "keys": { "c8022fa1e9b9cb239a6b362bbdffa9649e61ad2cb699d2e4bc4fdf7930a0e64a": { "keyid_hash_algorithms": [ "sha256", "sha512" ], "keytype": "ed25519", "keyval": { "public": "fcf224e55fa226056adf113ef1eb3d55e308b75b321c8c8316999d8c4fd9e0d9" }, "scheme": "ed25519" } }, "roles": [ { "keyids": [ "c8022fa1e9b9cb239a6b362bbdffa9649e61ad2cb699d2e4bc4fdf7930a0e64a" ], "name": "role2", "paths": [], "terminating": false, "threshold": 1 } ] }, "expires": "2030-01-01T00:00:00Z", "spec_version": "1.0.0", "targets": { "file3.txt": { "hashes": { "sha256": "141f740f53781d1ca54b8a50af22cbf74e44c21a998fa2a8a05aaac2c002886b", "sha512": "ef5beafa16041bcdd2937140afebd485296cd54f7348ecd5a4d035c09759608de467a7ac0eb58753d0242df873c305e8bffad2454aa48f44480f15efae1cacd0" }, "length": 28 } }, "version": 1 } }python-tuf-5.1.0/tests/repository_data/repository/metadata/role2.json000066400000000000000000000006031470074210500261200ustar00rootroot00000000000000{ "signatures": [ { "keyid": "c8022fa1e9b9cb239a6b362bbdffa9649e61ad2cb699d2e4bc4fdf7930a0e64a", "sig": "75b196a224fd200e46e738b1216b3316c5384f61083872f8d14b8b0a378b2344e64b1a6f1a89a711206a66a0b199d65ac0e30fe15ddbc4de89fa8ff645f99403" } ], "signed": { "_type": "targets", "expires": "2030-01-01T00:00:00Z", "spec_version": "1.0.0", "targets": {}, "version": 1 } }python-tuf-5.1.0/tests/repository_data/repository/metadata/root.json000066400000000000000000000065041470074210500260660ustar00rootroot00000000000000{ "signatures": [ { "keyid": "4e777de0d275f9d28588dd9a1606cc748e548f9e22b6795b7cb3f63f98035fcb", "sig": 
"a337d6375fedd2eabfcd6c2ef6c8a9c3bb85dc5a857715f6a6bd41123e7670c4972d8548bcd7248154f3d864bf25f1823af59d74c459f41ea09a02db057ca1245612ebbdb97e782c501dc3e094f7fa8aa1402b03c6ed0635f565e2a26f9f543a89237e15a2faf0c267e2b34c3c38f2a43a28ddcdaf8308a12ead8c6dc47d1b762de313e9ddda8cc5bc25aea1b69d0e5b9199ca02f5dda48c3bff615fd12a7136d00634b9abc6e75c3256106c4d6f12e6c43f6195071355b2857bbe377ce028619b58837696b805040ce144b393d50a472531f430fadfb68d3081b6a8b5e49337e328c9a0a3f11e80b0bc8eb2dc6e78d1451dd857e6e6e6363c3fd14c590aa95e083c9bfc77724d78af86eb7a7ef635eeddaa353030c79f66b3ba9ea11fab456cfe896a826fdfb50a43cd444f762821aada9bcd7b022c0ee85b8768f960343d5a1d3d76374cc0ac9e12a500de0bf5d48569e5398cadadadab045931c398e3bcb6cec88af2437ba91959f956079cbed159fed3938016e6c3b5e446131f81cc5981" } ], "signed": { "_type": "root", "consistent_snapshot": false, "expires": "2030-01-01T00:00:00Z", "keys": { "4e777de0d275f9d28588dd9a1606cc748e548f9e22b6795b7cb3f63f98035fcb": { "keyid_hash_algorithms": [ "sha256", "sha512" ], "keytype": "rsa", "keyval": { "public": "-----BEGIN PUBLIC KEY-----\nMIIBojANBgkqhkiG9w0BAQEFAAOCAY8AMIIBigKCAYEA0GjPoVrjS9eCqzoQ8VRe\nPkC0cI6ktiEgqPfHESFzyxyjC490Cuy19nuxPcJuZfN64MC48oOkR+W2mq4pM51i\nxmdG5xjvNOBRkJ5wUCc8fDCltMUTBlqt9y5eLsf/4/EoBU+zC4SW1iPU++mCsity\nfQQ7U6LOn3EYCyrkH51hZ/dvKC4o9TPYMVxNecJ3CL1q02Q145JlyjBTuM3Xdqsa\nndTHoXSRPmmzgB/1dL/c4QjMnCowrKW06mFLq9RAYGIaJWfM/0CbrOJpVDkATmEc\nMdpGJYDfW/sRQvRdlHNPo24ZW7vkQUCqdRxvnTWkK5U81y7RtjLt1yskbWXBIbOV\nz94GXsgyzANyCT9qRjHXDDz2mkLq+9I2iKtEqaEePcWRu3H6RLahpM/TxFzw684Y\nR47weXdDecPNxWyiWiyMGStRFP4Cg9trcwAGnEm1w8R2ggmWphznCd5dXGhPNjfA\na82yNFY8ubnOUVJOf0nXGg3Edw9iY3xyjJb2+nrsk5f3AgMBAAE=\n-----END PUBLIC KEY-----" }, "scheme": "rsassa-pss-sha256" }, "59a4df8af818e9ed7abe0764c0b47b4240952aa0d179b5b78346c470ac30278d": { "keyid_hash_algorithms": [ "sha256", "sha512" ], "keytype": "ed25519", "keyval": { "public": "edcd0a32a07dce33f7c7873aaffbff36d20ea30787574ead335eefd337e4dacd" }, "scheme": "ed25519" }, "65171251a9aff5a8b3143a813481cb07f6e0de4eb197c767837fe4491b739093": { "keyid_hash_algorithms": [ "sha256", "sha512" ], "keytype": "ed25519", "keyval": { "public": "89f28bd4ede5ec3786ab923fd154f39588d20881903e69c7b08fb504c6750815" }, "scheme": "ed25519" }, "8a1c4a3ac2d515dec982ba9910c5fd79b91ae57f625b9cff25d06bf0a61c1758": { "keyid_hash_algorithms": [ "sha256", "sha512" ], "keytype": "ed25519", "keyval": { "public": "82ccf6ac47298ff43bfa0cd639868894e305a99c723ff0515ae2e9856eb5bbf4" }, "scheme": "ed25519" } }, "roles": { "root": { "keyids": [ "4e777de0d275f9d28588dd9a1606cc748e548f9e22b6795b7cb3f63f98035fcb" ], "threshold": 1 }, "snapshot": { "keyids": [ "59a4df8af818e9ed7abe0764c0b47b4240952aa0d179b5b78346c470ac30278d" ], "threshold": 1 }, "targets": { "keyids": [ "65171251a9aff5a8b3143a813481cb07f6e0de4eb197c767837fe4491b739093" ], "threshold": 1 }, "timestamp": { "keyids": [ "8a1c4a3ac2d515dec982ba9910c5fd79b91ae57f625b9cff25d06bf0a61c1758" ], "threshold": 1 } }, "spec_version": "1.0.0", "version": 1 } }python-tuf-5.1.0/tests/repository_data/repository/metadata/snapshot.json000066400000000000000000000010031470074210500267270ustar00rootroot00000000000000{ "signatures": [ { "keyid": "59a4df8af818e9ed7abe0764c0b47b4240952aa0d179b5b78346c470ac30278d", "sig": "085672c70dffe26610e58542ee552843633cfed973abdad94c56138dbf0cd991644f2d3f27e4dda3098e08ab676e7f52627b587947ae69db1012d59a6da18e0c" } ], "signed": { "_type": "snapshot", "expires": "2030-01-01T00:00:00Z", "meta": { "role1.json": { "version": 1 }, "role2.json": { "version": 1 }, "targets.json": { "version": 1 } 
}, "spec_version": "1.0.0", "version": 1 } }python-tuf-5.1.0/tests/repository_data/repository/metadata/targets.json000066400000000000000000000031301470074210500265440ustar00rootroot00000000000000{ "signatures": [ { "keyid": "65171251a9aff5a8b3143a813481cb07f6e0de4eb197c767837fe4491b739093", "sig": "d65f8db0c1a8f0976552b9742bbb393f24a5fa5eaf145c37aee047236c79dd0b83cfbb8b49fa7803689dfe0031dcf22c4d006b593acac07d69093b9b81722c08" } ], "signed": { "_type": "targets", "delegations": { "keys": { "c8022fa1e9b9cb239a6b362bbdffa9649e61ad2cb699d2e4bc4fdf7930a0e64a": { "keyid_hash_algorithms": [ "sha256", "sha512" ], "keytype": "ed25519", "keyval": { "public": "fcf224e55fa226056adf113ef1eb3d55e308b75b321c8c8316999d8c4fd9e0d9" }, "scheme": "ed25519" } }, "roles": [ { "keyids": [ "c8022fa1e9b9cb239a6b362bbdffa9649e61ad2cb699d2e4bc4fdf7930a0e64a" ], "name": "role1", "paths": [ "file3.txt" ], "terminating": false, "threshold": 1 } ] }, "expires": "2030-01-01T00:00:00Z", "spec_version": "1.0.0", "targets": { "file1.txt": { "custom": { "file_permissions": "0644" }, "hashes": { "sha256": "65b8c67f51c993d898250f40aa57a317d854900b3a04895464313e48785440da", "sha512": "467430a68afae8e9f9c0771ea5d78bf0b3a0d79a2d3d3b40c69fde4dd42c461448aef76fcef4f5284931a1ffd0ac096d138ba3a0d6ca83fa8d7285a47a296f77" }, "length": 31 }, "file2.txt": { "hashes": { "sha256": "452ce8308500d83ef44248d8e6062359211992fd837ea9e370e561efb1a4ca99", "sha512": "052b49a21e03606b28942db69aa597530fe52d47ee3d748ba65afcd14b857738e36bc1714c4f4adde46c3e683548552fe5c96722e0e0da3acd9050c2524902d8" }, "length": 39 } }, "version": 1 } }python-tuf-5.1.0/tests/repository_data/repository/metadata/timestamp.json000066400000000000000000000010551470074210500271020ustar00rootroot00000000000000{ "signatures": [ { "keyid": "8a1c4a3ac2d515dec982ba9910c5fd79b91ae57f625b9cff25d06bf0a61c1758", "sig": "de0e16920f87bf5500cc65736488ac17e09788cce808f6a4e85eb9e4e478a312b4c1a2d7723af56f7bfb1df533c67d8c93b6f49d39eabe7fae391a08e1f72f01" } ], "signed": { "_type": "timestamp", "expires": "2030-01-01T00:00:00Z", "meta": { "snapshot.json": { "hashes": { "sha256": "8f88e2ba48b412c3843e9bb26e1b6f8fc9e98aceb0fbaa97ba37b4c98717d7ab" }, "length": 515, "version": 1 } }, "spec_version": "1.0.0", "version": 1 } }python-tuf-5.1.0/tests/repository_data/repository/targets/000077500000000000000000000000001470074210500240745ustar00rootroot00000000000000python-tuf-5.1.0/tests/repository_data/repository/targets/file1.txt000066400000000000000000000000371470074210500256350ustar00rootroot00000000000000This is an example target file.python-tuf-5.1.0/tests/repository_data/repository/targets/file2.txt000066400000000000000000000000471470074210500256370ustar00rootroot00000000000000This is an another example target file.python-tuf-5.1.0/tests/repository_data/repository/targets/file3.txt000066400000000000000000000000341470074210500256340ustar00rootroot00000000000000This is role1's target file.python-tuf-5.1.0/tests/repository_simulator.py000066400000000000000000000356151470074210500216760ustar00rootroot00000000000000# Copyright 2021, New York University and the TUF contributors # SPDX-License-Identifier: MIT OR Apache-2.0 """Test utility to simulate a repository RepositorySimulator provides methods to modify repository metadata so that it's easy to "publish" new repository versions with modified metadata, while serving the versions to client test code. 
RepositorySimulator implements FetcherInterface so Updaters in tests can use it as a way to "download" new metadata from remote: in practice no downloading, network connections or even file access happens as RepositorySimulator serves everything from memory. Metadata and targets "hosted" by the simulator are made available in URL paths "/metadata/..." and "/targets/..." respectively. Example:: # constructor creates repository with top-level metadata sim = RepositorySimulator() # metadata can be modified directly: it is immediately available to clients sim.snapshot.version += 1 # As an exception, new root versions require explicit publishing sim.root.version += 1 sim.publish_root() # there are helper functions sim.add_target("targets", b"content", "targetpath") sim.targets.version += 1 sim.update_snapshot() # Use the simulated repository from an Updater: updater = Updater( dir, "https://example.com/metadata/", "https://example.com/targets/", sim ) updater.refresh() """ import datetime import logging import os import tempfile from dataclasses import dataclass, field from typing import Dict, Iterator, List, Optional, Tuple from urllib import parse import securesystemslib.hash as sslib_hash from securesystemslib.signer import CryptoSigner, Signer from tuf.api.exceptions import DownloadHTTPError from tuf.api.metadata import ( SPECIFICATION_VERSION, TOP_LEVEL_ROLE_NAMES, DelegatedRole, Delegations, Metadata, MetaFile, Root, Snapshot, SuccinctRoles, TargetFile, Targets, Timestamp, ) from tuf.api.serialization.json import JSONSerializer from tuf.ngclient.fetcher import FetcherInterface logger = logging.getLogger(__name__) SPEC_VER = ".".join(SPECIFICATION_VERSION) @dataclass class FetchTracker: """Fetcher counter for metadata and targets.""" metadata: List[Tuple[str, Optional[int]]] = field(default_factory=list) targets: List[Tuple[str, Optional[str]]] = field(default_factory=list) @dataclass class RepositoryTarget: """Contains actual target data and the related target metadata.""" data: bytes target_file: TargetFile class RepositorySimulator(FetcherInterface): """Simulates a repository that can be used for testing.""" def __init__(self) -> None: self.md_delegates: Dict[str, Metadata[Targets]] = {} # other metadata is signed on-demand (when fetched) but roots must be # explicitly published with publish_root() which maintains this list self.signed_roots: List[bytes] = [] # signers are used on-demand at fetch time to sign metadata # keys are roles, values are dicts of {keyid: signer} self.signers: Dict[str, Dict[str, Signer]] = {} # target downloads are served from this dict self.target_files: Dict[str, RepositoryTarget] = {} # Whether to compute hashes and length for meta in snapshot/timestamp self.compute_metafile_hashes_length = False # Enable hash-prefixed target file names self.prefix_targets_with_hash = True self.dump_dir: Optional[str] = None self.dump_version = 0 self.fetch_tracker = FetchTracker() now = datetime.datetime.now(datetime.timezone.utc) self.safe_expiry = now.replace(microsecond=0) + datetime.timedelta( days=30 ) self._initialize() @property def root(self) -> Root: return self.md_root.signed @property def timestamp(self) -> Timestamp: return self.md_timestamp.signed @property def snapshot(self) -> Snapshot: return self.md_snapshot.signed @property def targets(self) -> Targets: return self.md_targets.signed def all_targets(self) -> Iterator[Tuple[str, Targets]]: """Yield role name and signed portion of targets one by one.""" yield Targets.type, self.md_targets.signed for role, md 
in self.md_delegates.items(): yield role, md.signed def add_signer(self, role: str, signer: Signer) -> None: if role not in self.signers: self.signers[role] = {} self.signers[role][signer.public_key.keyid] = signer def rotate_keys(self, role: str) -> None: """remove all keys for role, then add threshold of new keys""" self.root.roles[role].keyids.clear() self.signers[role].clear() for _ in range(self.root.roles[role].threshold): signer = CryptoSigner.generate_ed25519() self.root.add_key(signer.public_key, role) self.add_signer(role, signer) def _initialize(self) -> None: """Setup a minimal valid repository.""" self.md_targets = Metadata(Targets(expires=self.safe_expiry)) self.md_snapshot = Metadata(Snapshot(expires=self.safe_expiry)) self.md_timestamp = Metadata(Timestamp(expires=self.safe_expiry)) self.md_root = Metadata(Root(expires=self.safe_expiry)) for role in TOP_LEVEL_ROLE_NAMES: signer = CryptoSigner.generate_ed25519() self.md_root.signed.add_key(signer.public_key, role) self.add_signer(role, signer) self.publish_root() def publish_root(self) -> None: """Sign and store a new serialized version of root.""" self.md_root.signatures.clear() for signer in self.signers[Root.type].values(): self.md_root.sign(signer, append=True) self.signed_roots.append(self.md_root.to_bytes(JSONSerializer())) logger.debug("Published root v%d", self.root.version) def _fetch(self, url: str) -> Iterator[bytes]: """Fetches data from the given url and returns an Iterator (or yields bytes). """ path = parse.urlparse(url).path if path.startswith("/metadata/") and path.endswith(".json"): # figure out rolename and version ver_and_name = path[len("/metadata/") :][: -len(".json")] version_str, _, role = ver_and_name.partition(".") # root is always version-prefixed while timestamp is always NOT if role == Root.type or ( self.root.consistent_snapshot and ver_and_name != Timestamp.type ): version: Optional[int] = int(version_str) else: # the file is not version-prefixed role = ver_and_name version = None yield self.fetch_metadata(role, version) elif path.startswith("/targets/"): # figure out target path and hash prefix target_path = path[len("/targets/") :] dir_parts, sep, prefixed_filename = target_path.rpartition("/") # extract the hash prefix, if any prefix: Optional[str] = None filename = prefixed_filename if self.root.consistent_snapshot and self.prefix_targets_with_hash: prefix, _, filename = prefixed_filename.partition(".") target_path = f"{dir_parts}{sep}{filename}" yield self.fetch_target(target_path, prefix) else: raise DownloadHTTPError(f"Unknown path '{path}'", 404) def fetch_target( self, target_path: str, target_hash: Optional[str] ) -> bytes: """Return data for 'target_path', checking 'target_hash' if it is given. If hash is None, then consistent_snapshot is not used. """ self.fetch_tracker.targets.append((target_path, target_hash)) repo_target = self.target_files.get(target_path) if repo_target is None: raise DownloadHTTPError(f"No target {target_path}", 404) if ( target_hash and target_hash not in repo_target.target_file.hashes.values() ): raise DownloadHTTPError(f"hash mismatch for {target_path}", 404) logger.debug("fetched target %s", target_path) return repo_target.data def fetch_metadata(self, role: str, version: Optional[int] = None) -> bytes: """Return signed metadata for 'role', using 'version' if it is given. If version is None, non-versioned metadata is being requested. 
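Raises DownloadHTTPError (with status 404) if the role is unknown or the requested root version has not been published.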
""" self.fetch_tracker.metadata.append((role, version)) # decode role for the metadata role = parse.unquote(role, encoding="utf-8") if role == Root.type: # return a version previously serialized in publish_root() if version is None or version > len(self.signed_roots): raise DownloadHTTPError(f"Unknown root version {version}", 404) logger.debug("fetched root version %d", version) return self.signed_roots[version - 1] # sign and serialize the requested metadata md: Optional[Metadata] if role == Timestamp.type: md = self.md_timestamp elif role == Snapshot.type: md = self.md_snapshot elif role == Targets.type: md = self.md_targets else: md = self.md_delegates.get(role) if md is None: raise DownloadHTTPError(f"Unknown role {role}", 404) md.signatures.clear() for signer in self.signers[role].values(): md.sign(signer, append=True) logger.debug( "fetched %s v%d with %d sigs", role, md.signed.version, len(self.signers[role]), ) return md.to_bytes(JSONSerializer()) def _compute_hashes_and_length( self, role: str ) -> Tuple[Dict[str, str], int]: data = self.fetch_metadata(role) digest_object = sslib_hash.digest(sslib_hash.DEFAULT_HASH_ALGORITHM) digest_object.update(data) hashes = {sslib_hash.DEFAULT_HASH_ALGORITHM: digest_object.hexdigest()} return hashes, len(data) def update_timestamp(self) -> None: """Update timestamp and assign snapshot version to snapshot_meta version. """ hashes = None length = None if self.compute_metafile_hashes_length: hashes, length = self._compute_hashes_and_length(Snapshot.type) self.timestamp.snapshot_meta = MetaFile( self.snapshot.version, length, hashes ) self.timestamp.version += 1 def update_snapshot(self) -> None: """Update snapshot, assign targets versions and update timestamp.""" for role, delegate in self.all_targets(): hashes = None length = None if self.compute_metafile_hashes_length: hashes, length = self._compute_hashes_and_length(role) self.snapshot.meta[f"{role}.json"] = MetaFile( delegate.version, length, hashes ) self.snapshot.version += 1 self.update_timestamp() def _get_delegator(self, delegator_name: str) -> Targets: """Given a delegator name return, its corresponding Targets object.""" if delegator_name == Targets.type: return self.targets return self.md_delegates[delegator_name].signed def add_target(self, role: str, data: bytes, path: str) -> None: """Create a target from data and add it to the target_files.""" targets = self._get_delegator(role) target = TargetFile.from_data(path, data, ["sha256"]) targets.targets[path] = target self.target_files[path] = RepositoryTarget(data, target) def add_delegation( self, delegator_name: str, role: DelegatedRole, targets: Targets ) -> None: """Add delegated target role to the repository.""" delegator = self._get_delegator(delegator_name) if ( delegator.delegations is not None and delegator.delegations.succinct_roles is not None ): raise ValueError("Can't add a role when succinct_roles is used") # Create delegation if delegator.delegations is None: delegator.delegations = Delegations({}, roles={}) assert delegator.delegations.roles is not None # put delegation last by default delegator.delegations.roles[role.name] = role # By default add one new key for the role signer = CryptoSigner.generate_ed25519() delegator.add_key(signer.public_key, role.name) self.add_signer(role.name, signer) # Add metadata for the role if role.name not in self.md_delegates: self.md_delegates[role.name] = Metadata(targets, {}) def add_succinct_roles( self, delegator_name: str, bit_length: int, name_prefix: str ) -> None: """Add 
succinct roles info to a delegator with name "delegator_name". Note that for each delegated role represented by succinct roles an empty Targets instance is created. """ delegator = self._get_delegator(delegator_name) if ( delegator.delegations is not None and delegator.delegations.roles is not None ): raise ValueError( "Can't add a succinct_roles when delegated roles are used" ) signer = CryptoSigner.generate_ed25519() succinct_roles = SuccinctRoles([], 1, bit_length, name_prefix) delegator.delegations = Delegations({}, None, succinct_roles) # Add targets metadata for all bins. for delegated_name in succinct_roles.get_roles(): self.md_delegates[delegated_name] = Metadata( Targets(expires=self.safe_expiry) ) self.add_signer(delegated_name, signer) delegator.add_key(signer.public_key) def write(self) -> None: """Dump current repository metadata to self.dump_dir This is a debugging tool: dumping repository state before running Updater refresh may be useful while debugging a test. """ if self.dump_dir is None: self.dump_dir = tempfile.mkdtemp() print(f"Repository Simulator dumps in {self.dump_dir}") self.dump_version += 1 dest_dir = os.path.join(self.dump_dir, str(self.dump_version)) os.makedirs(dest_dir) for ver in range(1, len(self.signed_roots) + 1): with open(os.path.join(dest_dir, f"{ver}.root.json"), "wb") as f: f.write(self.fetch_metadata(Root.type, ver)) for role in [Timestamp.type, Snapshot.type, Targets.type]: with open(os.path.join(dest_dir, f"{role}.json"), "wb") as f: f.write(self.fetch_metadata(role)) for role in self.md_delegates: quoted_role = parse.quote(role, "") with open(os.path.join(dest_dir, f"{quoted_role}.json"), "wb") as f: f.write(self.fetch_metadata(role)) python-tuf-5.1.0/tests/simple_server.py000077500000000000000000000012141470074210500202260ustar00rootroot00000000000000#!/usr/bin/env python # Copyright 2012 - 2017, New York University and the TUF contributors # SPDX-License-Identifier: MIT OR Apache-2.0 """Simple HTTP server for python-tuf tests""" import socketserver from http.server import SimpleHTTPRequestHandler # Allow re-use so you can re-run tests as often as you want even if the # tests re-use ports. 
Otherwise TCP TIME-WAIT prevents reuse for ~1 minute socketserver.TCPServer.allow_reuse_address = True httpd = socketserver.TCPServer(("localhost", 0), SimpleHTTPRequestHandler) port_message = "bind succeeded, server port is: " + str(httpd.server_address[1]) print(port_message) httpd.serve_forever() python-tuf-5.1.0/tests/test_api.py000066400000000000000000001413651470074210500171700ustar00rootroot00000000000000# Copyright 2020, New York University and the TUF contributors # SPDX-License-Identifier: MIT OR Apache-2.0 """Unit tests for api/metadata.py""" import json import logging import os import shutil import sys import tempfile import unittest from copy import copy, deepcopy from datetime import datetime, timedelta, timezone from pathlib import Path from typing import ClassVar, Dict, Optional from securesystemslib import exceptions as sslib_exceptions from securesystemslib import hash as sslib_hash from securesystemslib.signer import ( CryptoSigner, Key, SecretsHandler, Signer, ) from tests import utils from tuf.api import exceptions from tuf.api.dsse import SimpleEnvelope from tuf.api.metadata import ( TOP_LEVEL_ROLE_NAMES, DelegatedRole, Delegations, Metadata, MetaFile, Root, RootVerificationResult, Signature, Snapshot, SuccinctRoles, TargetFile, Targets, Timestamp, VerificationResult, ) from tuf.api.serialization import DeserializationError, SerializationError from tuf.api.serialization.json import JSONSerializer logger = logging.getLogger(__name__) class TestMetadata(unittest.TestCase): """Tests for public API of all classes in 'tuf/api/metadata.py'.""" temporary_directory: ClassVar[str] repo_dir: ClassVar[str] keystore_dir: ClassVar[str] signers: ClassVar[Dict[str, Signer]] @classmethod def setUpClass(cls) -> None: # Create a temporary directory to store the repository, metadata, and # target files. 'temporary_directory' must be deleted in # TearDownClass() so that temporary files are always removed, even when # exceptions occur. cls.temporary_directory = tempfile.mkdtemp(dir=os.getcwd()) test_repo_data = os.path.join( os.path.dirname(os.path.realpath(__file__)), "repository_data" ) cls.repo_dir = os.path.join(cls.temporary_directory, "repository") shutil.copytree( os.path.join(test_repo_data, "repository"), cls.repo_dir ) cls.keystore_dir = os.path.join(cls.temporary_directory, "keystore") shutil.copytree( os.path.join(test_repo_data, "keystore"), cls.keystore_dir ) path = os.path.join(cls.repo_dir, "metadata", "root.json") root = Metadata[Root].from_file(path).signed # Load signers cls.signers = {} for role in [Snapshot.type, Targets.type, Timestamp.type]: uri = f"file2:{os.path.join(cls.keystore_dir, role + '_key')}" role_obj = root.get_delegated_role(role) key = root.get_key(role_obj.keyids[0]) cls.signers[role] = CryptoSigner.from_priv_key_uri(uri, key) @classmethod def tearDownClass(cls) -> None: # Remove the temporary repository directory, which should contain all # the metadata, targets, and key files generated for the test cases. 
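# shutil.rmtree deletes the whole temporary tree recursively, so no per-file cleanup is needed here.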
shutil.rmtree(cls.temporary_directory) def test_generic_read(self) -> None: for metadata, inner_metadata_cls in [ (Root.type, Root), (Snapshot.type, Snapshot), (Timestamp.type, Timestamp), (Targets.type, Targets), ]: # Load JSON-formatted metadata of each supported type from file # and from a JSON string read out-of-band path = os.path.join(self.repo_dir, "metadata", metadata + ".json") md_obj = Metadata.from_file(path) with open(path, "rb") as f: md_obj2 = Metadata.from_bytes(f.read()) # Assert that both methods instantiate the right inner class for # each metadata type and ... self.assertTrue(isinstance(md_obj.signed, inner_metadata_cls)) self.assertTrue(isinstance(md_obj2.signed, inner_metadata_cls)) # ... and return the same object (compared by dict representation) self.assertDictEqual(md_obj.to_dict(), md_obj2.to_dict()) # Assert that it chokes correctly on an unknown metadata type bad_metadata_path = "bad-metadata.json" bad_metadata = {"signed": {"_type": "bad-metadata"}} bad_string = json.dumps(bad_metadata).encode("utf-8") with open(bad_metadata_path, "wb") as f: f.write(bad_string) with self.assertRaises(DeserializationError): Metadata.from_file(bad_metadata_path) with self.assertRaises(DeserializationError): Metadata.from_bytes(bad_string) os.remove(bad_metadata_path) def test_md_read_write_file_exceptions(self) -> None: # Test writing to a file with bad filename with self.assertRaises(exceptions.StorageError): Metadata.from_file("bad-metadata.json") # Test serializing to a file with bad filename with self.assertRaises(exceptions.StorageError): md = Metadata.from_file( os.path.join(self.repo_dir, "metadata", "root.json") ) md.to_file("") def test_compact_json(self) -> None: path = os.path.join(self.repo_dir, "metadata", "targets.json") md_obj = Metadata.from_file(path) self.assertTrue( len(JSONSerializer(compact=True).serialize(md_obj)) < len(JSONSerializer().serialize(md_obj)) ) def test_read_write_read_compare(self) -> None: for metadata in TOP_LEVEL_ROLE_NAMES: path = os.path.join(self.repo_dir, "metadata", metadata + ".json") md_obj = Metadata.from_file(path) path_2 = path + ".tmp" md_obj.to_file(path_2) md_obj_2 = Metadata.from_file(path_2) self.assertDictEqual(md_obj.to_dict(), md_obj_2.to_dict()) os.remove(path_2) def test_serialize_with_validate(self) -> None: # Assert that by changing one required attribute validation will fail. root = Metadata.from_file( os.path.join(self.repo_dir, "metadata", "root.json") ) root.signed.version = 0 with self.assertRaises(SerializationError): root.to_bytes(JSONSerializer(validate=True)) def test_to_from_bytes(self) -> None: for metadata in TOP_LEVEL_ROLE_NAMES: path = os.path.join(self.repo_dir, "metadata", metadata + ".json") with open(path, "rb") as f: metadata_bytes = f.read() md_obj = Metadata.from_bytes(metadata_bytes) # Check that from_bytes/to_bytes doesn't change the content # for two serializer cases: noncompact and compact. # Case 1: test noncompact by overriding the default serializer. self.assertEqual(md_obj.to_bytes(JSONSerializer()), metadata_bytes) # Case 2: test compact by using the default serializer. 
obj_bytes = md_obj.to_bytes() metadata_obj_2 = Metadata.from_bytes(obj_bytes) self.assertEqual(metadata_obj_2.to_bytes(), obj_bytes) def test_sign_verify(self) -> None: path = os.path.join(self.repo_dir, "metadata") root = Metadata[Root].from_file(os.path.join(path, "root.json")).signed # Locate the public keys we need from root targets_keyid = next(iter(root.roles[Targets.type].keyids)) targets_key = root.keys[targets_keyid] snapshot_keyid = next(iter(root.roles[Snapshot.type].keyids)) snapshot_key = root.keys[snapshot_keyid] timestamp_keyid = next(iter(root.roles[Timestamp.type].keyids)) timestamp_key = root.keys[timestamp_keyid] # Load sample metadata (targets) and assert ... md_obj = Metadata.from_file(os.path.join(path, "targets.json")) sig = md_obj.signatures[targets_keyid] data = md_obj.signed_bytes # ... it has a single existing signature, self.assertEqual(len(md_obj.signatures), 1) # ... which is valid for the correct key. targets_key.verify_signature(sig, data) with self.assertRaises(sslib_exceptions.VerificationError): snapshot_key.verify_signature(sig, data) # Append a new signature with the unrelated key and assert that ... snapshot_sig = md_obj.sign(self.signers[Snapshot.type], append=True) # ... there are now two signatures, and self.assertEqual(len(md_obj.signatures), 2) # ... both are valid for the corresponding keys. targets_key.verify_signature(sig, data) snapshot_key.verify_signature(snapshot_sig, data) # ... the returned (appended) signature is for snapshot key self.assertEqual(snapshot_sig.keyid, snapshot_keyid) # Create and assign (don't append) a new signature and assert that ... ts_sig = md_obj.sign(self.signers[Timestamp.type], append=False) # ... there now is only one signature, self.assertEqual(len(md_obj.signatures), 1) # ... valid for that key. 
timestamp_key.verify_signature(ts_sig, data) with self.assertRaises(sslib_exceptions.VerificationError): targets_key.verify_signature(ts_sig, data) def test_sign_failures(self) -> None: # Test throwing UnsignedMetadataError because of signing problems md = Metadata.from_file( os.path.join(self.repo_dir, "metadata", "snapshot.json") ) class FailingSigner(Signer): @classmethod def from_priv_key_uri( cls, priv_key_uri: str, public_key: Key, secrets_handler: Optional[SecretsHandler] = None, ) -> "Signer": pass @property def public_key(self) -> Key: raise RuntimeError("Not a real signer") def sign(self, _payload: bytes) -> Signature: raise RuntimeError("signing failed") failing_signer = FailingSigner() with self.assertRaises(exceptions.UnsignedMetadataError): md.sign(failing_signer) def test_key_verify_failures(self) -> None: root_path = os.path.join(self.repo_dir, "metadata", "root.json") root = Metadata[Root].from_file(root_path).signed # Locate the timestamp public key we need from root timestamp_keyid = next(iter(root.roles[Timestamp.type].keyids)) timestamp_key = root.keys[timestamp_keyid] # Load sample metadata (timestamp) path = os.path.join(self.repo_dir, "metadata", "timestamp.json") md_obj = Metadata.from_file(path) sig = md_obj.signatures[timestamp_keyid] data = md_obj.signed_bytes # Test failure on unknown scheme (securesystemslib # UnsupportedAlgorithmError) scheme = timestamp_key.scheme timestamp_key.scheme = "foo" with self.assertRaises(sslib_exceptions.VerificationError): timestamp_key.verify_signature(sig, data) timestamp_key.scheme = scheme # Test failure on broken public key data (securesystemslib # CryptoError) public = timestamp_key.keyval["public"] timestamp_key.keyval["public"] = "ffff" with self.assertRaises(sslib_exceptions.VerificationError): timestamp_key.verify_signature(sig, data) timestamp_key.keyval["public"] = public # Test failure with invalid signature (securesystemslib # FormatError) incorrect_sig = copy(sig) incorrect_sig.signature = "foo" with self.assertRaises(sslib_exceptions.VerificationError): timestamp_key.verify_signature(incorrect_sig, data) # Test failure with valid but incorrect signature incorrect_sig.signature = "ff" * 64 with self.assertRaises(sslib_exceptions.UnverifiedSignatureError): timestamp_key.verify_signature(incorrect_sig, data) def test_metadata_signed_is_expired(self) -> None: # Use of Snapshot is arbitrary, we're just testing the base class # features with real data snapshot_path = os.path.join(self.repo_dir, "metadata", "snapshot.json") md = Metadata.from_file(snapshot_path) expected_expiry = datetime(2030, 1, 1, 0, 0, tzinfo=timezone.utc) self.assertEqual(md.signed.expires, expected_expiry) # Test is_expired with reference_time provided is_expired = md.signed.is_expired(md.signed.expires) self.assertTrue(is_expired) is_expired = md.signed.is_expired(md.signed.expires + timedelta(days=1)) self.assertTrue(is_expired) is_expired = md.signed.is_expired(md.signed.expires - timedelta(days=1)) self.assertFalse(is_expired) # Test is_expired without reference_time, # manipulating md.signed.expires expires = md.signed.expires md.signed.expires = datetime.now(timezone.utc) is_expired = md.signed.is_expired() self.assertTrue(is_expired) md.signed.expires = datetime.now(timezone.utc) + timedelta(days=1) is_expired = md.signed.is_expired() self.assertFalse(is_expired) md.signed.expires = expires def test_metadata_verify_delegate(self) -> None: root_path = os.path.join(self.repo_dir, "metadata", "root.json") root = 
Metadata[Root].from_file(root_path) snapshot_path = os.path.join(self.repo_dir, "metadata", "snapshot.json") snapshot = Metadata[Snapshot].from_file(snapshot_path) targets_path = os.path.join(self.repo_dir, "metadata", "targets.json") targets = Metadata[Targets].from_file(targets_path) role1_path = os.path.join(self.repo_dir, "metadata", "role1.json") role1 = Metadata[Targets].from_file(role1_path) role2_path = os.path.join(self.repo_dir, "metadata", "role2.json") role2 = Metadata[Targets].from_file(role2_path) # test the expected delegation tree root.verify_delegate(Root.type, root) root.verify_delegate(Snapshot.type, snapshot) root.verify_delegate(Targets.type, targets) targets.verify_delegate("role1", role1) role1.verify_delegate("role2", role2) # only root and targets can verify delegates with self.assertRaises(TypeError): snapshot.verify_delegate(Snapshot.type, snapshot) # verify fails for roles that are not delegated by delegator with self.assertRaises(ValueError): root.verify_delegate("role1", role1) with self.assertRaises(ValueError): targets.verify_delegate(Targets.type, targets) # verify fails when delegator has no delegations with self.assertRaises(ValueError): role2.verify_delegate("role1", role1) def test_signed_verify_delegate(self) -> None: root_path = os.path.join(self.repo_dir, "metadata", "root.json") root_md = Metadata[Root].from_file(root_path) root = root_md.signed snapshot_path = os.path.join(self.repo_dir, "metadata", "snapshot.json") snapshot_md = Metadata[Snapshot].from_file(snapshot_path) snapshot = snapshot_md.signed targets_path = os.path.join(self.repo_dir, "metadata", "targets.json") targets_md = Metadata[Targets].from_file(targets_path) targets = targets_md.signed role1_path = os.path.join(self.repo_dir, "metadata", "role1.json") role1_md = Metadata[Targets].from_file(role1_path) role1 = role1_md.signed role2_path = os.path.join(self.repo_dir, "metadata", "role2.json") role2_md = Metadata[Targets].from_file(role2_path) role2 = role2_md.signed # test the expected delegation tree root.verify_delegate( Root.type, root_md.signed_bytes, root_md.signatures ) root.verify_delegate( Snapshot.type, snapshot_md.signed_bytes, snapshot_md.signatures ) root.verify_delegate( Targets.type, targets_md.signed_bytes, targets_md.signatures ) targets.verify_delegate( "role1", role1_md.signed_bytes, role1_md.signatures ) role1.verify_delegate( "role2", role2_md.signed_bytes, role2_md.signatures ) # only root and targets can verify delegates with self.assertRaises(AttributeError): snapshot.verify_delegate( Snapshot.type, snapshot_md.signed_bytes, snapshot_md.signatures ) # verify fails for roles that are not delegated by delegator with self.assertRaises(ValueError): root.verify_delegate( "role1", role1_md.signed_bytes, role1_md.signatures ) with self.assertRaises(ValueError): targets.verify_delegate( Targets.type, targets_md.signed_bytes, targets_md.signatures ) # verify fails when delegator has no delegations with self.assertRaises(ValueError): role2.verify_delegate( "role1", role1_md.signed_bytes, role1_md.signatures ) # verify fails when delegate content is modified expires = snapshot.expires snapshot.expires = expires + timedelta(days=1) with self.assertRaises(exceptions.UnsignedMetadataError): root.verify_delegate( Snapshot.type, snapshot_md.signed_bytes, snapshot_md.signatures ) snapshot.expires = expires # verify fails if sslib verify fails with VerificationError # (in this case signature is malformed) keyid = next(iter(root.roles[Snapshot.type].keyids)) good_sig = 
snapshot_md.signatures[keyid].signature snapshot_md.signatures[keyid].signature = "foo" with self.assertRaises(exceptions.UnsignedMetadataError): root.verify_delegate( Snapshot.type, snapshot_md.signed_bytes, snapshot_md.signatures ) snapshot_md.signatures[keyid].signature = good_sig # verify fails if roles keys do not sign the metadata with self.assertRaises(exceptions.UnsignedMetadataError): root.verify_delegate( Timestamp.type, snapshot_md.signed_bytes, snapshot_md.signatures ) # Add a key to snapshot role, make sure the new sig fails to verify ts_keyid = next(iter(root.roles[Timestamp.type].keyids)) root.add_key(root.keys[ts_keyid], Snapshot.type) snapshot_md.signatures[ts_keyid] = Signature(ts_keyid, "ff" * 64) # verify succeeds if threshold is reached even if some signatures # fail to verify root.verify_delegate( Snapshot.type, snapshot_md.signed_bytes, snapshot_md.signatures ) # verify fails if threshold of signatures is not reached root.roles[Snapshot.type].threshold = 2 with self.assertRaises(exceptions.UnsignedMetadataError): root.verify_delegate( Snapshot.type, snapshot_md.signed_bytes, snapshot_md.signatures ) # verify succeeds when we correct the new signature and reach the # threshold of 2 keys snapshot_md.sign(self.signers[Timestamp.type], append=True) root.verify_delegate( Snapshot.type, snapshot_md.signed_bytes, snapshot_md.signatures ) def test_verification_result(self) -> None: vr = VerificationResult(3, {"a": None}, {"b": None}) self.assertEqual(vr.missing, 2) self.assertFalse(vr.verified) self.assertFalse(vr) # Add a signature vr.signed["c"] = None self.assertEqual(vr.missing, 1) self.assertFalse(vr.verified) self.assertFalse(vr) # Add last missing signature vr.signed["d"] = None self.assertEqual(vr.missing, 0) self.assertTrue(vr.verified) self.assertTrue(vr) # Add one more signature vr.signed["e"] = None self.assertEqual(vr.missing, 0) self.assertTrue(vr.verified) self.assertTrue(vr) def test_root_verification_result(self) -> None: vr1 = VerificationResult(3, {"a": None}, {"b": None}) vr2 = VerificationResult(1, {"c": None}, {"b": None}) vr = RootVerificationResult(vr1, vr2) self.assertEqual(vr.signed, {"a": None, "c": None}) self.assertEqual(vr.unsigned, {"b": None}) self.assertFalse(vr.verified) self.assertFalse(vr) vr1.signed["c"] = None vr1.signed["f"] = None self.assertEqual(vr.signed, {"a": None, "c": None, "f": None}) self.assertEqual(vr.unsigned, {"b": None}) self.assertTrue(vr.verified) self.assertTrue(vr) def test_signed_get_verification_result(self) -> None: # Setup: Load test metadata and keys root_path = os.path.join(self.repo_dir, "metadata", "root.json") root = Metadata[Root].from_file(root_path) key1_id = root.signed.roles[Root.type].keyids[0] key1 = root.signed.get_key(key1_id) key2_id = root.signed.roles[Timestamp.type].keyids[0] key2 = root.signed.get_key(key2_id) key3_id = "123456789abcdefg" key4_id = self.signers[Snapshot.type].public_key.keyid # Test: 1 authorized key, 1 valid signature result = root.signed.get_verification_result( Root.type, root.signed_bytes, root.signatures ) self.assertTrue(result) self.assertEqual(result.signed, {key1_id: key1}) self.assertEqual(result.unsigned, {}) # Test: 2 authorized keys, 1 invalid signature # Adding a key, i.e. 
metadata change, invalidates existing signature root.signed.add_key(key2, Root.type) result = root.signed.get_verification_result( Root.type, root.signed_bytes, root.signatures ) self.assertFalse(result) self.assertEqual(result.signed, {}) self.assertEqual(result.unsigned, {key1_id: key1, key2_id: key2}) # Test: 3 authorized keys, 1 invalid signature, 1 key missing key data # Adding a keyid w/o key, fails verification but this key is not listed # in unsigned root.signed.roles[Root.type].keyids.append(key3_id) result = root.signed.get_verification_result( Root.type, root.signed_bytes, root.signatures ) self.assertFalse(result) self.assertEqual(result.signed, {}) self.assertEqual(result.unsigned, {key1_id: key1, key2_id: key2}) # Test: 3 authorized keys, 1 valid signature, 1 invalid signature, 1 # key missing key data root.sign(self.signers[Timestamp.type], append=True) result = root.signed.get_verification_result( Root.type, root.signed_bytes, root.signatures ) self.assertTrue(result) self.assertEqual(result.signed, {key2_id: key2}) self.assertEqual(result.unsigned, {key1_id: key1}) # Test: 3 authorized keys, 1 valid signature, 1 invalid signature, 1 # key missing key data, 1 ignored unrelated signature root.sign(self.signers[Snapshot.type], append=True) self.assertEqual( set(root.signatures.keys()), {key1_id, key2_id, key4_id} ) self.assertTrue(result) self.assertEqual(result.signed, {key2_id: key2}) self.assertEqual(result.unsigned, {key1_id: key1}) # See test_signed_verify_delegate for more related tests ... def test_root_get_root_verification_result(self) -> None: # Setup: Load test metadata and keys root_path = os.path.join(self.repo_dir, "metadata", "root.json") root = Metadata[Root].from_file(root_path) key1_id = root.signed.roles[Root.type].keyids[0] key1 = root.signed.get_key(key1_id) key2_id = root.signed.roles[Timestamp.type].keyids[0] key2 = root.signed.get_key(key2_id) # Test: Verify with no previous root version result = root.signed.get_root_verification_result( None, root.signed_bytes, root.signatures ) self.assertTrue(result) self.assertEqual(result.signed, {key1_id: key1}) self.assertEqual(result.unsigned, {}) # Test: Verify with other root that is not version N-1 prev_root: Metadata[Root] = deepcopy(root) with self.assertRaises(ValueError): result = root.signed.get_root_verification_result( prev_root.signed, root.signed_bytes, root.signatures ) # Test: Verify with previous root prev_root.signed.version -= 1 result = root.signed.get_root_verification_result( prev_root.signed, root.signed_bytes, root.signatures ) self.assertTrue(result) self.assertEqual(result.signed, {key1_id: key1}) self.assertEqual(result.unsigned, {}) # Test: Add a signer to previous root (threshold still 1) prev_root.signed.add_key(key2, Root.type) result = root.signed.get_root_verification_result( prev_root.signed, root.signed_bytes, root.signatures ) self.assertTrue(result) self.assertEqual(result.signed, {key1_id: key1}) self.assertEqual(result.unsigned, {key2_id: key2}) # Test: Increase threshold in previous root prev_root.signed.roles[Root.type].threshold += 1 result = root.signed.get_root_verification_result( prev_root.signed, root.signed_bytes, root.signatures ) self.assertFalse(result) self.assertEqual(result.signed, {key1_id: key1}) self.assertEqual(result.unsigned, {key2_id: key2}) # Test: Sign root with both keys root.sign(self.signers[Timestamp.type], append=True) result = root.signed.get_root_verification_result( prev_root.signed, root.signed_bytes, root.signatures ) self.assertTrue(result) 
self.assertEqual(result.signed, {key1_id: key1, key2_id: key2}) self.assertEqual(result.unsigned, {}) # Test: Sign root with an unrelated key root.sign(self.signers[Snapshot.type], append=True) result = root.signed.get_root_verification_result( prev_root.signed, root.signed_bytes, root.signatures ) self.assertTrue(result) self.assertEqual(result.signed, {key1_id: key1, key2_id: key2}) self.assertEqual(result.unsigned, {}) # Test: Remove key1 from previous root prev_root.signed.revoke_key(key1_id, Root.type) result = root.signed.get_root_verification_result( prev_root.signed, root.signed_bytes, root.signatures ) self.assertFalse(result) self.assertEqual(result.signed, {key1_id: key1, key2_id: key2}) self.assertEqual(result.unsigned, {}) # Test: Lower threshold in previous root prev_root.signed.roles[Root.type].threshold -= 1 result = root.signed.get_root_verification_result( prev_root.signed, root.signed_bytes, root.signatures ) self.assertTrue(result) self.assertEqual(result.signed, {key1_id: key1, key2_id: key2}) self.assertEqual(result.unsigned, {}) def test_root_add_key_and_revoke_key(self) -> None: root_path = os.path.join(self.repo_dir, "metadata", "root.json") root = Metadata[Root].from_file(root_path) # Create a new key signer = CryptoSigner.generate_ecdsa() key = signer.public_key # Assert that root does not contain the new key self.assertNotIn(key.keyid, root.signed.roles[Root.type].keyids) self.assertNotIn(key.keyid, root.signed.keys) # Assert that add_key with old argument order will raise an error with self.assertRaises(ValueError): root.signed.add_key(Root.type, key) # Add new root key root.signed.add_key(key, Root.type) # Assert that key is added self.assertIn(key.keyid, root.signed.roles[Root.type].keyids) self.assertIn(key.keyid, root.signed.keys) # Confirm that the newly added key does not break # the object serialization root.to_dict() # Try adding the same key again and assert it's ignored. pre_add_keyid = root.signed.roles[Root.type].keyids.copy() root.signed.add_key(key, Root.type) self.assertEqual(pre_add_keyid, root.signed.roles[Root.type].keyids) # Add the same key to the targets role as well root.signed.add_key(key, Targets.type) # Add the same key to a nonexistent role.
with self.assertRaises(ValueError): root.signed.add_key(key, "nosuchrole") # Remove the key from root role (targets role still uses it) root.signed.revoke_key(key.keyid, Root.type) self.assertNotIn(key.keyid, root.signed.roles[Root.type].keyids) self.assertIn(key.keyid, root.signed.keys) # Remove the key from targets as well root.signed.revoke_key(key.keyid, Targets.type) self.assertNotIn(key.keyid, root.signed.roles[Targets.type].keyids) self.assertNotIn(key.keyid, root.signed.keys) with self.assertRaises(ValueError): root.signed.revoke_key("nosuchkey", Root.type) with self.assertRaises(ValueError): root.signed.revoke_key(key.keyid, "nosuchrole") def test_is_target_in_pathpattern(self) -> None: supported_use_cases = [ ("foo.tgz", "foo.tgz"), ("foo.tgz", "*"), ("foo.tgz", "*.tgz"), ("foo-version-a.tgz", "foo-version-?.tgz"), ("targets/foo.tgz", "targets/*.tgz"), ("foo/bar/zoo/k.tgz", "foo/bar/zoo/*"), ("foo/bar/zoo/k.tgz", "foo/*/zoo/*"), ("foo/bar/zoo/k.tgz", "*/*/*/*"), ("foo/bar", "f?o/bar"), ("foo/bar", "*o/bar"), ] for targetpath, pathpattern in supported_use_cases: self.assertTrue( DelegatedRole._is_target_in_pathpattern(targetpath, pathpattern) ) invalid_use_cases = [ ("targets/foo.tgz", "*.tgz"), ("/foo.tgz", "*.tgz"), ("targets/foo.tgz", "*"), ("foo-version-alpha.tgz", "foo-version-?.tgz"), ("foo//bar", "*/bar"), ("foo/bar", "f?/bar"), ] for targetpath, pathpattern in invalid_use_cases: self.assertFalse( DelegatedRole._is_target_in_pathpattern(targetpath, pathpattern) ) def test_targets_key_api(self) -> None: targets_path = os.path.join(self.repo_dir, "metadata", "targets.json") targets: Targets = Metadata[Targets].from_file(targets_path).signed # Add a new delegated role "role2" in targets delegated_role = DelegatedRole.from_dict( { "keyids": [], "name": "role2", "paths": ["fn3", "fn4"], "terminating": False, "threshold": 1, } ) assert isinstance(targets.delegations, Delegations) assert isinstance(targets.delegations.roles, Dict) targets.delegations.roles["role2"] = delegated_role key_dict = { "keytype": "ed25519", "keyval": { "public": "edcd0a32a07dce33f7c7873aaffbff36d20ea30787574ead335eefd337e4dacd" }, "scheme": "ed25519", } key = Key.from_dict("id2", key_dict) # Assert that add_key with old argument order will raise an error with self.assertRaises(ValueError): targets.add_key("role1", key) # Assert that delegated role "role1" does not contain the new key self.assertNotIn(key.keyid, targets.delegations.roles["role1"].keyids) targets.add_key(key, "role1") # Assert that the new key is added to the delegated role "role1" self.assertIn(key.keyid, targets.delegations.roles["role1"].keyids) # Confirm that the newly added key does not break the obj serialization targets.to_dict() # Try adding the same key again and assert it's ignored. past_keyid = targets.delegations.roles["role1"].keyids.copy() targets.add_key(key, "role1") self.assertEqual(past_keyid, targets.delegations.roles["role1"].keyids) # Try adding a key to a delegated role that doesn't exist with self.assertRaises(ValueError): targets.add_key(key, "nosuchrole") # Add the same key to "role2" as well targets.add_key(key, "role2") # Remove the key from "role1" role ("role2" still uses it) targets.revoke_key(key.keyid, "role1") # Assert that delegated role "role1" doesn't contain the key.
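# (The key material itself is expected to remain in delegations.keys while any other role still references it; see the "role2" assertion below.)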
self.assertNotIn(key.keyid, targets.delegations.roles["role1"].keyids) self.assertIn(key.keyid, targets.delegations.roles["role2"].keyids) # Remove the key from "role2" as well targets.revoke_key(key.keyid, "role2") self.assertNotIn(key.keyid, targets.delegations.roles["role2"].keyids) # Try removing a key not used by "role1" with self.assertRaises(ValueError): targets.revoke_key(key.keyid, "role1") # Try removing a key from a delegated role that doesn't exist with self.assertRaises(ValueError): targets.revoke_key(key.keyid, "nosuchrole") # Remove delegations as a whole targets.delegations = None # Test that calling add_key and revoke_key raises an error # and that delegations is still None after each of the API calls with self.assertRaises(ValueError): targets.add_key(key, "role1") self.assertTrue(targets.delegations is None) with self.assertRaises(ValueError): targets.revoke_key(key.keyid, "role1") self.assertTrue(targets.delegations is None) def test_targets_key_api_with_succinct_roles(self) -> None: targets_path = os.path.join(self.repo_dir, "metadata", "targets.json") targets: Targets = Metadata[Targets].from_file(targets_path).signed key_dict = { "keytype": "ed25519", "keyval": { "public": "edcd0a32a07dce33f7c7873aaffbff36d20ea30787574ead335eefd337e4dacd" }, "scheme": "ed25519", } key = Key.from_dict("id2", key_dict) # Remove delegated roles. assert targets.delegations is not None assert targets.delegations.roles is not None targets.delegations.roles = None targets.delegations.keys = {} # Add succinct_roles information. targets.delegations.succinct_roles = SuccinctRoles([], 1, 8, "foo") self.assertEqual(len(targets.delegations.keys), 0) self.assertEqual(len(targets.delegations.succinct_roles.keyids), 0) # Add a key to succinct_roles and verify it's saved. targets.add_key(key) self.assertIn(key.keyid, targets.delegations.keys) self.assertIn(key.keyid, targets.delegations.succinct_roles.keyids) self.assertEqual(len(targets.delegations.keys), 1) # Try adding the same key again and verify that nothing is added. targets.add_key(key) self.assertEqual(len(targets.delegations.keys), 1) # Remove the key and verify it's not stored anymore. targets.revoke_key(key.keyid) self.assertNotIn(key.keyid, targets.delegations.keys) self.assertNotIn(key.keyid, targets.delegations.succinct_roles.keyids) self.assertEqual(len(targets.delegations.keys), 0) # Try removing it again. with self.assertRaises(ValueError): targets.revoke_key(key.keyid) def test_length_and_hash_validation(self) -> None: # Test metadata files' hash and length verification. # Use timestamp to get a MetaFile object and snapshot # as the untrusted metadata file to verify.
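# (For orientation, the assumed shape of the MetaFile under test, as it is serialized inside timestamp.json:
#   {"version": 1, "length": <size in bytes>, "hashes": {"sha256": "..."}} )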
timestamp_path = os.path.join( self.repo_dir, "metadata", "timestamp.json" ) timestamp = Metadata[Timestamp].from_file(timestamp_path) snapshot_metafile = timestamp.signed.snapshot_meta snapshot_path = os.path.join(self.repo_dir, "metadata", "snapshot.json") with open(snapshot_path, "rb") as file: # test with data as a file object snapshot_metafile.verify_length_and_hashes(file) file.seek(0) data = file.read() # test with data as bytes snapshot_metafile.verify_length_and_hashes(data) # test exceptions expected_length = snapshot_metafile.length snapshot_metafile.length = 2345 with self.assertRaises(exceptions.LengthOrHashMismatchError): snapshot_metafile.verify_length_and_hashes(data) snapshot_metafile.length = expected_length snapshot_metafile.hashes = {"sha256": "incorrecthash"} with self.assertRaises(exceptions.LengthOrHashMismatchError): snapshot_metafile.verify_length_and_hashes(data) snapshot_metafile.hashes = { "unsupported-alg": "8f88e2ba48b412c3843e9bb26e1b6f8fc9e98aceb0fbaa97ba37b4c98717d7ab" } with self.assertRaises(exceptions.LengthOrHashMismatchError): snapshot_metafile.verify_length_and_hashes(data) # Test wrong algorithm format (sslib.FormatError) snapshot_metafile.hashes = { 256: "8f88e2ba48b412c3843e9bb26e1b6f8fc9e98aceb0fbaa97ba37b4c98717d7ab" # type: ignore[dict-item] } with self.assertRaises(exceptions.LengthOrHashMismatchError): snapshot_metafile.verify_length_and_hashes(data) # test optional length and hashes snapshot_metafile.length = None snapshot_metafile.hashes = None snapshot_metafile.verify_length_and_hashes(data) # Test target files' hash and length verification targets_path = os.path.join(self.repo_dir, "metadata", "targets.json") targets = Metadata[Targets].from_file(targets_path) file1_targetfile = targets.signed.targets["file1.txt"] filepath = os.path.join(self.repo_dir, Targets.type, "file1.txt") with open(filepath, "rb") as file1: file1_targetfile.verify_length_and_hashes(file1) # test exceptions expected_length = file1_targetfile.length file1_targetfile.length = 2345 with self.assertRaises(exceptions.LengthOrHashMismatchError): file1_targetfile.verify_length_and_hashes(file1) file1_targetfile.length = expected_length file1_targetfile.hashes = {"sha256": "incorrecthash"} with self.assertRaises(exceptions.LengthOrHashMismatchError): file1_targetfile.verify_length_and_hashes(file1) def test_targetfile_from_file(self) -> None: # Test with an existing file and valid hash algorithm file_path = os.path.join(self.repo_dir, Targets.type, "file1.txt") targetfile_from_file = TargetFile.from_file( file_path, file_path, ["sha256"] ) with open(file_path, "rb") as file: targetfile_from_file.verify_length_and_hashes(file) # Test with a non-existing file file_path = os.path.join(self.repo_dir, Targets.type, "file123.txt") with self.assertRaises(FileNotFoundError): TargetFile.from_file( file_path, file_path, [sslib_hash.DEFAULT_HASH_ALGORITHM] ) # Test with an unsupported algorithm file_path = os.path.join(self.repo_dir, Targets.type, "file1.txt") with self.assertRaises(ValueError): TargetFile.from_file(file_path, file_path, ["123"]) def test_targetfile_custom(self) -> None: # Test creating TargetFile and accessing custom. 
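# (The "custom" accessor exercised below is assumed to be a convenience property that reads the "custom" key out of the unrecognized_fields dict passed as the last constructor argument.)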
targetfile = TargetFile( 100, {"sha256": "abc"}, "file.txt", {"custom": "foo"} ) self.assertEqual(targetfile.custom, "foo") def test_targetfile_from_data(self) -> None: data = b"Inline test content" target_file_path = os.path.join( self.repo_dir, Targets.type, "file1.txt" ) # Test with a valid hash algorithm targetfile_from_data = TargetFile.from_data( target_file_path, data, ["sha256"] ) targetfile_from_data.verify_length_and_hashes(data) # Test with no algorithms specified targetfile_from_data = TargetFile.from_data(target_file_path, data) targetfile_from_data.verify_length_and_hashes(data) def test_metafile_from_data(self) -> None: data = b"Inline test content" # Test with a valid hash algorithm metafile = MetaFile.from_data(1, data, ["sha256"]) metafile.verify_length_and_hashes(data) # Test with an invalid hash algorithm with self.assertRaises(ValueError): metafile = MetaFile.from_data(1, data, ["invalid_algorithm"]) metafile.verify_length_and_hashes(data) self.assertEqual( metafile, MetaFile( 1, 19, { "sha256": "fcee2e6d56ab08eab279016f7db7e4e1d172ccea78e15f4cf8bd939991a418fa" }, ), ) def test_targetfile_get_prefixed_paths(self) -> None: target = TargetFile(100, {"sha256": "abc", "md5": "def"}, "a/b/f.ext") self.assertEqual( target.get_prefixed_paths(), ["a/b/abc.f.ext", "a/b/def.f.ext"] ) target = TargetFile(100, {"sha256": "abc", "md5": "def"}, "") self.assertEqual(target.get_prefixed_paths(), ["abc.", "def."]) target = TargetFile(100, {"sha256": "abc", "md5": "def"}, "a/b/") self.assertEqual(target.get_prefixed_paths(), ["a/b/abc.", "a/b/def."]) target = TargetFile(100, {"sha256": "abc", "md5": "def"}, "f.ext") self.assertEqual( target.get_prefixed_paths(), ["abc.f.ext", "def.f.ext"] ) target = TargetFile(100, {"sha256": "abc", "md5": "def"}, "a/b/.ext") self.assertEqual( target.get_prefixed_paths(), ["a/b/abc..ext", "a/b/def..ext"] ) target = TargetFile(100, {"sha256": "abc"}, "/root/file.ext") self.assertEqual(target.get_prefixed_paths(), ["/root/abc.file.ext"]) target = TargetFile(100, {"sha256": "abc"}, "/") self.assertEqual(target.get_prefixed_paths(), ["/abc."]) def test_is_delegated_role(self) -> None: # test path matches # see more extensive tests in test_is_target_in_pathpattern() for paths in [ ["a/path"], ["otherpath", "a/path"], ["*/?ath"], ]: role = DelegatedRole("", [], 1, False, paths, None) self.assertFalse(role.is_delegated_path("a/non-matching path")) self.assertTrue(role.is_delegated_path("a/path")) # test path hash prefix matches: sha256 sum of "a/path" is 927b0ecf9... for hash_prefixes in [ ["927b0ecf9"], ["other prefix", "927b0ecf9"], ["927b0"], ["92"], ]: role = DelegatedRole("", [], 1, False, None, hash_prefixes) self.assertFalse(role.is_delegated_path("a/non-matching path")) self.assertTrue(role.is_delegated_path("a/path")) def test_is_delegated_role_in_succinct_roles(self) -> None: succinct_roles = SuccinctRoles([], 1, 5, "bin") false_role_name_examples = [ "foo", "bin-", "bin-s", "bin-0t", "bin-20", "bin-100", ] for role_name in false_role_name_examples: msg = f"Error for {role_name}" self.assertFalse(succinct_roles.is_delegated_role(role_name), msg) # delegated role name suffixes are in hex format. 
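# (Reasoning for the examples: bit_length=5 gives 2**5 == 32 bins numbered 0x00..0x1f, so exactly two lowercase hex digits form a valid suffix, e.g. "bin-1f" is the last bin while "bin-20" is out of range.)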
true_name_examples = ["bin-00", "bin-0f", "bin-1f"] for role_name in true_name_examples: msg = f"Error for {role_name}" self.assertTrue(succinct_roles.is_delegated_role(role_name), msg) def test_get_roles_in_succinct_roles(self) -> None: succinct_roles = SuccinctRoles([], 1, 16, "bin") # bin names are in hex format and 4 hex digits are enough to represent # all bins between 0 and 2^16 - 1 meaning suffix_len must be 4 expected_suffix_length = 4 self.assertEqual(succinct_roles.suffix_len, expected_suffix_length) for bin_numer, role_name in enumerate(succinct_roles.get_roles()): # This adds zero-padding if the bin_numer is represented by a hex # number with a length less than expected_suffix_length. expected_bin_suffix = f"{bin_numer:0{expected_suffix_length}x}" self.assertEqual(role_name, f"bin-{expected_bin_suffix}") def test_delegations_get_delegated_role(self) -> None: delegations = Delegations({}, {}) targets = Targets(delegations=delegations) with self.assertRaises(ValueError): targets.get_delegated_role("abc") # test "normal" delegated role (path or path_hash_prefix) role = DelegatedRole("delegated", [], 1, False, []) delegations.roles = {"delegated": role} with self.assertRaises(ValueError): targets.get_delegated_role("not-delegated") self.assertEqual(targets.get_delegated_role("delegated"), role) delegations.roles = None # test succinct delegation bit_len = 3 role2 = SuccinctRoles([], 1, bit_len, "prefix") delegations.succinct_roles = role2 for name in ["prefix-", "prefix--1", f"prefix-{2**bit_len:0x}"]: with self.assertRaises(ValueError, msg=f"role name '{name}'"): targets.get_delegated_role(name) for i in range(2**bit_len): self.assertEqual( targets.get_delegated_role(f"prefix-{i:0x}"), role2 ) class TestSimpleEnvelope(unittest.TestCase): """Tests for public API in 'tuf/api/dsse.py'.""" @classmethod def setUpClass(cls) -> None: repo_data_dir = Path(utils.TESTS_DIR) / "repository_data" cls.metadata_dir = repo_data_dir / "repository" / "metadata" cls.keystore_dir = repo_data_dir / "keystore" cls.signers = {} root_path = os.path.join(cls.metadata_dir, "root.json") root: Root = Metadata.from_file(root_path).signed for role in [Snapshot, Targets, Timestamp]: uri = f"file2:{os.path.join(cls.keystore_dir, role.type + '_key')}" role_obj = root.get_delegated_role(role.type) key = root.get_key(role_obj.keyids[0]) cls.signers[role.type] = CryptoSigner.from_priv_key_uri(uri, key) def test_serialization(self) -> None: """Basic de/serialization test. 1. Load test metadata for each role 2. Wrap metadata payloads in envelope serializing the payload 3. Serialize envelope 4. De-serialize envelope 5. 
De-serialize payload """ for role in [Root, Timestamp, Snapshot, Targets]: metadata_path = self.metadata_dir / f"{role.type}.json" metadata = Metadata.from_file(str(metadata_path)) self.assertIsInstance(metadata.signed, role) envelope = SimpleEnvelope.from_signed(metadata.signed) envelope_bytes = envelope.to_bytes() envelope2 = SimpleEnvelope.from_bytes(envelope_bytes) payload = envelope2.get_signed() self.assertEqual(metadata.signed, payload) def test_fail_envelope_serialization(self) -> None: envelope = SimpleEnvelope(b"foo", "bar", ["baz"]) with self.assertRaises(SerializationError): envelope.to_bytes() def test_fail_envelope_deserialization(self) -> None: with self.assertRaises(DeserializationError): SimpleEnvelope.from_bytes(b"[") def test_fail_payload_serialization(self) -> None: with self.assertRaises(SerializationError): SimpleEnvelope.from_signed("foo") # type: ignore[type-var] def test_fail_payload_deserialization(self) -> None: payloads = [b"[", b'{"_type": "foo"}'] for payload in payloads: envelope = SimpleEnvelope(payload, "bar", []) with self.assertRaises(DeserializationError): envelope.get_signed() def test_verify_delegate(self) -> None: """Basic verification test. 1. Load test metadata for each role 2. Wrap non-root payloads in envelope serializing the payload 3. Sign with correct delegated key 4. Verify delegate with root """ root_path = self.metadata_dir / "root.json" root = Metadata[Root].from_file(str(root_path)).signed for role in [Timestamp, Snapshot, Targets]: metadata_path = self.metadata_dir / f"{role.type}.json" metadata = Metadata.from_file(str(metadata_path)) self.assertIsInstance(metadata.signed, role) signer = self.signers[role.type] self.assertIn(signer.public_key.keyid, root.roles[role.type].keyids) envelope = SimpleEnvelope.from_signed(metadata.signed) envelope.sign(signer) self.assertTrue(len(envelope.signatures) == 1) root.verify_delegate(role.type, envelope.pae(), envelope.signatures) # Run unit test. if __name__ == "__main__": utils.configure_test_logging(sys.argv) unittest.main() python-tuf-5.1.0/tests/test_examples.py000066400000000000000000000131431470074210500202250ustar00rootroot00000000000000# Copyright 2020, New York University and the TUF contributors # SPDX-License-Identifier: MIT OR Apache-2.0 """Unit tests for 'examples' scripts.""" import glob import os import shutil import sys import tempfile import unittest from pathlib import Path from typing import ClassVar, List from tests import utils class TestRepoExamples(unittest.TestCase): """Unit test class for 'manual_repo' scripts. Provides a '_run_script_and_assert_files' method to run (exec) a script located in the 'manual_repo' directory. """ repo_examples_dir: ClassVar[Path] @classmethod def setUpClass(cls) -> None: """Locate the example dir.""" base = Path(__file__).resolve().parents[1] cls.repo_examples_dir = base / "examples" / "manual_repo" def setUp(self) -> None: """Create and change into test dir.
NOTE: Test scripts are expected to create dirs/files in new CWD.""" self.original_cwd = os.getcwd() self.base_test_dir = os.path.realpath(tempfile.mkdtemp()) os.chdir(self.base_test_dir) def tearDown(self) -> None: """Change back to original dir and remove test dir, which may contain dirs/files the test created at test-time CWD.""" os.chdir(self.original_cwd) shutil.rmtree(self.base_test_dir) def _run_script_and_assert_files( self, script_name: str, filenames_created: List[str] ) -> None: """Run script in example dir and assert that it created the files corresponding to the passed filenames inside a 'tmp*' test dir at CWD.""" script_path = str(self.repo_examples_dir / script_name) with open(script_path, "rb") as f: exec( compile(f.read(), script_path, "exec"), {"__file__": script_path}, ) test_dirs = glob.glob("tmp*") self.assertTrue( len(test_dirs) == 1, f"expected 1 'tmp*' test dir, got {test_dirs}" ) test_dir = test_dirs.pop() for name in filenames_created: metadata_path = Path(test_dir) / f"{name}" self.assertTrue( metadata_path.exists(), f"missing '{metadata_path}' file" ) def test_basic_repo(self) -> None: """Run 'basic_repo.py' and assert creation of metadata files.""" self._run_script_and_assert_files( "basic_repo.py", [ "1.python-scripts.json", "1.root.json", "1.snapshot.json", "1.targets.json", "2.root.json", "2.snapshot.json", "2.targets.json", "timestamp.json", ], ) def test_hashed_bin_delegation(self) -> None: """Run 'hashed_bin_delegation.py' and assert creation of metadata files.""" self._run_script_and_assert_files( "hashed_bin_delegation.py", [ "1.bins.json", "1.00-07.json", "1.08-0f.json", "1.10-17.json", "1.18-1f.json", "1.20-27.json", "1.28-2f.json", "1.30-37.json", "1.38-3f.json", "1.40-47.json", "1.48-4f.json", "1.50-57.json", "1.58-5f.json", "1.60-67.json", "1.68-6f.json", "1.70-77.json", "1.78-7f.json", "1.80-87.json", "1.88-8f.json", "1.90-97.json", "1.98-9f.json", "1.a0-a7.json", "1.a8-af.json", "1.b0-b7.json", "1.b8-bf.json", "1.c0-c7.json", "1.c8-cf.json", "1.d0-d7.json", "1.d8-df.json", "1.e0-e7.json", "1.e8-ef.json", "1.f0-f7.json", "1.f8-ff.json", ], ) def test_succinct_hash_bin_delegation(self) -> None: self._run_script_and_assert_files( "succinct_hash_bin_delegations.py", [ "1.targets.json", "1.delegated_bin-00.json", "1.delegated_bin-01.json", "1.delegated_bin-02.json", "1.delegated_bin-03.json", "1.delegated_bin-04.json", "1.delegated_bin-05.json", "1.delegated_bin-06.json", "1.delegated_bin-07.json", "1.delegated_bin-08.json", "1.delegated_bin-09.json", "1.delegated_bin-0a.json", "1.delegated_bin-0b.json", "1.delegated_bin-0c.json", "1.delegated_bin-0d.json", "1.delegated_bin-0e.json", "1.delegated_bin-0f.json", "1.delegated_bin-10.json", "1.delegated_bin-11.json", "1.delegated_bin-12.json", "1.delegated_bin-13.json", "1.delegated_bin-14.json", "1.delegated_bin-15.json", "1.delegated_bin-16.json", "1.delegated_bin-17.json", "1.delegated_bin-18.json", "1.delegated_bin-19.json", "1.delegated_bin-1a.json", "1.delegated_bin-1b.json", "1.delegated_bin-1c.json", "1.delegated_bin-1d.json", "1.delegated_bin-1e.json", "1.delegated_bin-1f.json", ], ) if __name__ == "__main__": utils.configure_test_logging(sys.argv) unittest.main() python-tuf-5.1.0/tests/test_fetcher_ng.py000066400000000000000000000140451470074210500205150ustar00rootroot00000000000000# Copyright 2021, New York University and the TUF contributors # SPDX-License-Identifier: MIT OR Apache-2.0 """Unit test for RequestsFetcher.""" import io import logging import math import os import sys import tempfile
import unittest from typing import Any, ClassVar, Iterator from unittest.mock import Mock, patch import requests from tests import utils from tuf.api import exceptions from tuf.ngclient import RequestsFetcher logger = logging.getLogger(__name__) class TestFetcher(unittest.TestCase): """Test RequestsFetcher class.""" server_process_handler: ClassVar[utils.TestServerProcess] @classmethod def setUpClass(cls) -> None: """ Create a temporary file and launch a simple server in the current working directory. """ cls.server_process_handler = utils.TestServerProcess(log=logger) cls.file_contents = b"junk data" cls.file_length = len(cls.file_contents) with tempfile.NamedTemporaryFile( dir=os.getcwd(), delete=False ) as cls.target_file: cls.target_file.write(cls.file_contents) cls.url_prefix = ( f"http://{utils.TEST_HOST_ADDRESS}:" f"{cls.server_process_handler.port!s}" ) target_filename = os.path.basename(cls.target_file.name) cls.url = f"{cls.url_prefix}/{target_filename}" @classmethod def tearDownClass(cls) -> None: # Stop server process and perform clean up. cls.server_process_handler.clean() os.remove(cls.target_file.name) def setUp(self) -> None: # Instantiate a concrete instance of FetcherInterface self.fetcher = RequestsFetcher() # Simple fetch. def test_fetch(self) -> None: with tempfile.TemporaryFile() as temp_file: for chunk in self.fetcher.fetch(self.url): temp_file.write(chunk) temp_file.seek(0) self.assertEqual(self.file_contents, temp_file.read()) # URL data downloaded in more than one chunk def test_fetch_in_chunks(self) -> None: # Set a smaller chunk size to ensure that the file will be downloaded # in more than one chunk self.fetcher.chunk_size = 4 # expected_chunks_count: 3 (depends on length of self.file_length) expected_chunks_count = math.ceil( self.file_length / self.fetcher.chunk_size ) self.assertEqual(expected_chunks_count, 3) chunks_count = 0 with tempfile.TemporaryFile() as temp_file: for chunk in self.fetcher.fetch(self.url): temp_file.write(chunk) chunks_count += 1 temp_file.seek(0) self.assertEqual(self.file_contents, temp_file.read()) # Check that we calculate chunks as expected self.assertEqual(chunks_count, expected_chunks_count) # Incorrect URL parsing def test_url_parsing(self) -> None: with self.assertRaises(exceptions.DownloadError): self.fetcher.fetch("missing-scheme-and-hostname-in-url") # File not found error def test_http_error(self) -> None: with self.assertRaises(exceptions.DownloadHTTPError) as cm: self.url = f"{self.url_prefix}/non-existing-path" self.fetcher.fetch(self.url) self.assertEqual(cm.exception.status_code, 404) # Response read timeout error @patch.object(requests.Session, "get") def test_response_read_timeout(self, mock_session_get: Mock) -> None: mock_response = Mock() attr = { "iter_content.side_effect": requests.exceptions.ConnectionError( "Simulated timeout" ) } mock_response.configure_mock(**attr) mock_session_get.return_value = mock_response with self.assertRaises(exceptions.SlowRetrievalError): next(self.fetcher.fetch(self.url)) mock_response.iter_content.assert_called_once() # Read/connect session timeout error @patch.object( requests.Session, "get", side_effect=requests.exceptions.Timeout("Simulated timeout"), ) def test_session_get_timeout(self, mock_session_get: Mock) -> None: with self.assertRaises(exceptions.SlowRetrievalError): self.fetcher.fetch(self.url) mock_session_get.assert_called_once() # Simple bytes download def test_download_bytes(self) -> None: data = self.fetcher.download_bytes(self.url, self.file_length) 
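# (Illustrative extra check, not part of the original assertions: the aggregated download is assumed to come back as a single bytes object.)
self.assertIsInstance(data, bytes)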
self.assertEqual(self.file_contents, data) # Download file smaller than required max_length def test_download_bytes_upper_length(self) -> None: data = self.fetcher.download_bytes(self.url, self.file_length + 4) self.assertEqual(self.file_contents, data) # Download a file bigger than expected def test_download_bytes_length_mismatch(self) -> None: with self.assertRaises(exceptions.DownloadLengthMismatchError): self.fetcher.download_bytes(self.url, self.file_length - 4) # Simple file download def test_download_file(self) -> None: with self.fetcher.download_file( self.url, self.file_length ) as temp_file: temp_file.seek(0, io.SEEK_END) self.assertEqual(self.file_length, temp_file.tell()) # Download file smaller than required max_length def test_download_file_upper_length(self) -> None: with self.fetcher.download_file( self.url, self.file_length + 4 ) as temp_file: temp_file.seek(0, io.SEEK_END) self.assertEqual(self.file_length, temp_file.tell()) # Download a file bigger than expected def test_download_file_length_mismatch(self) -> Iterator[Any]: with self.assertRaises(exceptions.DownloadLengthMismatchError): # Force download_file to execute and raise the error since it is a # context manager and returns Iterator[IO] yield self.fetcher.download_file(self.url, self.file_length - 4) # Run unit test. if __name__ == "__main__": utils.configure_test_logging(sys.argv) unittest.main() python-tuf-5.1.0/tests/test_metadata_eq_.py000066400000000000000000000152341470074210500210160ustar00rootroot00000000000000# Copyright New York University and the TUF contributors # SPDX-License-Identifier: MIT OR Apache-2.0 """Test __eq__ implementations of classes inside tuf/api/metadata.py.""" import copy import os import sys import unittest from typing import Any, ClassVar, Dict from securesystemslib.signer import SSlibKey from tests import utils from tuf.api.metadata import ( TOP_LEVEL_ROLE_NAMES, DelegatedRole, Delegations, Metadata, MetaFile, Role, Signature, SuccinctRoles, TargetFile, ) class TestMetadataComparisions(unittest.TestCase): """Test __eq__ for all classes inside tuf/api/metadata.py.""" metadata: ClassVar[Dict[str, bytes]] @classmethod def setUpClass(cls) -> None: cls.repo_dir = os.path.join( utils.TESTS_DIR, "repository_data", "repository", "metadata" ) # Store class instances in this dict instead of creating them inside the # test function in order to escape the need for reinitialization of the # instances on each run of the test function. cls.objects = {} for md in TOP_LEVEL_ROLE_NAMES: with open(os.path.join(cls.repo_dir, f"{md}.json"), "rb") as f: data = f.read() cls.objects[md.capitalize()] = Metadata.from_bytes(data).signed cls.objects["Metadata"] = Metadata(cls.objects["Timestamp"], {}) cls.objects["Signed"] = cls.objects["Timestamp"] cls.objects["Key"] = SSlibKey( "id", "rsa", "rsassa-pss-sha256", {"public": "foo"} ) cls.objects["Role"] = Role(["keyid1", "keyid2"], 3) cls.objects["MetaFile"] = MetaFile(1, 12, {"sha256": "abc"}) cls.objects["DelegatedRole"] = DelegatedRole("a", [], 1, False, ["d"]) cls.objects["SuccinctRoles"] = SuccinctRoles(["keyid"], 1, 8, "foo") cls.objects["Delegations"] = Delegations( {"keyid": cls.objects["Key"]}, {"a": cls.objects["DelegatedRole"]} ) cls.objects["TargetFile"] = TargetFile( 1, {"sha256": "abc"}, "file1.txt" ) # Keys are class names. # Values are dictionaries containing attribute names and their new values. 
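# (Worked example for one entry: for "Role", the test below sets role.keyids = [] and then role.threshold = 10, and each single-attribute change must make the modified copy compare unequal to the original.)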
classes_attributes_modifications: utils.DataSet = { "Metadata": {"signed": None, "signatures": None}, "Signed": {"version": -1, "spec_version": "0.0.0"}, "Key": {"keyid": "a", "keytype": "foo", "scheme": "b", "keyval": "b"}, "Role": {"keyids": [], "threshold": 10}, "Root": {"consistent_snapshot": None, "keys": {}}, "MetaFile": {"version": None, "length": None, "hashes": {}}, "Timestamp": {"snapshot_meta": None}, "Snapshot": {"meta": None}, "DelegatedRole": { "name": "", "terminating": None, "paths": [""], "path_hash_prefixes": [""], }, "SuccinctRoles": {"bit_length": 0, "name_prefix": ""}, "Delegations": {"keys": {}, "roles": {}}, "TargetFile": {"length": 0, "hashes": {}, "path": ""}, "Targets": {"targets": {}, "delegations": []}, } @utils.run_sub_tests_with_dataset(classes_attributes_modifications) def test_classes_eq_(self, test_case_data: Dict[str, Any]) -> None: obj = self.objects[self.case_name] # Assert that obj is not equal to an object from another type self.assertNotEqual(obj, "") obj_2 = copy.deepcopy(obj) # Assert that __eq__ works for equal objects. self.assertEqual(obj, obj_2) for attr, value in test_case_data.items(): original_value = getattr(obj_2, attr) setattr(obj_2, attr, value) # Assert that the original object != modified one. self.assertNotEqual(obj, obj_2, f"Failed case: {attr}") # Restore the old value of the attribute. setattr(obj_2, attr, original_value) def test_md_eq_signatures_reversed_order(self) -> None: # Test comparing objects with same signatures but different order. # Remove all signatures and create new ones. md: Metadata = self.objects["Metadata"] md.signatures = {"a": Signature("a", "a"), "b": Signature("b", "b")} md_2 = copy.deepcopy(md) # Reverse signatures order in md_2. md_2.signatures = dict(reversed(md_2.signatures.items())) # Assert that both objects are not the same because of signatures order. self.assertNotEqual(md, md_2) # but if we fix the signatures order they will be equal md_2.signatures = {"a": Signature("a", "a"), "b": Signature("b", "b")} self.assertEqual(md, md_2) def test_md_eq_special_signatures_tests(self) -> None: # Test that metadata objects with different signatures are not equal. md: Metadata = self.objects["Metadata"] md_2 = copy.deepcopy(md) md_2.signatures = {} self.assertNotEqual(md, md_2) # Test that metadata objects with empty signatures are equal md.signatures = {} self.assertEqual(md, md_2) # Metadata objects with different signature types are not equal. md_2.signatures = "" # type: ignore[assignment] self.assertNotEqual(md, md_2) def test_delegations_eq_roles_reversed_order(self) -> None: # Test comparing objects with same delegated roles but different order. role_one_dict = { "keyids": ["keyid1"], "name": "a", "terminating": False, "paths": ["fn1"], "threshold": 1, } role_two_dict = { "keyids": ["keyid2"], "name": "b", "terminating": True, "paths": ["fn2"], "threshold": 4, } delegations_dict = { "keys": { "keyid2": { "keytype": "ed25519", "scheme": "ed25519", "keyval": {"public": "bar"}, } }, "roles": [role_one_dict, role_two_dict], } delegations = Delegations.from_dict(copy.deepcopy(delegations_dict)) # Create a second delegations obj with reversed roles order delegations_2 = copy.deepcopy(delegations) assert isinstance(delegations.roles, dict) delegations_2.roles = dict(reversed(delegations.roles.items())) # Both objects are not equal because of the delegated roles order.
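# (Hedged rationale: the order of delegated roles affects how delegations are evaluated, so Delegations equality is assumed to be order-sensitive for roles, unlike plain dict comparison.)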
self.assertNotEqual(delegations, delegations_2) # but if we fix the delegated roles order they will be equal delegations_2.roles = delegations.roles self.assertEqual(delegations, delegations_2) # Run unit test. if __name__ == "__main__": utils.configure_test_logging(sys.argv) unittest.main() python-tuf-5.1.0/tests/test_metadata_generation.py000066400000000000000000000012561470074210500224040ustar00rootroot00000000000000"""Unit tests for 'tests/generated_data/generate_md.py'.""" # Copyright New York University and the TUF contributors # SPDX-License-Identifier: MIT OR Apache-2.0 import sys import unittest from tests import utils from tests.generated_data.generate_md import generate_all_files class TestMetadataGeneration(unittest.TestCase): """Test metadata files generation.""" @staticmethod def test_compare_static_md_to_generated() -> None: # md_generator = MetadataGenerator("generated_data/ed25519_metadata") generate_all_files(dump=False, verify=True) # Run unit test. if __name__ == "__main__": utils.configure_test_logging(sys.argv) unittest.main() python-tuf-5.1.0/tests/test_metadata_serialization.py000066400000000000000000001015231470074210500231240ustar00rootroot00000000000000# Copyright New York University and the TUF contributors # SPDX-License-Identifier: MIT OR Apache-2.0 """Unit tests testing tuf/api/metadata.py classes serialization and deserialization. """ import copy import json import logging import sys import unittest from securesystemslib.signer import Signature from tests import utils from tuf.api.metadata import ( DelegatedRole, Delegations, Key, Metadata, MetaFile, Role, Root, Snapshot, SuccinctRoles, TargetFile, Targets, Timestamp, ) from tuf.api.serialization import DeserializationError logger = logging.getLogger(__name__) class TestSerialization(unittest.TestCase): """Test serialization for all classes in 'tuf/api/metadata.py'.""" invalid_metadata: utils.DataSet = { "no signatures field": b'{"signed": \ { "_type": "timestamp", "spec_version": "1.0.0", "version": 1, "expires": "2030-01-01T00:00:00Z", \ "meta": {"snapshot.json": {"hashes": {"sha256" : "abc"}, "version": 1}}} \ }', "non unique duplicating signatures": b'{"signed": \ { "_type": "timestamp", "spec_version": "1.0.0", "version": 1, "expires": "2030-01-01T00:00:00Z", \ "meta": {"snapshot.json": {"hashes": {"sha256" : "abc"}, "version": 1}}}, \ "signatures": [{"keyid": "id", "sig": "b"}, {"keyid": "id", "sig": "b"}] \ }', } @utils.run_sub_tests_with_dataset(invalid_metadata) def test_invalid_metadata_serialization(self, test_data: bytes) -> None: # We expect a DeserializationError reraised from ValueError or KeyError. 
with self.assertRaises(DeserializationError): Metadata.from_bytes(test_data) valid_metadata: utils.DataSet = { "multiple signatures": b'{ \ "signed": \ { "_type": "timestamp", "spec_version": "1.0.0", "version": 1, "expires": "2030-01-01T00:00:00Z", \ "meta": {"snapshot.json": {"hashes": {"sha256" : "abc"}, "version": 1}}}, \ "signatures": [{ "keyid": "id", "sig": "b"}, {"keyid": "id2", "sig": "d" }] \ }', "no signatures": b'{ \ "signed": \ { "_type": "timestamp", "spec_version": "1.0.0", "version": 1, "expires": "2030-01-01T00:00:00Z", \ "meta": {"snapshot.json": {"hashes": {"sha256" : "abc"}, "version": 1}}}, \ "signatures": [] \ }', "unrecognized fields": b'{ \ "signed": \ { "_type": "timestamp", "spec_version": "1.0.0", "version": 1, "expires": "2030-01-01T00:00:00Z", \ "meta": {"snapshot.json": {"hashes": {"sha256" : "abc"}, "version": 1}}}, \ "signatures": [{"keyid": "id", "sig": "b"}], \ "foo": "bar" \ }', } @utils.run_sub_tests_with_dataset(valid_metadata) def test_valid_metadata_serialization(self, test_case_data: bytes) -> None: md = Metadata.from_bytes(test_case_data) # Convert to a JSON and sort the keys the way we do in JSONSerializer. separators = (",", ":") test_json = json.loads(test_case_data) test_bytes = json.dumps( test_json, separators=separators, sort_keys=True ).encode("utf-8") self.assertEqual(test_bytes, md.to_bytes()) invalid_signatures: utils.DataSet = { "missing keyid attribute in a signature": '{ "sig": "abc" }', "missing sig attribute in a signature": '{ "keyid": "id" }', } @utils.run_sub_tests_with_dataset(invalid_signatures) def test_invalid_signature_serialization(self, test_data: str) -> None: case_dict = json.loads(test_data) with self.assertRaises(KeyError): Signature.from_dict(case_dict) valid_signatures: utils.DataSet = { "all": '{ "keyid": "id", "sig": "b"}', "unrecognized fields": '{ "keyid": "id", "sig": "b", "foo": "bar"}', } @utils.run_sub_tests_with_dataset(valid_signatures) def test_signature_serialization(self, test_case_data: str) -> None: case_dict = json.loads(test_case_data) signature = Signature.from_dict(copy.copy(case_dict)) self.assertEqual(case_dict, signature.to_dict()) # Snapshot instances with meta = {} are valid, but for a full valid # repository it's required that meta has at least one element inside it. 
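# (For contrast with the invalid cases below, a minimal valid snapshot payload is assumed to look like: {"_type": "snapshot", "spec_version": "1.0.0", "version": 1, "expires": "2030-01-01T00:00:00Z", "meta": {}}.)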
invalid_signed: utils.DataSet = { "no _type": '{"spec_version": "1.0.0", "expires": "2030-01-01T00:00:00Z", "meta": {}}', "no spec_version": '{"_type": "snapshot", "version": 1, "expires": "2030-01-01T00:00:00Z", "meta": {}}', "no version": '{"_type": "snapshot", "spec_version": "1.0.0", "expires": "2030-01-01T00:00:00Z", "meta": {}}', "no expires": '{"_type": "snapshot", "spec_version": "1.0.0", "version": 1, "meta": {}}', "empty str _type": '{"_type": "", "spec_version": "1.0.0", "version": 1, "expires": "2030-01-01T00:00:00Z", "meta": {}}', "empty str spec_version": '{"_type": "snapshot", "spec_version": "", "version": 1, "expires": "2030-01-01T00:00:00Z", "meta": {}}', "_type wrong type": '{"_type": "foo", "spec_version": "1.0.0", "version": 1, "expires": "2030-01-01T00:00:00Z", "meta": {}}', "version wrong type": '{"_type": "snapshot", "spec_version": "1.0.0", "version": "a", "expires": "2030-01-01T00:00:00Z", "meta": {}}', "invalid spec_version str": '{"_type": "snapshot", "spec_version": "abc", "version": 1, "expires": "2030-01-01T00:00:00Z", "meta": {}}', "non-number spec_version": '{"_type": "snapshot", "spec_version": "1.2.a", "version": 1, "expires": "2030-01-01T00:00:00Z", "meta": {}}', "one part spec_version": '{"_type": "snapshot", "spec_version": "1", "version": 1, "expires": "2030-01-01T00:00:00Z", "meta": {}}', "different major spec_version": '{"_type": "snapshot", "spec_version": "0.0.0", "version": 1, "expires": "2030-01-01T00:00:00Z", "meta": {}}', "version 0": '{"_type": "snapshot", "spec_version": "1.0.0", "version": 0, "expires": "2030-01-01T00:00:00Z", "meta": {}}', "version below 0": '{"_type": "snapshot", "spec_version": "1.0.0", "version": -1, "expires": "2030-01-01T00:00:00Z", "meta": {}}', "wrong datetime string": '{"_type": "snapshot", "spec_version": "1.0.0", "version": 1, "expires": "abc", "meta": {}}', } @utils.run_sub_tests_with_dataset(invalid_signed) def test_invalid_signed_serialization(self, test_case_data: str) -> None: case_dict = json.loads(test_case_data) with self.assertRaises((KeyError, ValueError, TypeError)): Snapshot.from_dict(case_dict) valid_keys: utils.DataSet = { "all": '{"keytype": "rsa", "scheme": "rsassa-pss-sha256", \ "keyval": {"public": "foo"}}', "unrecognized field": '{"keytype": "rsa", "scheme": "rsassa-pss-sha256", \ "keyval": {"public": "foo"}, "foo": "bar"}', "unrecognized field in keyval": '{"keytype": "rsa", "scheme": "rsassa-pss-sha256", \ "keyval": {"public": "foo", "foo": "bar"}}', } @utils.run_sub_tests_with_dataset(valid_keys) def test_valid_key_serialization(self, test_case_data: str) -> None: case_dict = json.loads(test_case_data) key = Key.from_dict("id", copy.copy(case_dict)) self.assertDictEqual(case_dict, key.to_dict()) invalid_keys: utils.DataSet = { "no keyid": '{"keytype": "rsa", "scheme": "rsassa-pss-sha256", "keyval": {"public": "abc"}}', "no keytype": '{"keyid": "id", "scheme": "rsassa-pss-sha256", "keyval": {"public": "foo"}}', "no scheme": '{"keyid": "id", "keytype": "rsa", "keyval": {"public": "foo"}}', "no keyval": '{"keyid": "id", "keytype": "rsa", "scheme": "rsassa-pss-sha256"}', "keyid wrong type": '{"keyid": 1, "keytype": "rsa", "scheme": "rsassa-pss-sha256", "keyval": {"public": "abc"}}', "keytype wrong type": '{"keyid": "id", "keytype": 1, "scheme": "rsassa-pss-sha256", "keyval": {"public": "abc"}}', "scheme wrong type": '{"keyid": "id", "keytype": "rsa", "scheme": 1, "keyval": {"public": "abc"}}', "keyval wrong type": '{"keyid": "id", "keytype": "rsa", "scheme": "rsassa-pss-sha256", "keyval": 1}', } 
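# (Note on the test below: in the "no keyid" case the KeyError comes from case_dict.pop("keyid") itself, while the remaining cases fail inside Key.from_dict; the single assertRaises block covers both paths.)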
@utils.run_sub_tests_with_dataset(invalid_keys) def test_invalid_key_serialization(self, test_case_data: str) -> None: case_dict = json.loads(test_case_data) with self.assertRaises((TypeError, KeyError, ValueError)): keyid = case_dict.pop("keyid") Key.from_dict(keyid, case_dict) invalid_roles: utils.DataSet = { "no threshold": '{"keyids": ["keyid"]}', "no keyids": '{"threshold": 3}', "wrong threshold type": '{"keyids": ["keyid"], "threshold": "a"}', "wrong keyids type": '{"keyids": 1, "threshold": 3}', "threshold below 1": '{"keyids": ["keyid"], "threshold": 0}', "duplicate keyids": '{"keyids": ["keyid", "keyid"], "threshold": 3}', } @utils.run_sub_tests_with_dataset(invalid_roles) def test_invalid_role_serialization(self, test_case_data: str) -> None: case_dict = json.loads(test_case_data) with self.assertRaises((KeyError, TypeError, ValueError)): Role.from_dict(case_dict) valid_roles: utils.DataSet = { "all": '{"keyids": ["keyid"], "threshold": 3}', "many keyids": '{"keyids": ["a", "b", "c", "d", "e"], "threshold": 1}', "ordered keyids": '{"keyids": ["c", "b", "a"], "threshold": 1}', "empty keyids": '{"keyids": [], "threshold": 1}', "unrecognized field": '{"keyids": ["keyid"], "threshold": 3, "foo": "bar"}', } @utils.run_sub_tests_with_dataset(valid_roles) def test_role_serialization(self, test_case_data: str) -> None: case_dict = json.loads(test_case_data) role = Role.from_dict(copy.deepcopy(case_dict)) self.assertDictEqual(case_dict, role.to_dict()) valid_roots: utils.DataSet = { "all": '{"_type": "root", "spec_version": "1.0.0", "version": 1, \ "expires": "2030-01-01T00:00:00Z", "consistent_snapshot": false, \ "keys": { \ "keyid1" : {"keytype": "rsa", "scheme": "rsassa-pss-sha256", "keyval": {"public": "foo"}}, \ "keyid2" : {"keytype": "ed25519", "scheme": "ed25519", "keyval": {"public": "bar"}}}, \ "roles": { \ "root": {"keyids": ["keyid1"], "threshold": 1}, \ "timestamp": {"keyids": ["keyid2"], "threshold": 1}, \ "targets": {"keyids": ["keyid1"], "threshold": 1}, \ "snapshot": {"keyids": ["keyid2"], "threshold": 1}} \ }', "no consistent_snapshot": '{ "_type": "root", "spec_version": "1.0.0", "version": 1, \ "expires": "2030-01-01T00:00:00Z", \ "keys": {"keyid" : {"keytype": "rsa", "scheme": "rsassa-pss-sha256", "keyval": {"public": "foo"} }}, \ "roles": { \ "root": {"keyids": ["keyid"], "threshold": 1}, \ "timestamp": {"keyids": ["keyid"], "threshold": 1}, \ "targets": {"keyids": ["keyid"], "threshold": 1}, \ "snapshot": {"keyids": ["keyid"], "threshold": 1}} \ }', "empty keys": '{"_type": "root", "spec_version": "1.0.0", "version": 1, \ "expires": "2030-01-01T00:00:00Z", "consistent_snapshot": false, \ "keys": {}, \ "roles": { \ "root": {"keyids": [], "threshold": 1}, \ "timestamp": {"keyids": [], "threshold": 1}, \ "targets": {"keyids": [], "threshold": 1}, \ "snapshot": {"keyids": [], "threshold": 1}} \ }', "unrecognized field": '{"_type": "root", "spec_version": "1.0.0", "version": 1, \ "expires": "2030-01-01T00:00:00Z", "consistent_snapshot": false, \ "keys": {"keyid" : {"keytype": "rsa", "scheme": "rsassa-pss-sha256", "keyval": {"public": "foo"}}}, \ "roles": { \ "root": {"keyids": ["keyid"], "threshold": 1}, \ "timestamp": {"keyids": ["keyid"], "threshold": 1}, \ "targets": {"keyids": ["keyid"], "threshold": 1}, \ "snapshot": {"keyids": ["keyid"], "threshold": 1} \ }, \ "foo": "bar"}', } @utils.run_sub_tests_with_dataset(valid_roots) def test_root_serialization(self, test_case_data: str) -> None: case_dict = json.loads(test_case_data) root = 
Root.from_dict(copy.deepcopy(case_dict)) self.assertDictEqual(case_dict, root.to_dict()) invalid_roots: utils.DataSet = { "invalid role name": '{"_type": "root", "spec_version": "1.0.0", "version": 1, \ "expires": "2030-01-01T00:00:00Z", "consistent_snapshot": false, \ "keys": { \ "keyid1" : {"keytype": "rsa", "scheme": "rsassa-pss-sha256", "keyval": {"public": "foo"}}, \ "keyid2" : {"keytype": "ed25519", "scheme": "ed25519", "keyval": {"public": "bar"}}}, \ "roles": { \ "bar": {"keyids": ["keyid1"], "threshold": 1}, \ "timestamp": {"keyids": ["keyid2"], "threshold": 1}, \ "targets": {"keyids": ["keyid1"], "threshold": 1}, \ "snapshot": {"keyids": ["keyid2"], "threshold": 1}} \ }', "missing root role": '{"_type": "root", "spec_version": "1.0.0", "version": 1, \ "expires": "2030-01-01T00:00:00Z", "consistent_snapshot": false, \ "keys": { \ "keyid1" : {"keytype": "rsa", "scheme": "rsassa-pss-sha256", "keyval": {"public": "foo"}}, \ "keyid2" : {"keytype": "ed25519", "scheme": "ed25519", "keyval": {"public": "bar"}}}, \ "roles": { \ "timestamp": {"keyids": ["keyid2"], "threshold": 1}, \ "targets": {"keyids": ["keyid1"], "threshold": 1}, \ "snapshot": {"keyids": ["keyid2"], "threshold": 1}} \ }', "one additional role": '{"_type": "root", "spec_version": "1.0.0", "version": 1, \ "expires": "2030-01-01T00:00:00Z", "consistent_snapshot": false, \ "keys": { \ "keyid1" : {"keytype": "rsa", "scheme": "rsassa-pss-sha256", "keyval": {"public": "foo"}}, \ "keyid2" : {"keytype": "ed25519", "scheme": "ed25519", "keyval": {"public": "bar"}}}, \ "roles": { \ "root": {"keyids": ["keyid1"], "threshold": 1}, \ "timestamp": {"keyids": ["keyid2"], "threshold": 1}, \ "targets": {"keyids": ["keyid1"], "threshold": 1}, \ "snapshot": {"keyids": ["keyid2"], "threshold": 1}, \ "foo": {"keyids": ["keyid2"], "threshold": 1}} \ }', "invalid expiry with microseconds": '{"_type": "root", "spec_version": "1.0.0", "version": 1, \ "expires": "2030-01-01T12:00:00.123456Z", "consistent_snapshot": false, \ "keys": {}, "roles": {"root": {}, "timestamp": {}, "targets": {}, "snapshot": {}}}', } @utils.run_sub_tests_with_dataset(invalid_roots) def test_invalid_root_serialization(self, test_case_data: str) -> None: case_dict = json.loads(test_case_data) with self.assertRaises(ValueError): Root.from_dict(case_dict) invalid_metafiles: utils.DataSet = { "wrong length type": '{"version": 1, "length": "a", "hashes": {"sha256" : "abc"}}', "version 0": '{"version": 0, "length": 1, "hashes": {"sha256" : "abc"}}', "length below 0": '{"version": 1, "length": -1, "hashes": {"sha256" : "abc"}}', "empty hashes dict": '{"version": 1, "length": 1, "hashes": {}}', "hashes wrong type": '{"version": 1, "length": 1, "hashes": 1}', "hashes values wrong type": '{"version": 1, "length": 1, "hashes": {"sha256": 1}}', } @utils.run_sub_tests_with_dataset(invalid_metafiles) def test_invalid_metafile_serialization(self, test_case_data: str) -> None: case_dict = json.loads(test_case_data) with self.assertRaises((TypeError, ValueError, AttributeError)): MetaFile.from_dict(case_dict) valid_metafiles: utils.DataSet = { "all": '{"hashes": {"sha256" : "abc"}, "length": 12, "version": 1}', "no length": '{"hashes": {"sha256" : "abc"}, "version": 1 }', "length 0": '{"version": 1, "length": 0, "hashes": {"sha256" : "abc"}}', "no hashes": '{"length": 12, "version": 1}', "unrecognized field": '{"hashes": {"sha256" : "abc"}, "length": 12, "version": 1, "foo": "bar"}', "many hashes": '{"hashes": {"sha256" : "abc", "sha512": "cde"}, "length": 12, "version": 1}', } 
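    # The round-trip contract that every valid_* dataset in this module
    # checks is that from_dict() followed by to_dict() returns an equal
    # dictionary, unrecognized fields included. The copy matters: from_dict()
    # implementations consume their input dict, so a reference to the
    # original must be kept for the comparison. A standalone sketch of the
    # idea (illustrative only, not one of the decorated tests below):
    def _metafile_round_trip_example(self) -> None:
        case_dict = {"hashes": {"sha256": "abc"}, "length": 12, "version": 1}
        metafile = MetaFile.from_dict(copy.copy(case_dict))
        self.assertDictEqual(case_dict, metafile.to_dict())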
@utils.run_sub_tests_with_dataset(valid_metafiles) def test_metafile_serialization(self, test_case_data: str) -> None: case_dict = json.loads(test_case_data) metafile = MetaFile.from_dict(copy.copy(case_dict)) self.assertDictEqual(case_dict, metafile.to_dict()) invalid_timestamps: utils.DataSet = { "no metafile": '{ "_type": "timestamp", "spec_version": "1.0.0", "version": 1, "expires": "2030-01-01T00:00:00Z"}', } @utils.run_sub_tests_with_dataset(invalid_timestamps) def test_invalid_timestamp_serialization(self, test_case_data: str) -> None: case_dict = json.loads(test_case_data) with self.assertRaises((ValueError, KeyError)): Timestamp.from_dict(case_dict) valid_timestamps: utils.DataSet = { "all": '{ "_type": "timestamp", "spec_version": "1.0.0", "version": 1, "expires": "2030-01-01T00:00:00Z", \ "meta": {"snapshot.json": {"hashes": {"sha256" : "abc"}, "version": 1}}}', "legacy spec_version": '{ "_type": "timestamp", "spec_version": "1.0", "version": 1, "expires": "2030-01-01T00:00:00Z", \ "meta": {"snapshot.json": {"hashes": {"sha256" : "abc"}, "version": 1}}}', "unrecognized field": '{ "_type": "timestamp", "spec_version": "1.0.0", "version": 1, "expires": "2030-01-01T00:00:00Z", \ "meta": {"snapshot.json": {"hashes": {"sha256" : "abc"}, "version": 1}}, "foo": "bar"}', } @utils.run_sub_tests_with_dataset(valid_timestamps) def test_timestamp_serialization(self, test_case_data: str) -> None: case_dict = json.loads(test_case_data) timestamp = Timestamp.from_dict(copy.deepcopy(case_dict)) self.assertDictEqual(case_dict, timestamp.to_dict()) valid_snapshots: utils.DataSet = { "all": '{ "_type": "snapshot", "spec_version": "1.0.0", "version": 1, "expires": "2030-01-01T00:00:00Z", \ "meta": { \ "file1.txt": {"hashes": {"sha256" : "abc"}, "version": 1}, \ "file2.txt": {"hashes": {"sha256" : "cde"}, "version": 1} \ }}', "empty meta": '{ "_type": "snapshot", "spec_version": "1.0.0", "version": 1, "expires": "2030-01-01T00:00:00Z", \ "meta": {} \ }', "unrecognized field": '{ "_type": "snapshot", "spec_version": "1.0.0", "version": 1, "expires": "2030-01-01T00:00:00Z", \ "meta": { "file.txt": { "hashes": {"sha256" : "abc"}, "version": 1 }}, "foo": "bar"}', } @utils.run_sub_tests_with_dataset(valid_snapshots) def test_snapshot_serialization(self, test_case_data: str) -> None: case_dict = json.loads(test_case_data) snapshot = Snapshot.from_dict(copy.deepcopy(case_dict)) self.assertDictEqual(case_dict, snapshot.to_dict()) valid_delegated_roles: utils.DataSet = { # DelegatedRole inherits Role and some use cases can be found in the valid_roles. 
"no hash prefix attribute": '{"keyids": ["keyid"], "name": "a", "paths": ["fn1", "fn2"], \ "terminating": false, "threshold": 1}', "no path attribute": '{"keyids": ["keyid"], "name": "a", "terminating": false, \ "path_hash_prefixes": ["h1", "h2"], "threshold": 99}', "empty paths": '{"keyids": ["keyid"], "name": "a", "paths": [], \ "terminating": false, "threshold": 1}', "empty path_hash_prefixes": '{"keyids": ["keyid"], "name": "a", "terminating": false, \ "path_hash_prefixes": [], "threshold": 99}', "unrecognized field": '{"keyids": ["keyid"], "name": "a", "terminating": true, "paths": ["fn1"], "threshold": 3, "foo": "bar"}', "many keyids": '{"keyids": ["keyid1", "keyid2"], "name": "a", "paths": ["fn1", "fn2"], \ "terminating": false, "threshold": 1}', "ordered keyids": '{"keyids": ["keyid2", "keyid1"], "name": "a", "paths": ["fn1", "fn2"], \ "terminating": false, "threshold": 1}', } @utils.run_sub_tests_with_dataset(valid_delegated_roles) def test_delegated_role_serialization(self, test_case_data: str) -> None: case_dict = json.loads(test_case_data) deserialized_role = DelegatedRole.from_dict(copy.copy(case_dict)) self.assertDictEqual(case_dict, deserialized_role.to_dict()) invalid_delegated_roles: utils.DataSet = { # DelegatedRole inherits Role and some use cases can be found in the invalid_roles. "missing hash prefixes and paths": '{"name": "a", "keyids": ["keyid"], "threshold": 1, "terminating": false}', "both hash prefixes and paths": '{"name": "a", "keyids": ["keyid"], "threshold": 1, "terminating": false, \ "paths": ["fn1", "fn2"], "path_hash_prefixes": ["h1", "h2"]}', "invalid path type": '{"keyids": ["keyid"], "name": "a", "paths": [1,2,3], \ "terminating": false, "threshold": 1}', "invalid path_hash_prefixes type": '{"keyids": ["keyid"], "name": "a", "path_hash_prefixes": [1,2,3], \ "terminating": false, "threshold": 1}', } @utils.run_sub_tests_with_dataset(invalid_delegated_roles) def test_invalid_delegated_role_serialization( self, test_case_data: str ) -> None: case_dict = json.loads(test_case_data) with self.assertRaises(ValueError): DelegatedRole.from_dict(case_dict) valid_succinct_roles: utils.DataSet = { # SuccinctRoles inherits Role and some use cases can be found in the valid_roles. "standard succinct_roles information": '{"keyids": ["keyid"], "threshold": 1, \ "bit_length": 8, "name_prefix": "foo"}', "succinct_roles with unrecognized fields": '{"keyids": ["keyid"], "threshold": 1, \ "bit_length": 8, "name_prefix": "foo", "foo": "bar"}', } @utils.run_sub_tests_with_dataset(valid_succinct_roles) def test_succinct_roles_serialization(self, test_case_data: str) -> None: case_dict = json.loads(test_case_data) succinct_roles = SuccinctRoles.from_dict(copy.copy(case_dict)) self.assertDictEqual(case_dict, succinct_roles.to_dict()) invalid_succinct_roles: utils.DataSet = { # SuccinctRoles inherits Role and some use cases can be found in the invalid_roles. 
"missing bit_length from succinct_roles": '{"keyids": ["keyid"], "threshold": 1, "name_prefix": "foo"}', "missing name_prefix from succinct_roles": '{"keyids": ["keyid"], "threshold": 1, "bit_length": 8}', "succinct_roles with invalid bit_length type": '{"keyids": ["keyid"], "threshold": 1, "bit_length": "a", "name_prefix": "foo"}', "succinct_roles with invalid name_prefix type": '{"keyids": ["keyid"], "threshold": 1, "bit_length": 8, "name_prefix": 1}', "succinct_roles with high bit_length value": '{"keyids": ["keyid"], "threshold": 1, "bit_length": 50, "name_prefix": "foo"}', "succinct_roles with low bit_length value": '{"keyids": ["keyid"], "threshold": 1, "bit_length": 0, "name_prefix": "foo"}', } @utils.run_sub_tests_with_dataset(invalid_succinct_roles) def test_invalid_succinct_roles_serialization(self, test_data: str) -> None: case_dict = json.loads(test_data) with self.assertRaises((ValueError, KeyError, TypeError)): SuccinctRoles.from_dict(case_dict) invalid_delegations: utils.DataSet = { "empty delegations": "{}", "missing keys": '{ "roles": [ \ {"keyids": ["keyid"], "name": "a", "terminating": true, "paths": ["fn1"], "threshold": 3}, \ {"keyids": ["keyid2"], "name": "b", "terminating": true, "paths": ["fn2"], "threshold": 4} ] \ }', "missing roles": '{"keys": { \ "keyid1" : {"keytype": "rsa", "scheme": "rsassa-pss-sha256", "keyval": {"public": "foo"}}, \ "keyid2" : {"keytype": "ed25519", "scheme": "ed25519", "keyval": {"public": "bar"}}}}', "bad keys": '{"keys": "foo", \ "roles": [{"keyids": ["keyid"], "name": "a", "paths": ["fn1", "fn2"], "terminating": false, "threshold": 3}]}', "bad roles": '{"keys": {"keyid" : {"keytype": "rsa", "scheme": "rsassa-pss-sha256", "keyval": {"public": "foo"}}}, \ "roles": ["foo"]}', "duplicate role names": '{"keys": {"keyid" : {"keytype": "rsa", "scheme": "rsassa-pss-sha256", "keyval": {"public": "foo"}}}, \ "roles": [ \ {"keyids": ["keyid"], "name": "a", "paths": ["fn1", "fn2"], "terminating": false, "threshold": 3}, \ {"keyids": ["keyid2"], "name": "a", "paths": ["fn3"], "terminating": false, "threshold": 2} \ ] \ }', "using empty string role name": '{"keys": { \ "keyid1" : {"keytype": "rsa", "scheme": "rsassa-pss-sha256", "keyval": {"public": "foo"}}}, \ "roles": [ \ {"keyids": ["keyid1"], "name": "", "terminating": true, "paths": ["fn1"], "threshold": 3}] \ }', "using root as delegate role name": '{"keys": { \ "keyid1" : {"keytype": "rsa", "scheme": "rsassa-pss-sha256", "keyval": {"public": "foo"}}}, \ "roles": [ \ {"keyids": ["keyid1"], "name": "root", "terminating": true, "paths": ["fn1"], "threshold": 3}] \ }', "using snapshot as delegate role name": '{"keys": { \ "keyid1" : {"keytype": "rsa", "scheme": "rsassa-pss-sha256", "keyval": {"public": "foo"}}}, \ "roles": [ \ {"keyids": ["keyid1"], "name": "snapshot", "terminating": true, "paths": ["fn1"], "threshold": 3}] \ }', "using targets as delegate role name": '{"keys": { \ "keyid1" : {"keytype": "rsa", "scheme": "rsassa-pss-sha256", "keyval": {"public": "foo"}}}, \ "roles": [ \ {"keyids": ["keyid1"], "name": "targets", "terminating": true, "paths": ["fn1"], "threshold": 3}] \ }', "using timestamp as delegate role name": '{"keys": { \ "keyid1" : {"keytype": "rsa", "scheme": "rsassa-pss-sha256", "keyval": {"public": "foo"}}}, \ "roles": [ \ {"keyids": ["keyid1"], "name": "timestamp", "terminating": true, "paths": ["fn1"], "threshold": 3}] \ }', "using valid and top-level role name": '{"keys": { \ "keyid1" : {"keytype": "rsa", "scheme": "rsassa-pss-sha256", "keyval": {"public": "foo"}}, \ 
"keyid2" : {"keytype": "ed25519", "scheme": "ed25519", "keyval": {"public": "bar"}}}, \ "roles": [ \ {"keyids": ["keyid1"], "name": "b", "terminating": true, "paths": ["fn1"], "threshold": 3}, \ {"keyids": ["keyid2"], "name": "root", "terminating": true, "paths": ["fn2"], "threshold": 4} ] \ }', "roles and succinct_roles set": '{"keys": { \ "keyid1" : {"keytype": "rsa", "scheme": "rsassa-pss-sha256", "keyval": {"public": "foo"}}, \ "keyid2" : {"keytype": "ed25519", "scheme": "ed25519", "keyval": {"public": "bar"}}}, \ "roles": [ \ {"keyids": ["keyid"], "name": "a", "terminating": true, "paths": ["fn1"], "threshold": 3}, \ {"keyids": ["keyid2"], "name": "b", "terminating": true, "paths": ["fn2"], "threshold": 4} ], \ "succinct_roles": {"keyids": ["keyid"], "threshold": 1, "bit_length": 8, "name_prefix": "foo"}}', } @utils.run_sub_tests_with_dataset(invalid_delegations) def test_invalid_delegation_serialization( self, test_case_data: str ) -> None: case_dict = json.loads(test_case_data) with self.assertRaises((ValueError, KeyError, AttributeError)): Delegations.from_dict(case_dict) valid_delegations: utils.DataSet = { "with roles": '{"keys": { \ "keyid1" : {"keytype": "rsa", "scheme": "rsassa-pss-sha256", "keyval": {"public": "foo"}}, \ "keyid2" : {"keytype": "ed25519", "scheme": "ed25519", "keyval": {"public": "bar"}}}, \ "roles": [ \ {"keyids": ["keyid"], "name": "a", "terminating": true, "paths": ["fn1"], "threshold": 3}, \ {"keyids": ["keyid2"], "name": "b", "terminating": true, "paths": ["fn2"], "threshold": 4} ] \ }', "with succinct_roles": '{"keys": { \ "keyid1" : {"keytype": "rsa", "scheme": "rsassa-pss-sha256", "keyval": {"public": "foo"}}, \ "keyid2" : {"keytype": "ed25519", "scheme": "ed25519", "keyval": {"public": "bar"}}}, \ "succinct_roles": {"keyids": ["keyid"], "threshold": 1, "bit_length": 8, "name_prefix": "foo"}}', "unrecognized field": '{"keys": {"keyid" : {"keytype": "rsa", "scheme": "rsassa-pss-sha256", "keyval": {"public": "foo"}}}, \ "roles": [ {"keyids": ["keyid"], "name": "a", "paths": ["fn1", "fn2"], "terminating": true, "threshold": 3} ], \ "foo": "bar"}', "empty keys and roles": '{"keys": {}, \ "roles": [] \ }', } @utils.run_sub_tests_with_dataset(valid_delegations) def test_delegation_serialization(self, test_case_data: str) -> None: case_dict = json.loads(test_case_data) delegation = Delegations.from_dict(copy.deepcopy(case_dict)) self.assertDictEqual(case_dict, delegation.to_dict()) invalid_targetfiles: utils.DataSet = { "no hashes": '{"length": 1}', "no length": '{"hashes": {"sha256": "abc"}}', # The remaining cases are the same as for invalid_hashes and # invalid_length datasets. 
} @utils.run_sub_tests_with_dataset(invalid_targetfiles) def test_invalid_targetfile_serialization( self, test_case_data: str ) -> None: case_dict = json.loads(test_case_data) with self.assertRaises(KeyError): TargetFile.from_dict(case_dict, "file1.txt") valid_targetfiles: utils.DataSet = { "all": '{"length": 12, "hashes": {"sha256" : "abc"}, \ "custom" : {"foo": "bar"} }', "no custom": '{"length": 12, "hashes": {"sha256" : "abc"}}', "unrecognized field": '{"length": 12, "hashes": {"sha256" : "abc"}, \ "custom" : {"foo": "bar"}, "foo": "bar"}', } @utils.run_sub_tests_with_dataset(valid_targetfiles) def test_targetfile_serialization(self, test_case_data: str) -> None: case_dict = json.loads(test_case_data) target_file = TargetFile.from_dict(copy.copy(case_dict), "file1.txt") self.assertDictEqual(case_dict, target_file.to_dict()) valid_targets: utils.DataSet = { "all attributes": '{"_type": "targets", "spec_version": "1.0.0", "version": 1, "expires": "2030-01-01T00:00:00Z", \ "targets": { \ "file.txt": {"length": 12, "hashes": {"sha256" : "abc"} }, \ "file2.txt": {"length": 50, "hashes": {"sha256" : "cde"} } }, \ "delegations": { \ "keys": { \ "keyid" : {"keytype": "rsa", "scheme": "rsassa-pss-sha256", "keyval": {"public": "foo"}}, \ "keyid2": {"keytype": "ed25519", "scheme": "ed25519", "keyval": {"public": "bar"}}}, \ "roles": [ \ {"keyids": ["keyid"], "name": "a", "terminating": true, "paths": ["fn1"], "threshold": 3}, \ {"keyids": ["keyid2"], "name": "b", "terminating": true, "paths": ["fn2"], "threshold": 4} ] \ }}', "empty targets": '{"_type": "targets", "spec_version": "1.0.0", "version": 1, "expires": "2030-01-01T00:00:00Z", \ "targets": {}, \ "delegations": {"keys": {"keyid" : {"keytype": "rsa", \ "scheme": "rsassa-pss-sha256", "keyval": {"public": "foo"} }}, \ "roles": [ {"keyids": ["keyid"], "name": "a", "paths": ["fn1", "fn2"], "terminating": true, "threshold": 3} ]} \ }', "no delegations": '{"_type": "targets", "spec_version": "1.0.0", "version": 1, "expires": "2030-01-01T00:00:00Z", \ "targets": { "file.txt": {"length": 12, "hashes": {"sha256" : "abc"} } } \ }', "unrecognized field": '{"_type": "targets", "spec_version": "1.0.0", "version": 1, "expires": "2030-01-01T00:00:00Z", \ "targets": {}, "foo": "bar"}', } @utils.run_sub_tests_with_dataset(valid_targets) def test_targets_serialization(self, test_case_data: str) -> None: case_dict = json.loads(test_case_data) targets = Targets.from_dict(copy.deepcopy(case_dict)) self.assertDictEqual(case_dict, targets.to_dict())
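# A rough, self-contained sketch of what a dataset-driven decorator such as
# utils.run_sub_tests_with_dataset can look like. The real helper lives in
# tests/utils.py and may differ in details; this version exists purely to
# document the mechanism and is not used by the tests above:
def _sketch_run_sub_tests_with_dataset(dataset):
    def decorator(test_func):
        def wrapper(self):
            for case_name, case_data in dataset.items():
                with self.subTest(case=case_name):
                    test_func(self, case_data)

        return wrapper

    return decorator


# Run unit test.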
if __name__ == "__main__": utils.configure_test_logging(sys.argv) unittest.main() python-tuf-5.1.0/tests/test_repository.py000066400000000000000000000220361470074210500206270ustar00rootroot00000000000000# Copyright 2024 python-tuf contributors # SPDX-License-Identifier: MIT OR Apache-2.0 """Tests for tuf.repository module""" import copy import logging import sys import unittest from collections import defaultdict from datetime import datetime, timedelta, timezone from typing import Dict, List from securesystemslib.signer import CryptoSigner, Signer from tests import utils from tuf.api.metadata import ( TOP_LEVEL_ROLE_NAMES, DelegatedRole, Delegations, Metadata, MetaFile, Root, Snapshot, TargetFile, Targets, Timestamp, ) from tuf.repository import Repository logger = logging.getLogger(__name__) _signed_init = { Root.type: Root, Snapshot.type: Snapshot, Targets.type: Targets, Timestamp.type: Timestamp, } class TestingRepository(Repository): """Very simple in-memory repository implementation This repository keeps the metadata for all versions of all roles in memory. Mostly copied from examples/repository. Attributes: role_cache: Every historical metadata version of every role in this repository. Keys are role names and values are lists of Metadata signer_cache: All signers available to the repository. Keys are role names, values are lists of signers """ expiry_period = timedelta(days=1) def __init__(self) -> None: # all versions of all metadata self.role_cache: Dict[str, List[Metadata]] = defaultdict(list) # all current keys self.signer_cache: Dict[str, List[Signer]] = defaultdict(list) # version cache for snapshot and all targets, updated in close(). # The 'defaultdict(lambda: ...)' trick allows close() to easily modify # the version without always creating a new MetaFile self._snapshot_info = MetaFile(1) self._targets_infos: Dict[str, MetaFile] = defaultdict( lambda: MetaFile(1) ) # setup a basic repository, generate signing key per top-level role with self.edit_root() as root: for role in ["root", "timestamp", "snapshot", "targets"]: signer = CryptoSigner.generate_ecdsa() self.signer_cache[role].append(signer) root.add_key(signer.public_key, role) for role in ["timestamp", "snapshot", "targets"]: with self.edit(role): pass @property def targets_infos(self) -> Dict[str, MetaFile]: return self._targets_infos @property def snapshot_info(self) -> MetaFile: return self._snapshot_info def open(self, role: str) -> Metadata: """Return current Metadata for role from 'storage' (or create a new one) """ if role not in self.role_cache: signed_init = _signed_init.get(role, Targets) md = Metadata(signed_init()) # this makes version bumping in close() simpler md.signed.version = 0 return md # return a _copy_ of latest metadata from storage return copy.deepcopy(self.role_cache[role][-1]) def close(self, role: str, md: Metadata) -> None: """Store a version of metadata.
Handle version bumps, expiry, signing""" md.signed.version += 1 md.signed.expires = datetime.now(timezone.utc) + self.expiry_period md.signatures.clear() for signer in self.signer_cache[role]: md.sign(signer, append=True) # store new metadata version, update version caches self.role_cache[role].append(md) if role == "snapshot": self._snapshot_info.version = md.signed.version elif role not in ["root", "timestamp"]: self._targets_infos[f"{role}.json"].version = md.signed.version class TestRepository(unittest.TestCase): """Tests for tuf.repository module.""" def setUp(self) -> None: self.repo = TestingRepository() def test_initial_repo_setup(self) -> None: # check that we have metadata for top level roles self.assertEqual(4, len(self.repo.role_cache)) for role in TOP_LEVEL_ROLE_NAMES: # There should be a single version for each role role_versions = self.repo.role_cache[role] self.assertEqual(1, len(role_versions)) self.assertEqual(1, role_versions[-1].signed.version) # test the Repository helpers: self.assertIsInstance(self.repo.root(), Root) self.assertIsInstance(self.repo.timestamp(), Timestamp) self.assertIsInstance(self.repo.snapshot(), Snapshot) self.assertIsInstance(self.repo.targets(), Targets) def test_do_snapshot(self) -> None: # Expect no-op because targets have not changed and snapshot is still valid created, _ = self.repo.do_snapshot() self.assertFalse(created) snapshot_versions = self.repo.role_cache["snapshot"] self.assertEqual(1, len(snapshot_versions)) self.assertEqual(1, snapshot_versions[-1].signed.version) def test_do_snapshot_after_targets_change(self) -> None: # do a targets change, expect do_snapshot to create a new snapshot with self.repo.edit_targets() as targets: targets.targets["path"] = TargetFile.from_data("path", b"data") created, _ = self.repo.do_snapshot() self.assertTrue(created) snapshot_versions = self.repo.role_cache["snapshot"] self.assertEqual(2, len(snapshot_versions)) self.assertEqual(2, snapshot_versions[-1].signed.version) def test_do_snapshot_after_new_targets_delegation(self) -> None: # Add new delegated target, expect do_snapshot to create a new snapshot signer = CryptoSigner.generate_ecdsa() self.repo.signer_cache["delegated"].append(signer) # Add a new delegation to targets with self.repo.edit_targets() as targets: role = DelegatedRole("delegated", [], 1, True, []) targets.delegations = Delegations({}, {"delegated": role}) targets.add_key(signer.public_key, "delegated") # create a version of the delegated metadata with self.repo.edit("delegated") as _: pass created, _ = self.repo.do_snapshot() self.assertTrue(created) snapshot_versions = self.repo.role_cache["snapshot"] self.assertEqual(2, len(snapshot_versions)) self.assertEqual(2, snapshot_versions[-1].signed.version) def test_do_snapshot_after_snapshot_key_change(self) -> None: # change snapshot signing keys with self.repo.edit_root() as root: # remove key keyid = root.roles["snapshot"].keyids[0] root.revoke_key(keyid, "snapshot") self.repo.signer_cache["snapshot"].clear() # add new key signer = CryptoSigner.generate_ecdsa() self.repo.signer_cache["snapshot"].append(signer) root.add_key(signer.public_key, "snapshot") # snapshot is no longer signed correctly, expect do_snapshot to create a new snapshot created, _ = self.repo.do_snapshot() self.assertTrue(created) snapshot_versions = self.repo.role_cache["snapshot"] self.assertEqual(2, len(snapshot_versions)) self.assertEqual(2, snapshot_versions[-1].signed.version) def test_do_timestamp(self) -> None: # Expect no-op because snapshot has not
changed and timestamp is still valid created, _ = self.repo.do_timestamp() self.assertFalse(created) timestamp_versions = self.repo.role_cache["timestamp"] self.assertEqual(1, len(timestamp_versions)) self.assertEqual(1, timestamp_versions[-1].signed.version) def test_do_timestamp_after_snapshot_change(self) -> None: # do a snapshot change, expect do_timestamp to create a new timestamp self.repo.do_snapshot(force=True) created, _ = self.repo.do_timestamp() self.assertTrue(created) timestamp_versions = self.repo.role_cache["timestamp"] self.assertEqual(2, len(timestamp_versions)) self.assertEqual(2, timestamp_versions[-1].signed.version) def test_do_timestamp_after_timestamp_key_change(self) -> None: # change timestamp signing keys with self.repo.edit_root() as root: # remove key keyid = root.roles["timestamp"].keyids[0] root.revoke_key(keyid, "timestamp") self.repo.signer_cache["timestamp"].clear() # add new key signer = CryptoSigner.generate_ecdsa() self.repo.signer_cache["timestamp"].append(signer) root.add_key(signer.public_key, "timestamp") # timestamp is no longer signed correctly, expect do_timestamp to create a new timestamp created, _ = self.repo.do_timestamp() self.assertTrue(created) timestamp_versions = self.repo.role_cache["timestamp"] self.assertEqual(2, len(timestamp_versions)) self.assertEqual(2, timestamp_versions[-1].signed.version) if __name__ == "__main__": utils.configure_test_logging(sys.argv) unittest.main() python-tuf-5.1.0/tests/test_trusted_metadata_set.py000066400000000000000000000543341470074210500226230ustar00rootroot00000000000000"""Unit tests for 'tuf/ngclient/_internal/trusted_metadata_set.py'.""" import logging import os import sys import unittest from datetime import datetime, timezone from typing import Callable, ClassVar, Dict, List, Optional, Tuple from securesystemslib.signer import Signer from tests import utils from tuf.api import exceptions from tuf.api.dsse import SimpleEnvelope from tuf.api.metadata import ( Metadata, MetaFile, Root, Signed, Snapshot, Targets, Timestamp, ) from tuf.api.serialization.json import JSONSerializer from tuf.ngclient._internal.trusted_metadata_set import ( TrustedMetadataSet, _load_from_simple_envelope, ) from tuf.ngclient.config import EnvelopeType logger = logging.getLogger(__name__) class TestTrustedMetadataSet(unittest.TestCase): """Tests for all public API of the TrustedMetadataSet class.""" keystore: ClassVar[Dict[str, Signer]] metadata: ClassVar[Dict[str, bytes]] repo_dir: ClassVar[str] @classmethod def modify_metadata( cls, rolename: str, modification_func: Callable ) -> bytes: """Instantiate metadata from rolename type, call modification_func and sign it again with cls.keystore[rolename] signer. Args: rolename: Name of the metadata which will be modified. modification_func: Function that will be called to modify the signed portion of metadata bytes.
""" metadata = Metadata.from_bytes(cls.metadata[rolename]) modification_func(metadata.signed) metadata.sign(cls.keystore[rolename]) return metadata.to_bytes(JSONSerializer(validate=True)) @classmethod def setUpClass(cls) -> None: cls.repo_dir = os.path.join( utils.TESTS_DIR, "repository_data", "repository", "metadata" ) cls.metadata = {} for md in [ Root.type, Timestamp.type, Snapshot.type, Targets.type, "role1", "role2", ]: with open(os.path.join(cls.repo_dir, f"{md}.json"), "rb") as f: cls.metadata[md] = f.read() keystore_dir = os.path.join( utils.TESTS_DIR, "repository_data", "keystore" ) root = Metadata[Root].from_bytes(cls.metadata[Root.type]).signed cls.keystore = {} for role in [ Root.type, Snapshot.type, Targets.type, Timestamp.type, ]: uri = f"file2:{os.path.join(keystore_dir, role + '_key')}" role_obj = root.get_delegated_role(role) key = root.get_key(role_obj.keyids[0]) cls.keystore[role] = Signer.from_priv_key_uri(uri, key) def hashes_length_modifier(timestamp: Timestamp) -> None: timestamp.snapshot_meta.hashes = None timestamp.snapshot_meta.length = None cls.metadata[Timestamp.type] = cls.modify_metadata( Timestamp.type, hashes_length_modifier ) def setUp(self) -> None: self.trusted_set = TrustedMetadataSet( self.metadata[Root.type], EnvelopeType.METADATA ) def _update_all_besides_targets( self, timestamp_bytes: Optional[bytes] = None, snapshot_bytes: Optional[bytes] = None, ) -> None: """Update all metadata roles besides targets. Args: timestamp_bytes: Bytes used when calling trusted_set.update_timestamp(). Default self.metadata[Timestamp.type]. snapshot_bytes: Bytes used when calling trusted_set.update_snapshot(). Default self.metadata[Snapshot.type]. """ timestamp_bytes = timestamp_bytes or self.metadata[Timestamp.type] self.trusted_set.update_timestamp(timestamp_bytes) snapshot_bytes = snapshot_bytes or self.metadata[Snapshot.type] self.trusted_set.update_snapshot(snapshot_bytes) def test_update(self) -> None: self.trusted_set.update_timestamp(self.metadata[Timestamp.type]) self.trusted_set.update_snapshot(self.metadata[Snapshot.type]) self.trusted_set.update_targets(self.metadata[Targets.type]) self.trusted_set.update_delegated_targets( self.metadata["role1"], "role1", Targets.type ) self.trusted_set.update_delegated_targets( self.metadata["role2"], "role2", "role1" ) # the 4 top level metadata objects + 2 additional delegated targets self.assertTrue(len(self.trusted_set), 6) count = 0 for md in self.trusted_set: self.assertIsInstance(md, Signed) count += 1 self.assertTrue(count, 6) def test_update_metadata_output(self) -> None: timestamp = self.trusted_set.update_timestamp( self.metadata["timestamp"] ) snapshot = self.trusted_set.update_snapshot(self.metadata["snapshot"]) targets = self.trusted_set.update_targets(self.metadata["targets"]) delegeted_targets_1 = self.trusted_set.update_delegated_targets( self.metadata["role1"], "role1", "targets" ) delegeted_targets_2 = self.trusted_set.update_delegated_targets( self.metadata["role2"], "role2", "role1" ) self.assertIsInstance(timestamp, Timestamp) self.assertIsInstance(snapshot, Snapshot) self.assertIsInstance(targets, Targets) self.assertIsInstance(delegeted_targets_1, Targets) self.assertIsInstance(delegeted_targets_2, Targets) def test_out_of_order_ops(self) -> None: # Update snapshot before timestamp with self.assertRaises(RuntimeError): self.trusted_set.update_snapshot(self.metadata[Snapshot.type]) self.trusted_set.update_timestamp(self.metadata[Timestamp.type]) # Update root after timestamp with 
self.assertRaises(RuntimeError): self.trusted_set.update_root(self.metadata[Root.type]) # Update targets before snapshot with self.assertRaises(RuntimeError): self.trusted_set.update_targets(self.metadata[Targets.type]) self.trusted_set.update_snapshot(self.metadata[Snapshot.type]) # Update timestamp after snapshot with self.assertRaises(RuntimeError): self.trusted_set.update_timestamp(self.metadata[Timestamp.type]) # Update delegated targets before targets with self.assertRaises(RuntimeError): self.trusted_set.update_delegated_targets( self.metadata["role1"], "role1", Targets.type ) self.trusted_set.update_targets(self.metadata[Targets.type]) # Update snapshot after successful targets update with self.assertRaises(RuntimeError): self.trusted_set.update_snapshot(self.metadata[Snapshot.type]) self.trusted_set.update_delegated_targets( self.metadata["role1"], "role1", Targets.type ) def test_bad_initial_root(self) -> None: # root is not json with self.assertRaises(exceptions.RepositoryError): TrustedMetadataSet(b"", EnvelopeType.METADATA) # root is invalid root = Metadata.from_bytes(self.metadata[Root.type]) root.signed.version += 1 with self.assertRaises(exceptions.UnsignedMetadataError): TrustedMetadataSet(root.to_bytes(), EnvelopeType.METADATA) # metadata is of wrong type with self.assertRaises(exceptions.RepositoryError): TrustedMetadataSet( self.metadata[Snapshot.type], EnvelopeType.METADATA ) def test_bad_root_update(self) -> None: # root is not json with self.assertRaises(exceptions.RepositoryError): self.trusted_set.update_root(b"") # root is invalid root = Metadata.from_bytes(self.metadata[Root.type]) root.signed.version += 1 with self.assertRaises(exceptions.UnsignedMetadataError): self.trusted_set.update_root(root.to_bytes()) # metadata is of wrong type with self.assertRaises(exceptions.RepositoryError): self.trusted_set.update_root(self.metadata[Snapshot.type]) def test_top_level_md_with_invalid_json(self) -> None: top_level_md: List[Tuple[bytes, Callable[[bytes], Signed]]] = [ (self.metadata[Timestamp.type], self.trusted_set.update_timestamp), (self.metadata[Snapshot.type], self.trusted_set.update_snapshot), (self.metadata[Targets.type], self.trusted_set.update_targets), ] for metadata, update_func in top_level_md: md = Metadata.from_bytes(metadata) # metadata is not json with self.assertRaises(exceptions.RepositoryError): update_func(b"") # metadata is invalid md.signed.version += 1 with self.assertRaises(exceptions.UnsignedMetadataError): update_func(md.to_bytes()) # metadata is of wrong type with self.assertRaises(exceptions.RepositoryError): update_func(self.metadata[Root.type]) update_func(metadata) def test_update_root_new_root(self) -> None: # test that root can be updated with a new valid version def root_new_version_modifier(root: Root) -> None: root.version += 1 root = self.modify_metadata(Root.type, root_new_version_modifier) self.trusted_set.update_root(root) def test_update_root_new_root_fail_threshold_verification(self) -> None: # Increase threshold in new root, do not add enough keys def root_threshold_bump(root: Root) -> None: root.version += 1 root.roles[Root.type].threshold += 1 root = self.modify_metadata(Root.type, root_threshold_bump) with self.assertRaises(exceptions.UnsignedMetadataError): self.trusted_set.update_root(root) def test_update_root_new_root_ver_same_as_trusted_root_ver(self) -> None: with self.assertRaises(exceptions.BadVersionNumberError): self.trusted_set.update_root(self.metadata[Root.type]) def test_root_expired_final_root(self) -> None:
def root_expired_modifier(root: Root) -> None: root.expires = datetime(1970, 1, 1, tzinfo=timezone.utc) # intermediate root can be expired root = self.modify_metadata(Root.type, root_expired_modifier) tmp_trusted_set = TrustedMetadataSet(root, EnvelopeType.METADATA) # update timestamp to trigger final root expiry check with self.assertRaises(exceptions.ExpiredMetadataError): tmp_trusted_set.update_timestamp(self.metadata[Timestamp.type]) def test_update_timestamp_new_timestamp_ver_below_trusted_ver(self) -> None: # new_timestamp.version < trusted_timestamp.version def version_modifier(timestamp: Timestamp) -> None: timestamp.version = 3 timestamp = self.modify_metadata(Timestamp.type, version_modifier) self.trusted_set.update_timestamp(timestamp) with self.assertRaises(exceptions.BadVersionNumberError): self.trusted_set.update_timestamp(self.metadata[Timestamp.type]) def test_update_timestamp_with_same_timestamp(self) -> None: # Test that timestamp is NOT updated if: # new_timestamp.version == trusted_timestamp.version self.trusted_set.update_timestamp(self.metadata[Timestamp.type]) initial_timestamp = self.trusted_set.timestamp # Update timestamp with the same version. with self.assertRaises(exceptions.EqualVersionNumberError): self.trusted_set.update_timestamp(self.metadata[Timestamp.type]) # Every object has a unique id(): if the ids are equal, the timestamp # was not updated. self.assertEqual(id(initial_timestamp), id(self.trusted_set.timestamp)) def test_update_timestamp_snapshot_ver_below_current(self) -> None: def bump_snapshot_version(timestamp: Timestamp) -> None: timestamp.snapshot_meta.version = 2 # The timestamp version must be increased to initiate an update. timestamp.version += 1 # set current known snapshot.json version to 2 timestamp = self.modify_metadata(Timestamp.type, bump_snapshot_version) self.trusted_set.update_timestamp(timestamp) # new_timestamp.meta.version < trusted_timestamp.meta.version with self.assertRaises(exceptions.BadVersionNumberError): self.trusted_set.update_timestamp(self.metadata[Timestamp.type]) def test_update_timestamp_expired(self) -> None: # new_timestamp has expired def timestamp_expired_modifier(timestamp: Timestamp) -> None: timestamp.expires = datetime(1970, 1, 1, tzinfo=timezone.utc) # expired intermediate timestamp is loaded but raises timestamp = self.modify_metadata( Timestamp.type, timestamp_expired_modifier ) with self.assertRaises(exceptions.ExpiredMetadataError): self.trusted_set.update_timestamp(timestamp) # snapshot update does start but fails because timestamp is expired with self.assertRaises(exceptions.ExpiredMetadataError): self.trusted_set.update_snapshot(self.metadata[Snapshot.type]) def test_update_snapshot_length_or_hash_mismatch(self) -> None: def modify_snapshot_length(timestamp: Timestamp) -> None: timestamp.snapshot_meta.length = 1 # set known snapshot.json length to 1 timestamp = self.modify_metadata(Timestamp.type, modify_snapshot_length) self.trusted_set.update_timestamp(timestamp) with self.assertRaises(exceptions.RepositoryError): self.trusted_set.update_snapshot(self.metadata[Snapshot.type]) def test_update_snapshot_fail_threshold_verification(self) -> None: self.trusted_set.update_timestamp(self.metadata[Timestamp.type]) snapshot = Metadata.from_bytes(self.metadata[Snapshot.type]) snapshot.signatures.clear() with self.assertRaises(exceptions.UnsignedMetadataError): self.trusted_set.update_snapshot(snapshot.to_bytes()) def test_update_snapshot_version_diverge_timestamp_snapshot_version( self, ) -> None: def
timestamp_version_modifier(timestamp: Timestamp) -> None: timestamp.snapshot_meta.version = 2 timestamp = self.modify_metadata( Timestamp.type, timestamp_version_modifier ) self.trusted_set.update_timestamp(timestamp) # if intermediate snapshot version is incorrect, load it but also raise with self.assertRaises(exceptions.BadVersionNumberError): self.trusted_set.update_snapshot(self.metadata[Snapshot.type]) # targets update starts but fails if snapshot version does not match with self.assertRaises(exceptions.BadVersionNumberError): self.trusted_set.update_targets(self.metadata[Targets.type]) def test_update_snapshot_file_removed_from_meta(self) -> None: self._update_all_besides_targets(self.metadata[Timestamp.type]) def remove_file_from_meta(snapshot: Snapshot) -> None: del snapshot.meta["targets.json"] # Test removing a meta_file in new_snapshot compared to the old snapshot snapshot = self.modify_metadata(Snapshot.type, remove_file_from_meta) with self.assertRaises(exceptions.RepositoryError): self.trusted_set.update_snapshot(snapshot) def test_update_snapshot_meta_version_decreases(self) -> None: self.trusted_set.update_timestamp(self.metadata[Timestamp.type]) def version_meta_modifier(snapshot: Snapshot) -> None: snapshot.meta["targets.json"].version += 1 snapshot = self.modify_metadata(Snapshot.type, version_meta_modifier) self.trusted_set.update_snapshot(snapshot) with self.assertRaises(exceptions.BadVersionNumberError): self.trusted_set.update_snapshot(self.metadata[Snapshot.type]) def test_update_snapshot_expired_new_snapshot(self) -> None: self.trusted_set.update_timestamp(self.metadata[Timestamp.type]) def snapshot_expired_modifier(snapshot: Snapshot) -> None: snapshot.expires = datetime(1970, 1, 1, tzinfo=timezone.utc) # expired intermediate snapshot is loaded but will raise snapshot = self.modify_metadata( Snapshot.type, snapshot_expired_modifier ) with self.assertRaises(exceptions.ExpiredMetadataError): self.trusted_set.update_snapshot(snapshot) # targets update does start but fails because snapshot is expired with self.assertRaises(exceptions.ExpiredMetadataError): self.trusted_set.update_targets(self.metadata[Targets.type]) def test_update_snapshot_successful_rollback_checks(self) -> None: def meta_version_bump(timestamp: Timestamp) -> None: timestamp.snapshot_meta.version += 1 # The timestamp version must be increased to initiate an update.
timestamp.version += 1 def version_bump(snapshot: Snapshot) -> None: snapshot.version += 1 # load a "local" timestamp, then update to newer one: self.trusted_set.update_timestamp(self.metadata[Timestamp.type]) new_timestamp = self.modify_metadata(Timestamp.type, meta_version_bump) self.trusted_set.update_timestamp(new_timestamp) # load a "local" snapshot with mismatching version (loading happens but # BadVersionNumberError is raised), then update to newer one: with self.assertRaises(exceptions.BadVersionNumberError): self.trusted_set.update_snapshot(self.metadata[Snapshot.type]) new_snapshot = self.modify_metadata(Snapshot.type, version_bump) self.trusted_set.update_snapshot(new_snapshot) # update targets to trigger final snapshot meta version check self.trusted_set.update_targets(self.metadata[Targets.type]) def test_update_targets_no_meta_in_snapshot(self) -> None: def no_meta_modifier(snapshot: Snapshot) -> None: snapshot.meta = {} snapshot = self.modify_metadata(Snapshot.type, no_meta_modifier) self._update_all_besides_targets( self.metadata[Timestamp.type], snapshot ) # remove meta information with information about targets from snapshot with self.assertRaises(exceptions.RepositoryError): self.trusted_set.update_targets(self.metadata[Targets.type]) def test_update_targets_hash_diverge_from_snapshot_meta_hash(self) -> None: def meta_length_modifier(snapshot: Snapshot) -> None: for metafile_path in snapshot.meta: snapshot.meta[metafile_path] = MetaFile(version=1, length=1) snapshot = self.modify_metadata(Snapshot.type, meta_length_modifier) self._update_all_besides_targets( self.metadata[Timestamp.type], snapshot ) # observed_hash != stored hash in snapshot meta for targets with self.assertRaises(exceptions.RepositoryError): self.trusted_set.update_targets(self.metadata[Targets.type]) def test_update_targets_version_diverge_snapshot_meta_version(self) -> None: def meta_modifier(snapshot: Snapshot) -> None: for metafile_path in snapshot.meta: snapshot.meta[metafile_path] = MetaFile(version=2) snapshot = self.modify_metadata(Snapshot.type, meta_modifier) self._update_all_besides_targets( self.metadata[Timestamp.type], snapshot ) # new_delegate.signed.version != meta.version stored in snapshot with self.assertRaises(exceptions.BadVersionNumberError): self.trusted_set.update_targets(self.metadata[Targets.type]) def test_update_targets_expired_new_target(self) -> None: self._update_all_besides_targets() # new_delegated_target has expired def target_expired_modifier(target: Targets) -> None: target.expires = datetime(1970, 1, 1, tzinfo=timezone.utc) targets = self.modify_metadata(Targets.type, target_expired_modifier) with self.assertRaises(exceptions.ExpiredMetadataError): self.trusted_set.update_targets(targets) # TODO test updating over initial metadata (new keys, newer timestamp, etc) def test_load_from_simple_envelope(self) -> None: """Basic unit test for ``_load_from_simple_envelope`` helper. 
TODO: Test via trusted metadata set tests like for traditional metadata """ metadata = Metadata.from_bytes(self.metadata[Root.type]) root = metadata.signed envelope = SimpleEnvelope.from_signed(root) # Unwrap unsigned envelope without verification envelope_bytes = envelope.to_bytes() payload_obj, signed_bytes, signatures = _load_from_simple_envelope( Root, envelope_bytes ) self.assertEqual(payload_obj, root) self.assertEqual(signed_bytes, envelope.pae()) self.assertDictEqual(signatures, {}) # Unwrap correctly signed envelope (use default role name) sig = envelope.sign(self.keystore[Root.type]) envelope_bytes = envelope.to_bytes() _, _, signatures = _load_from_simple_envelope( Root, envelope_bytes, root ) self.assertDictEqual(signatures, {sig.keyid: sig}) # Load correctly signed envelope (with explicit role name) _, _, signatures = _load_from_simple_envelope( Root, envelope.to_bytes(), root, Root.type ) self.assertDictEqual(signatures, {sig.keyid: sig}) # Fail load envelope with unexpected 'payload_type' envelope_bad_type = SimpleEnvelope.from_signed(root) envelope_bad_type.payload_type = "foo" envelope_bad_type_bytes = envelope_bad_type.to_bytes() with self.assertRaises(exceptions.RepositoryError): _load_from_simple_envelope(Root, envelope_bad_type_bytes) # Fail load envelope with unexpected payload type envelope_bad_signed = SimpleEnvelope.from_signed(root) envelope_bad_signed_bytes = envelope_bad_signed.to_bytes() with self.assertRaises(exceptions.RepositoryError): _load_from_simple_envelope(Targets, envelope_bad_signed_bytes) if __name__ == "__main__": utils.configure_test_logging(sys.argv) unittest.main() python-tuf-5.1.0/tests/test_updater_consistent_snapshot.py000066400000000000000000000232651470074210500242510ustar00rootroot00000000000000# Copyright 2021, New York University and the TUF contributors # SPDX-License-Identifier: MIT OR Apache-2.0 """Test ngclient Updater toggling consistent snapshot""" import os import sys import tempfile import unittest from typing import Any, Dict, Iterable, List, Optional from tests import utils from tests.repository_simulator import RepositorySimulator from tuf.api.metadata import ( SPECIFICATION_VERSION, TOP_LEVEL_ROLE_NAMES, DelegatedRole, TargetFile, Targets, ) from tuf.ngclient import Updater class TestConsistentSnapshot(unittest.TestCase): """Test different combinations of 'consistent_snapshot' and 'prefix_targets_with_hash' and verify that the correct URLs are formed for each combination""" # set dump_dir to trigger repository state dumps dump_dir: Optional[str] = None def setUp(self) -> None: self.subtest_count = 0 self.temp_dir = tempfile.TemporaryDirectory() self.metadata_dir = os.path.join(self.temp_dir.name, "metadata") self.targets_dir = os.path.join(self.temp_dir.name, "targets") os.mkdir(self.metadata_dir) os.mkdir(self.targets_dir) self.sim: RepositorySimulator def tearDown(self) -> None: self.temp_dir.cleanup() def setup_subtest( self, consistent_snapshot: bool, prefix_targets: bool = True ) -> None: self.sim = self._init_repo(consistent_snapshot, prefix_targets) self.subtest_count += 1 if self.dump_dir is not None: # create subtest dumpdir name = f"{self.id().split('.')[-1]}-{self.subtest_count}" self.sim.dump_dir = os.path.join(self.dump_dir, name) os.mkdir(self.sim.dump_dir) def teardown_subtest(self) -> None: if self.dump_dir is not None: self.sim.write() utils.cleanup_dir(self.metadata_dir) def _init_repo( self, consistent_snapshot: bool, prefix_targets: bool = True ) -> RepositorySimulator: """Create a new RepositorySimulator 
instance""" sim = RepositorySimulator() sim.root.consistent_snapshot = consistent_snapshot sim.root.version += 1 sim.publish_root() sim.prefix_targets_with_hash = prefix_targets # Init trusted root with the latest consistent_snapshot with open(os.path.join(self.metadata_dir, "root.json"), "bw") as f: f.write(sim.signed_roots[-1]) return sim def _init_updater(self) -> Updater: """Create a new Updater instance""" return Updater( self.metadata_dir, "https://example.com/metadata/", self.targets_dir, "https://example.com/targets/", self.sim, ) def _assert_metadata_files_exist(self, roles: Iterable[str]) -> None: """Assert that local metadata files exist for 'roles'""" local_metadata_files = os.listdir(self.metadata_dir) for role in roles: self.assertIn(f"{role}.json", local_metadata_files) def _assert_targets_files_exist(self, filenames: Iterable[str]) -> None: """Assert that local files with 'filenames' exist""" local_target_files = os.listdir(self.targets_dir) for filename in filenames: self.assertIn(filename, local_target_files) top_level_roles_data: utils.DataSet = { "consistent_snaphot disabled": { "consistent_snapshot": False, "calls": [ ("root", 3), ("timestamp", None), ("snapshot", None), ("targets", None), ], }, "consistent_snaphot enabled": { "consistent_snapshot": True, "calls": [ ("root", 3), ("timestamp", None), ("snapshot", 1), ("targets", 1), ], }, } @utils.run_sub_tests_with_dataset(top_level_roles_data) def test_top_level_roles_update( self, test_case_data: Dict[str, Any] ) -> None: # Test if the client fetches and stores metadata files with the # correct version prefix, depending on 'consistent_snapshot' config try: consistent_snapshot: bool = test_case_data["consistent_snapshot"] exp_calls: List[Any] = test_case_data["calls"] self.setup_subtest(consistent_snapshot) updater = self._init_updater() # cleanup fetch tracker metadata self.sim.fetch_tracker.metadata.clear() updater.refresh() # metadata files are fetched with the expected version (or None) self.assertListEqual(self.sim.fetch_tracker.metadata, exp_calls) # metadata files are always persisted without a version prefix self._assert_metadata_files_exist(TOP_LEVEL_ROLE_NAMES) finally: self.teardown_subtest() delegated_roles_data: utils.DataSet = { "consistent_snaphot disabled": { "consistent_snapshot": False, "expected_version": None, }, "consistent_snaphot enabled": { "consistent_snapshot": True, "expected_version": 1, }, } @utils.run_sub_tests_with_dataset(delegated_roles_data) def test_delegated_roles_update( self, test_case_data: Dict[str, Any] ) -> None: # Test if the client fetches and stores delegated metadata files with # the correct version prefix, depending on 'consistent_snapshot' config try: consistent_snapshot: bool = test_case_data["consistent_snapshot"] exp_version: Optional[int] = test_case_data["expected_version"] rolenames = ["role1", "..", "."] exp_calls = [(role, exp_version) for role in rolenames] self.setup_subtest(consistent_snapshot) # Add new delegated targets spec_version = ".".join(SPECIFICATION_VERSION) for role in rolenames: delegated_role = DelegatedRole(role, [], 1, False, ["*"], None) targets = Targets( 1, spec_version, self.sim.safe_expiry, {}, None ) self.sim.add_delegation("targets", delegated_role, targets) self.sim.update_snapshot() updater = self._init_updater() updater.refresh() # cleanup fetch tracker metadata self.sim.fetch_tracker.metadata.clear() # trigger updater to fetch the delegated metadata updater.get_targetinfo("anything") # metadata files are fetched with the expected 
version (or None) self.assertListEqual(self.sim.fetch_tracker.metadata, exp_calls) # metadata files are always persisted without a version prefix self._assert_metadata_files_exist(rolenames) finally: self.teardown_subtest() targets_download_data: utils.DataSet = { "consistent_snapshot disabled": { "consistent_snapshot": False, "prefix_targets": True, "hash_algo": None, "targetpaths": ["file", "file.txt", "..file.ext", "f.le"], }, "consistent_snapshot enabled without prefixed targets": { "consistent_snapshot": True, "prefix_targets": False, "hash_algo": None, "targetpaths": ["file", "file.txt", "..file.ext", "f.le"], }, "consistent_snapshot enabled with prefixed targets": { "consistent_snapshot": True, "prefix_targets": True, "hash_algo": "sha256", "targetpaths": ["file", "file.txt", "..file.ext", "f.le"], }, } @utils.run_sub_tests_with_dataset(targets_download_data) def test_download_targets(self, test_case_data: Dict[str, Any]) -> None: # Test if the client fetches and stores target files with # the correct hash prefix, depending on 'consistent_snapshot' # and 'prefix_targets_with_hash' config try: consistent_snapshot: bool = test_case_data["consistent_snapshot"] prefix_targets_with_hash: bool = test_case_data["prefix_targets"] hash_algo: Optional[str] = test_case_data["hash_algo"] targetpaths: List[str] = test_case_data["targetpaths"] self.setup_subtest(consistent_snapshot, prefix_targets_with_hash) # Add targets to repository for targetpath in targetpaths: self.sim.targets.version += 1 self.sim.add_target("targets", b"content", targetpath) self.sim.update_snapshot() updater = self._init_updater() updater.config.prefix_targets_with_hash = prefix_targets_with_hash updater.refresh() for path in targetpaths: info = updater.get_targetinfo(path) assert isinstance(info, TargetFile) updater.download_target(info) # target files are always persisted without hash prefix self._assert_targets_files_exist([info.path]) # files are fetched with the expected hash prefix (or None) exp_calls = [ (path, None if not hash_algo else info.hashes[hash_algo]) ] self.assertListEqual(self.sim.fetch_tracker.targets, exp_calls) self.sim.fetch_tracker.targets.clear() finally: self.teardown_subtest() if __name__ == "__main__": if "--dump" in sys.argv: TestConsistentSnapshot.dump_dir = tempfile.mkdtemp() print( f"Repository Simulator dumps in {TestConsistentSnapshot.dump_dir}" ) sys.argv.remove("--dump") utils.configure_test_logging(sys.argv) unittest.main() python-tuf-5.1.0/tests/test_updater_delegation_graphs.py000066400000000000000000000541201470074210500236120ustar00rootroot00000000000000# Copyright 2021, New York University and the TUF contributors # SPDX-License-Identifier: MIT OR Apache-2.0 """Test updating delegated targets roles and searching for target files with various delegation graphs""" import os import sys import tempfile import unittest from dataclasses import astuple, dataclass, field from typing import Iterable, List, Optional from tests import utils from tests.repository_simulator import RepositorySimulator from tuf.api.exceptions import UnsignedMetadataError from tuf.api.metadata import ( SPECIFICATION_VERSION, TOP_LEVEL_ROLE_NAMES, DelegatedRole, Targets, ) from tuf.ngclient import Updater @dataclass class TestDelegation: delegator: str rolename: str keyids: List[str] = field(default_factory=list) threshold: int = 1 terminating: bool = False paths: Optional[List[str]] = field(default_factory=lambda: ["*"]) path_hash_prefixes: Optional[List[str]] = None @dataclass class TestTarget: rolename: str
content: bytes targetpath: str @dataclass class DelegationsTestCase: """A delegations graph as lists of delegations and target files and the expected order of traversal as a list of role names.""" delegations: List[TestDelegation] target_files: List[TestTarget] = field(default_factory=list) visited_order: List[str] = field(default_factory=list) @dataclass class TargetTestCase: targetpath: str found: bool visited_order: List[str] = field(default_factory=list) class TestDelegations(unittest.TestCase): """Base class for delegation tests""" # set dump_dir to trigger repository state dumps dump_dir: Optional[str] = None def setUp(self) -> None: self.subtest_count = 0 self.temp_dir = tempfile.TemporaryDirectory() self.metadata_dir = os.path.join(self.temp_dir.name, "metadata") self.targets_dir = os.path.join(self.temp_dir.name, "targets") os.mkdir(self.metadata_dir) os.mkdir(self.targets_dir) self.sim: RepositorySimulator def tearDown(self) -> None: self.temp_dir.cleanup() def setup_subtest(self) -> None: self.subtest_count += 1 if self.dump_dir is not None: # create subtest dumpdir name = f"{self.id().split('.')[-1]}-{self.subtest_count}" self.sim.dump_dir = os.path.join(self.dump_dir, name) os.mkdir(self.sim.dump_dir) # dump the repo simulator metadata self.sim.write() def teardown_subtest(self) -> None: utils.cleanup_dir(self.metadata_dir) def _init_repo(self, test_case: DelegationsTestCase) -> None: """Create a new RepositorySimulator instance and populate it with delegations and target files""" self.sim = RepositorySimulator() spec_version = ".".join(SPECIFICATION_VERSION) for d in test_case.delegations: if d.rolename in self.sim.md_delegates: targets = self.sim.md_delegates[d.rolename].signed else: targets = Targets( 1, spec_version, self.sim.safe_expiry, {}, None ) # unpack 'd' but skip "delegator" role = DelegatedRole(*astuple(d)[1:]) self.sim.add_delegation(d.delegator, role, targets) for target in test_case.target_files: self.sim.add_target(*astuple(target)) if test_case.target_files: self.sim.targets.version += 1 self.sim.update_snapshot() def _init_updater(self) -> Updater: """Create a new Updater instance""" # Init trusted root for Updater with open(os.path.join(self.metadata_dir, "root.json"), "bw") as f: f.write(self.sim.signed_roots[0]) return Updater( self.metadata_dir, "https://example.com/metadata/", self.targets_dir, "https://example.com/targets/", self.sim, ) def _assert_files_exist(self, roles: Iterable[str]) -> None: """Assert that local metadata files exist for 'roles'""" expected_files = sorted([f"{role}.json" for role in roles]) local_metadata_files = sorted(os.listdir(self.metadata_dir)) self.assertListEqual(local_metadata_files, expected_files) class TestDelegationsGraphs(TestDelegations): """Test creating delegations graphs with different complexity and successfully updating the delegated roles metadata""" graphs: utils.DataSet = { "basic delegation": DelegationsTestCase( delegations=[TestDelegation("targets", "A")], visited_order=["A"], ), "single level delegations": DelegationsTestCase( delegations=[ TestDelegation("targets", "A"), TestDelegation("targets", "B"), ], visited_order=["A", "B"], ), "two-level delegations": DelegationsTestCase( delegations=[ TestDelegation("targets", "A"), TestDelegation("targets", "B"), TestDelegation("B", "C"), ], visited_order=["A", "B", "C"], ), "two-level test DFS order of traversal": DelegationsTestCase( delegations=[ TestDelegation("targets", "A"), TestDelegation("targets", "B"), TestDelegation("A", "C"), TestDelegation("A", 
"D"), ], visited_order=["A", "C", "D", "B"], ), "three-level delegation test DFS order of traversal": DelegationsTestCase( delegations=[ TestDelegation("targets", "A"), TestDelegation("targets", "B"), TestDelegation("A", "C"), TestDelegation("C", "D"), ], visited_order=["A", "C", "D", "B"], ), "two-level terminating ignores all but role's descendants": DelegationsTestCase( delegations=[ TestDelegation("targets", "A"), TestDelegation("targets", "B"), TestDelegation("A", "C", terminating=True), TestDelegation("A", "D"), ], visited_order=["A", "C"], ), "three-level terminating ignores all but role's descendants": DelegationsTestCase( delegations=[ TestDelegation("targets", "A"), TestDelegation("targets", "B"), TestDelegation("A", "C", terminating=True), TestDelegation("C", "D"), ], visited_order=["A", "C", "D"], ), "two-level ignores all branches not matching 'paths'": DelegationsTestCase( delegations=[ TestDelegation("targets", "A", paths=["*.py"]), TestDelegation("targets", "B"), TestDelegation("A", "C"), ], visited_order=["B"], ), "three-level ignores all branches not matching 'paths'": DelegationsTestCase( delegations=[ TestDelegation("targets", "A"), TestDelegation("targets", "B"), TestDelegation("A", "C", paths=["*.py"]), TestDelegation("C", "D"), ], visited_order=["A", "B"], ), "cyclic graph": DelegationsTestCase( delegations=[ TestDelegation("targets", "A"), TestDelegation("targets", "B"), TestDelegation("B", "C"), TestDelegation("C", "D"), TestDelegation("D", "B"), ], visited_order=["A", "B", "C", "D"], ), "two roles delegating to a third": DelegationsTestCase( delegations=[ TestDelegation("targets", "A"), TestDelegation("targets", "B"), TestDelegation("B", "C"), TestDelegation("A", "C"), ], # Under all same conditions, 'C' is reached through 'A' first" visited_order=["A", "C", "B"], ), "two roles delegating to a third different 'paths'": DelegationsTestCase( delegations=[ TestDelegation("targets", "A"), TestDelegation("targets", "B"), TestDelegation("B", "C"), TestDelegation("A", "C", paths=["*.py"]), ], # 'C' is reached through 'B' since 'A' does not delegate a matching pattern" visited_order=["A", "B", "C"], ), "max number of delegations": DelegationsTestCase( delegations=[ TestDelegation("targets", "A"), TestDelegation("targets", "B"), TestDelegation("targets", "C"), TestDelegation("C", "D"), TestDelegation("C", "E"), ], # "E" is skipped, max_delegations is 4 visited_order=["A", "B", "C", "D"], ), } @utils.run_sub_tests_with_dataset(graphs) def test_graph_traversal(self, test_data: DelegationsTestCase) -> None: """Test that delegated roles are traversed in the order of appearance in the delegator's metadata, using pre-order depth-first search""" try: exp_files = [*TOP_LEVEL_ROLE_NAMES, *test_data.visited_order] exp_calls = [(role, 1) for role in test_data.visited_order] self._init_repo(test_data) self.setup_subtest() updater = self._init_updater() # restrict the max number of delegations to simplify the test updater.config.max_delegations = 4 # Call explicitly refresh to simplify the expected_calls list updater.refresh() self.sim.fetch_tracker.metadata.clear() # Check that metadata dir contains only top-level roles self._assert_files_exist(TOP_LEVEL_ROLE_NAMES) # Looking for a non-existing targetpath forces updater # to visit all possible delegated roles targetfile = updater.get_targetinfo("missingpath") self.assertIsNone(targetfile) # Check that the delegated roles were visited in the expected # order and the corresponding metadata files were persisted 
self.assertListEqual(self.sim.fetch_tracker.metadata, exp_calls) self._assert_files_exist(exp_files) finally: self.teardown_subtest() invalid_metadata: utils.DataSet = { "unsigned delegated role": DelegationsTestCase( delegations=[ TestDelegation("targets", "invalid"), TestDelegation("targets", "B"), TestDelegation("invalid", "C"), ], # The traversal stops after visiting an invalid role visited_order=["invalid"], ) } @utils.run_sub_tests_with_dataset(invalid_metadata) def test_invalid_metadata(self, test_data: DelegationsTestCase) -> None: try: self._init_repo(test_data) # The invalid role is the last visited invalid_role = test_data.visited_order[-1] self.sim.signers[invalid_role].clear() self.setup_subtest() # The invalid role metadata must not be persisted exp_files = [*TOP_LEVEL_ROLE_NAMES, *test_data.visited_order[:-1]] exp_calls = [(role, 1) for role in test_data.visited_order] updater = self._init_updater() # Call refresh explicitly to simplify the expected_calls list updater.refresh() self.sim.fetch_tracker.metadata.clear() with self.assertRaises(UnsignedMetadataError): updater.get_targetinfo("missingpath") # Check that there were no visited roles after the invalid one # and only the valid metadata files were persisted self.assertListEqual(self.sim.fetch_tracker.metadata, exp_calls) self._assert_files_exist(exp_files) finally: self.teardown_subtest() def test_safely_encoded_rolenames(self) -> None: """Test that delegated role names are safely encoded in the filenames and URLs. """ roles_to_filenames = { "../a": "..%2Fa.json", ".": "..json", "/": "%2F.json", "ö": "%C3%B6.json", } delegations = [] for rolename in roles_to_filenames: delegations.append(TestDelegation("targets", rolename)) delegated_rolenames = DelegationsTestCase(delegations) self._init_repo(delegated_rolenames) updater = self._init_updater() updater.refresh() # trigger updater to fetch the delegated metadata self.sim.fetch_tracker.metadata.clear() updater.get_targetinfo("anything") # assert that local delegated metadata filenames are expected local_metadata = os.listdir(self.metadata_dir) for fname in roles_to_filenames.values(): self.assertTrue(fname in local_metadata) # assert that requested URLs are quoted without extension exp_calls = [(quoted[:-5], 1) for quoted in roles_to_filenames.values()] self.assertListEqual(self.sim.fetch_tracker.metadata, exp_calls) hash_bins_graph: utils.DataSet = { "delegations": DelegationsTestCase( delegations=[ TestDelegation( "targets", "role1", paths=None, path_hash_prefixes=["8", "9", "a", "b"], ), TestDelegation( "targets", "role2", paths=None, path_hash_prefixes=["0", "1", "2", "3"], ), TestDelegation( "targets", "role3", paths=None, path_hash_prefixes=["c", "d", "e", "f"], ), ], visited_order=["role1", "role2", "role3"], ), } @utils.run_sub_tests_with_dataset(hash_bins_graph) def test_hash_bins_graph_traversal( self, test_data: DelegationsTestCase ) -> None: """Test that delegated roles are traversed in the order of appearance in the delegator's metadata, using pre-order depth-first search and that they correctly refer to the corresponding hash bin prefixes""" try: exp_files = [*TOP_LEVEL_ROLE_NAMES, *test_data.visited_order] exp_calls = [(role, 1) for role in test_data.visited_order] self._init_repo(test_data) self.setup_subtest() updater = self._init_updater() # Call refresh explicitly to simplify the expected_calls list updater.refresh() self.sim.fetch_tracker.metadata.clear() # Check that metadata dir contains only top-level roles
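# (refresh() downloads only root, timestamp, snapshot and targets; the # delegated hash bin metadata is fetched lazily by get_targetinfo() below, # which is why only the top-level files are expected on disk at this point.)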
self._assert_files_exist(TOP_LEVEL_ROLE_NAMES) # Looking for a non-existing targetpath forces updater # to visit a corresponding delegated role targetfile = updater.get_targetinfo("missingpath") self.assertIsNone(targetfile) targetfile = updater.get_targetinfo("othermissingpath") self.assertIsNone(targetfile) targetfile = updater.get_targetinfo("thirdmissingpath") self.assertIsNone(targetfile) # Check that the delegated roles were visited in the expected # order and the corresponding metadata files were persisted self.assertListEqual(self.sim.fetch_tracker.metadata, exp_calls) self._assert_files_exist(exp_files) finally: self.teardown_subtest() @dataclass class SuccinctRolesTestCase: bit_length: int target_path: str expected_target_bin: str # By setting the bit_length the total number of bins is 2^bit_length. # In each test case target_path is a path to a random target we want to # fetch and expected_target_bin is the bin we are expecting to visit. succinct_bins_graph: utils.DataSet = { "bin amount = 2, target bin index 0": SuccinctRolesTestCase( bit_length=1, target_path="boo", expected_target_bin="bin-0", ), "bin amount = 2, target bin index 1": SuccinctRolesTestCase( bit_length=1, target_path="too", expected_target_bin="bin-1", ), "bin amount = 4, target bin index 0": SuccinctRolesTestCase( bit_length=2, target_path="foo", expected_target_bin="bin-0", ), "bin amount = 4, target bin index 1": SuccinctRolesTestCase( bit_length=2, target_path="doo", expected_target_bin="bin-1", ), "bin amount = 4, target bin index 2": SuccinctRolesTestCase( bit_length=2, target_path="too", expected_target_bin="bin-2", ), "bin amount = 4, target bin index 3": SuccinctRolesTestCase( bit_length=2, target_path="bar", expected_target_bin="bin-3", ), "bin amount = 256, target bin index fc": SuccinctRolesTestCase( bit_length=8, target_path="bar", expected_target_bin="bin-fc", ), } @utils.run_sub_tests_with_dataset(succinct_bins_graph) def test_succinct_roles_graph_traversal( self, test_data: SuccinctRolesTestCase ) -> None: # Test traversing the delegation tree when succinct roles are used. For a # successful traversal all top level metadata files plus the expected # bin should exist locally and only one bin must be downloaded. try: exp_files = [*TOP_LEVEL_ROLE_NAMES, test_data.expected_target_bin] exp_calls = [(test_data.expected_target_bin, 1)] self.sim = RepositorySimulator() self.sim.add_succinct_roles("targets", test_data.bit_length, "bin") self.sim.update_snapshot() self.setup_subtest() updater = self._init_updater() # Call refresh explicitly to simplify the expected_calls list. updater.refresh() self.sim.fetch_tracker.metadata.clear() # Check that metadata dir contains only top-level roles self._assert_files_exist(TOP_LEVEL_ROLE_NAMES) # Looking for a non-existing targetpath forces updater # to visit a corresponding delegated role. targetfile = updater.get_targetinfo(test_data.target_path) self.assertIsNone(targetfile) # Check that the delegated roles were visited in the expected # order and the corresponding metadata files were persisted. self.assertListEqual(self.sim.fetch_tracker.metadata, exp_calls) self._assert_files_exist(exp_files) finally: self.teardown_subtest() class TestTargetFileSearch(TestDelegations): r""" Create a single repository with the following delegations: targets *.doc, *.md / \ releases/*/* A B releases/x/* / \ releases/y/*.zip C D Test that Updater successfully finds the target files metadata, traversing the delegations as expected.
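For example, looking up 'releases/x/x_v1' should first match B's 'releases/*/*' pattern and then C's 'releases/x/*', giving the visited order B, C.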
""" delegations_tree = DelegationsTestCase( delegations=[ TestDelegation("targets", "A", paths=["*.doc", "*.md"]), TestDelegation("targets", "B", paths=["releases/*/*"]), TestDelegation("B", "C", paths=["releases/x/*"]), TestDelegation("B", "D", paths=["releases/y/*.zip"]), ], target_files=[ TestTarget("targets", b"targetfile content", "targetfile"), TestTarget("A", b"README by A", "README.md"), TestTarget("C", b"x release by C", "releases/x/x_v1"), TestTarget("D", b"y release by D", "releases/y/y_v1.zip"), TestTarget("D", b"z release by D", "releases/z/z_v1.zip"), ], ) def setUp(self) -> None: super().setUp() self._init_repo(self.delegations_tree) # fmt: off targets: utils.DataSet = { "no delegations": TargetTestCase("targetfile", True, []), "targetpath matches wildcard": TargetTestCase("README.md", True, ["A"]), "targetpath with separators x": TargetTestCase("releases/x/x_v1", True, ["B", "C"]), "targetpath with separators y": TargetTestCase("releases/y/y_v1.zip", True, ["B", "D"]), "targetpath is not delegated by all roles in the chain": TargetTestCase("releases/z/z_v1.zip", False, ["B"]), } # fmt: on @utils.run_sub_tests_with_dataset(targets) def test_targetfile_search(self, test_data: TargetTestCase) -> None: try: self.setup_subtest() exp_files = [*TOP_LEVEL_ROLE_NAMES, *test_data.visited_order] exp_calls = [(role, 1) for role in test_data.visited_order] exp_target = self.sim.target_files[test_data.targetpath].target_file updater = self._init_updater() # Call explicitly refresh to simplify the expected_calls list updater.refresh() self.sim.fetch_tracker.metadata.clear() target = updater.get_targetinfo(test_data.targetpath) if target is not None: # Confirm that the expected TargetFile is found self.assertTrue(test_data.found) self.assertDictEqual(target.to_dict(), exp_target.to_dict()) else: self.assertFalse(test_data.found) # Check that the delegated roles were visited in the expected # order and the corresponding metadata files were persisted self.assertListEqual(self.sim.fetch_tracker.metadata, exp_calls) self._assert_files_exist(exp_files) finally: self.teardown_subtest() if __name__ == "__main__": if "--dump" in sys.argv: TestDelegations.dump_dir = tempfile.mkdtemp() print(f"Repository Simulator dumps in {TestDelegations.dump_dir}") sys.argv.remove("--dump") utils.configure_test_logging(sys.argv) unittest.main() python-tuf-5.1.0/tests/test_updater_fetch_target.py000066400000000000000000000204171470074210500225740ustar00rootroot00000000000000# Copyright 2021, New York University and the TUF contributors # SPDX-License-Identifier: MIT OR Apache-2.0 """Test 'Fetch target' from 'Detailed client workflow' as well as target files storing/loading from cache. 
""" import os import sys import tempfile import unittest from dataclasses import dataclass from typing import Optional from tests import utils from tests.repository_simulator import RepositorySimulator from tuf.api.exceptions import RepositoryError from tuf.api.metadata import DelegatedRole, Delegations from tuf.ngclient import Updater @dataclass class TestTarget: path: str content: bytes encoded_path: str class TestFetchTarget(unittest.TestCase): """Test ngclient downloading and caching target files.""" # set dump_dir to trigger repository state dumps dump_dir: Optional[str] = None def setUp(self) -> None: self.temp_dir = tempfile.TemporaryDirectory() self.metadata_dir = os.path.join(self.temp_dir.name, "metadata") self.targets_dir = os.path.join(self.temp_dir.name, "targets") os.mkdir(self.metadata_dir) os.mkdir(self.targets_dir) # Setup the repository, bootstrap client root.json self.sim = RepositorySimulator() with open(os.path.join(self.metadata_dir, "root.json"), "bw") as f: f.write(self.sim.signed_roots[0]) if self.dump_dir is not None: # create test specific dump directory name = self.id().split(".")[-1] self.sim.dump_dir = os.path.join(self.dump_dir, name) os.mkdir(self.sim.dump_dir) def tearDown(self) -> None: self.temp_dir.cleanup() def _init_updater(self) -> Updater: """Creates a new updater instance.""" if self.sim.dump_dir is not None: self.sim.write() return Updater( self.metadata_dir, "https://example.com/metadata/", self.targets_dir, "https://example.com/targets/", self.sim, ) targets: utils.DataSet = { "standard case": TestTarget( path="targetpath", content=b"target content", encoded_path="targetpath", ), "non-asci case": TestTarget( path="åäö", content=b"more content", encoded_path="%C3%A5%C3%A4%C3%B6", ), "subdirectory case": TestTarget( path="a/b/c/targetpath", content=b"dir target content", encoded_path="a%2Fb%2Fc%2Ftargetpath", ), } @utils.run_sub_tests_with_dataset(targets) def test_fetch_target(self, target: TestTarget) -> None: path = os.path.join(self.targets_dir, target.encoded_path) updater = self._init_updater() # target does not exist yet self.assertIsNone(updater.get_targetinfo(target.path)) # Add targets to repository self.sim.targets.version += 1 self.sim.add_target("targets", target.content, target.path) self.sim.update_snapshot() updater = self._init_updater() # target now exists, is not in cache yet info = updater.get_targetinfo(target.path) assert info is not None # Test without and with explicit local filepath self.assertIsNone(updater.find_cached_target(info)) self.assertIsNone(updater.find_cached_target(info, path)) # download target, assert it is in cache and content is correct self.assertEqual(path, updater.download_target(info)) self.assertEqual(path, updater.find_cached_target(info)) self.assertEqual(path, updater.find_cached_target(info, path)) with open(path, "rb") as f: self.assertEqual(f.read(), target.content) # download using explicit filepath as well os.remove(path) self.assertEqual(path, updater.download_target(info, path)) self.assertEqual(path, updater.find_cached_target(info)) self.assertEqual(path, updater.find_cached_target(info, path)) def test_download_targets_with_succinct_roles(self) -> None: self.sim.add_succinct_roles("targets", 8, "bin") self.sim.update_snapshot() assert self.sim.targets.delegations is not None assert self.sim.targets.delegations.succinct_roles is not None succinct_roles = self.sim.targets.delegations.succinct_roles # Add lots of targets with unique data to imitate a real repository. 
for i in range(20): target_name = f"target-{i}" target_bin = succinct_roles.get_role_for_target(target_name) self.sim.add_target( target_bin, bytes(target_name, "utf-8"), target_name ) # download each target updater = self._init_updater() for i in range(20): target_name = f"target-{i}" # Verify that the target info was successfully found. target_info = updater.get_targetinfo(target_name) assert target_info is not None target_full_path = updater.download_target(target_info) # Verify that the target content is the same as the target name. with open(target_full_path, encoding="utf-8") as target: self.assertEqual(target.read(), target_name) def test_invalid_target_download(self) -> None: target = TestTarget("targetpath", b"content", "targetpath") # Add target to repository self.sim.targets.version += 1 self.sim.add_target("targets", target.content, target.path) self.sim.update_snapshot() updater = self._init_updater() info = updater.get_targetinfo(target.path) assert info is not None # Corrupt the file content to not match the hash self.sim.target_files[target.path].data = b"conten@" with self.assertRaises(RepositoryError): updater.download_target(info) # Corrupt the file content to not match the length self.sim.target_files[target.path].data = b"cont" with self.assertRaises(RepositoryError): updater.download_target(info) # Verify the file is not persisted in cache self.assertIsNone(updater.find_cached_target(info)) def test_invalid_target_cache(self) -> None: target = TestTarget("targetpath", b"content", "targetpath") # Add target to repository self.sim.targets.version += 1 self.sim.add_target("targets", target.content, target.path) self.sim.update_snapshot() # Download the target updater = self._init_updater() info = updater.get_targetinfo(target.path) assert info is not None path = updater.download_target(info) self.assertEqual(path, updater.find_cached_target(info)) # Add newer content to the same targetpath target.content = b"contentv2" self.sim.targets.version += 1 self.sim.add_target("targets", target.content, target.path) self.sim.update_snapshot() # Newer content is detected, old cached version is not used updater = self._init_updater() info = updater.get_targetinfo(target.path) assert info is not None self.assertIsNone(updater.find_cached_target(info)) # Download target, assert it is in cache and content is the newer one path = updater.download_target(info) self.assertEqual(path, updater.find_cached_target(info)) with open(path, "rb") as f: self.assertEqual(f.read(), target.content) def test_meta_missing_delegated_role(self) -> None: """Test a delegation where the role is not part of the snapshot""" # Add new delegation, update snapshot.
Do not add the actual role role = DelegatedRole("role1", [], 1, True, ["*"]) self.sim.targets.delegations = Delegations({}, roles={role.name: role}) self.sim.update_snapshot() # assert that RepositoryError is raised when role1 is needed updater = self._init_updater() with self.assertRaises(RepositoryError): updater.get_targetinfo("") if __name__ == "__main__": if "--dump" in sys.argv: TestFetchTarget.dump_dir = tempfile.mkdtemp() print(f"Repository Simulator dumps in {TestFetchTarget.dump_dir}") sys.argv.remove("--dump") utils.configure_test_logging(sys.argv) unittest.main() python-tuf-5.1.0/tests/test_updater_key_rotations.py000066400000000000000000000272021470074210500230260ustar00rootroot00000000000000# Copyright 2021, New York University and the TUF contributors # SPDX-License-Identifier: MIT OR Apache-2.0 """Test ngclient Updater key rotation handling""" import os import sys import tempfile import unittest from dataclasses import dataclass from typing import ClassVar, Dict, List, Optional, Type from securesystemslib.signer import CryptoSigner, Signer from tests import utils from tests.repository_simulator import RepositorySimulator from tests.utils import run_sub_tests_with_dataset from tuf.api.exceptions import UnsignedMetadataError from tuf.api.metadata import Key, Root from tuf.ngclient import Updater @dataclass class MdVersion: keys: List[int] threshold: int sigs: List[int] res: Optional[Type[Exception]] = None class TestUpdaterKeyRotations(unittest.TestCase): """Test ngclient root rotation handling""" # set dump_dir to trigger repository state dumps dump_dir: Optional[str] = None temp_dir: ClassVar[tempfile.TemporaryDirectory] keys: ClassVar[List[Key]] signers: ClassVar[List[Signer]] @classmethod def setUpClass(cls) -> None: cls.temp_dir = tempfile.TemporaryDirectory() # Pre-create a bunch of keys and signers cls.signers = [] for _ in range(10): signer = CryptoSigner.generate_ed25519() cls.signers.append(signer) @classmethod def tearDownClass(cls) -> None: cls.temp_dir.cleanup() def setup_subtest(self) -> None: # Setup repository for subtest: make sure no roots have been published self.sim = RepositorySimulator() self.sim.signed_roots.clear() self.sim.root.version = 0 if self.dump_dir is not None: # create subtest dumpdir name = f"{self.id().split('.')[-1]}-{self.case_name}" self.sim.dump_dir = os.path.join(self.dump_dir, name) os.mkdir(self.sim.dump_dir) def _run_refresh(self) -> None: """Create new updater, run refresh""" if self.sim.dump_dir is not None: self.sim.write() # bootstrap with initial root self.metadata_dir = tempfile.mkdtemp(dir=self.temp_dir.name) with open(os.path.join(self.metadata_dir, "root.json"), "bw") as f: f.write(self.sim.signed_roots[0]) updater = Updater( self.metadata_dir, "https://example.com/metadata/", fetcher=self.sim, ) updater.refresh() # fmt: off root_rotation_cases = { "1-of-1 key rotation": [ MdVersion(keys=[1], threshold=1, sigs=[1]), MdVersion(keys=[2], threshold=1, sigs=[2, 1]), MdVersion(keys=[2], threshold=1, sigs=[2]), ], "1-of-1 key rotation, unused signatures": [ MdVersion(keys=[1], threshold=1, sigs=[3, 1, 4]), MdVersion(keys=[2], threshold=1, sigs=[3, 2, 1, 4]), MdVersion(keys=[2], threshold=1, sigs=[3, 2, 4]), ], "1-of-1 key rotation fail: not signed with old key": [ MdVersion(keys=[1], threshold=1, sigs=[1]), MdVersion(keys=[2], threshold=1, sigs=[2, 3, 4], res=UnsignedMetadataError), ], "1-of-1 key rotation fail: not signed with new key": [ MdVersion(keys=[1], threshold=1, sigs=[1]), MdVersion(keys=[2], threshold=1, sigs=[1, 3, 
4], res=UnsignedMetadataError), ], "3-of-5, sign with different keycombos": [ MdVersion(keys=[0, 1, 2, 3, 4], threshold=3, sigs=[0, 2, 4]), MdVersion(keys=[0, 1, 2, 3, 4], threshold=3, sigs=[0, 4, 1]), MdVersion(keys=[0, 1, 2, 3, 4], threshold=3, sigs=[0, 1, 3]), MdVersion(keys=[0, 1, 2, 3, 4], threshold=3, sigs=[0, 1, 3]), ], "3-of-5, one key rotated": [ MdVersion(keys=[0, 1, 2, 3, 4], threshold=3, sigs=[0, 2, 4]), MdVersion(keys=[0, 1, 3, 4, 5], threshold=3, sigs=[0, 4, 1]), ], "3-of-5, one key rotation fails: not signed with 3 new keys": [ MdVersion(keys=[0, 1, 2, 3, 4], threshold=3, sigs=[0, 2, 4]), MdVersion(keys=[0, 1, 3, 4, 5], threshold=3, sigs=[0, 2, 4], res=UnsignedMetadataError), ], "3-of-5, one key rotation fails: not signed with 3 old keys": [ MdVersion(keys=[0, 1, 2, 3, 4], threshold=3, sigs=[0, 2, 4]), MdVersion(keys=[0, 1, 3, 4, 5], threshold=3, sigs=[0, 4, 5], res=UnsignedMetadataError), ], "3-of-5, one key rotated, with intermediate step": [ MdVersion(keys=[0, 1, 2, 3, 4], threshold=3, sigs=[0, 2, 4]), MdVersion(keys=[0, 1, 3, 4, 5], threshold=3, sigs=[0, 2, 4, 5]), MdVersion(keys=[0, 1, 3, 4, 5], threshold=3, sigs=[0, 4, 5]), ], "3-of-5, all keys rotated, with intermediate step": [ MdVersion(keys=[0, 1, 2, 3, 4], threshold=3, sigs=[0, 2, 4]), MdVersion(keys=[5, 6, 7, 8, 9], threshold=3, sigs=[0, 2, 4, 5, 6, 7]), MdVersion(keys=[5, 6, 7, 8, 9], threshold=3, sigs=[5, 6, 7]), ], "1-of-3 threshold increase to 2-of-3": [ MdVersion(keys=[1, 2, 3], threshold=1, sigs=[1]), MdVersion(keys=[1, 2, 3], threshold=2, sigs=[1, 2]), ], "1-of-3 threshold bump to 2-of-3 fails: new threshold not reached": [ MdVersion(keys=[1, 2, 3], threshold=1, sigs=[1]), MdVersion(keys=[1, 2, 3], threshold=2, sigs=[2], res=UnsignedMetadataError), ], "2-of-3 threshold decrease to 1-of-3": [ MdVersion(keys=[1, 2, 3], threshold=2, sigs=[1, 2]), MdVersion(keys=[1, 2, 3], threshold=1, sigs=[1, 2]), MdVersion(keys=[1, 2, 3], threshold=1, sigs=[1]), ], "2-of-3 threshold decr. to 1-of-3 fails: old threshold not reached": [ MdVersion(keys=[1, 2, 3], threshold=2, sigs=[1, 2]), MdVersion(keys=[1, 2, 3], threshold=1, sigs=[1], res=UnsignedMetadataError), ], "1-of-2 threshold increase to 2-of-2": [ MdVersion(keys=[1], threshold=1, sigs=[1]), MdVersion(keys=[1, 2], threshold=2, sigs=[1, 2]), ], } # fmt: on @run_sub_tests_with_dataset(root_rotation_cases) def test_root_rotation(self, root_versions: List[MdVersion]) -> None: """Test Updater.refresh() with various sequences of root updates Each MdVersion in the list describes root keys and signatures of a remote root metadata version. As an example: MdVersion([1,2,3], 2, [1,2]) defines a root that contains keys 1, 2 and 3 with threshold 2. The metadata is signed with keys 1 and 2. Assert that refresh() result is expected and that local root on disk is the expected one after all roots have been loaded from remote using the standard client update workflow.
""" self.setup_subtest() # Publish all remote root versions defined in root_versions for rootver in root_versions: # clear root keys, signers self.sim.root.roles[Root.type].keyids.clear() self.sim.signers[Root.type].clear() self.sim.root.roles[Root.type].threshold = rootver.threshold for i in rootver.keys: self.sim.root.add_key(self.signers[i].public_key, Root.type) for i in rootver.sigs: self.sim.add_signer(Root.type, self.signers[i]) self.sim.root.version += 1 self.sim.publish_root() # run client workflow, assert success/failure expected_error = root_versions[-1].res if expected_error is None: self._run_refresh() expected_local_root = self.sim.signed_roots[-1] else: # failure expected: local root should be the root before last with self.assertRaises(expected_error): self._run_refresh() expected_local_root = self.sim.signed_roots[-2] # assert local root on disk is expected with open(os.path.join(self.metadata_dir, "root.json"), "rb") as f: self.assertEqual(f.read(), expected_local_root) # fmt: off non_root_rotation_cases: Dict[str, MdVersion] = { "1-of-1 key rotation": MdVersion(keys=[2], threshold=1, sigs=[2]), "1-of-1 key rotation, unused signatures": MdVersion(keys=[1], threshold=1, sigs=[3, 1, 4]), "1-of-1 key rotation fail: not signed with new key": MdVersion(keys=[2], threshold=1, sigs=[1, 3, 4], res=UnsignedMetadataError), "3-of-5, one key signature wrong: not signed with 3 expected keys": MdVersion(keys=[0, 1, 3, 4, 5], threshold=3, sigs=[0, 2, 4], res=UnsignedMetadataError), "2-of-5, one key signature mising: threshold not reached": MdVersion(keys=[0, 1, 3, 4, 5], threshold=3, sigs=[0, 4], res=UnsignedMetadataError), "3-of-5, sign first combo": MdVersion(keys=[0, 1, 2, 3, 4], threshold=3, sigs=[0, 2, 4]), "3-of-5, sign second combo": MdVersion(keys=[0, 1, 2, 3, 4], threshold=3, sigs=[0, 4, 1]), "3-of-5, sign third combo": MdVersion(keys=[0, 1, 2, 3, 4], threshold=3, sigs=[0, 1, 3]), "3-of-5, sign fourth combo": MdVersion(keys=[0, 1, 2, 3, 4], threshold=3, sigs=[1, 2, 3]), "3-of-5, sign fifth combo": MdVersion(keys=[0, 1, 2, 3, 4], threshold=3, sigs=[2, 3, 4]), } # fmt: on @run_sub_tests_with_dataset(non_root_rotation_cases) def test_non_root_rotations(self, md_version: MdVersion) -> None: """Test Updater.refresh() with various sequences of metadata updates Each MdVersion in the list describes metadata keys and signatures of a remote metadata version. As an example: MdVersion([1,2,3], 2, [1,2]) defines a metadata that contains keys 1, 2 and 3 with threshold 2. The metadata is signed with keys 1 and 2. Assert that refresh() result is expected and that local metadata on disk is the expected one after all roots have been loaded from remote using the standard client update workflow. 
""" self.setup_subtest() roles = ["timestamp", "snapshot", "targets"] for role in roles: # clear role keys, signers self.sim.root.roles[role].keyids.clear() self.sim.signers[role].clear() self.sim.root.roles[role].threshold = md_version.threshold for i in md_version.keys: self.sim.root.add_key(self.signers[i].public_key, role) for i in md_version.sigs: self.sim.add_signer(role, self.signers[i]) self.sim.root.version += 1 self.sim.publish_root() # run client workflow, assert success/failure expected_error = md_version.res if expected_error is None: self._run_refresh() # Call fetch_metadata to sign metadata with new keys expected_local_md: bytes = self.sim.fetch_metadata(role) # assert local metadata role is on disk as expected md_path = os.path.join(self.metadata_dir, f"{role}.json") with open(md_path, "rb") as f: data = f.read() self.assertEqual(data, expected_local_md) else: # failure expected with self.assertRaises(expected_error): self._run_refresh() if __name__ == "__main__": if "--dump" in sys.argv: TestUpdaterKeyRotations.dump_dir = tempfile.mkdtemp() print(f"Repository dumps in {TestUpdaterKeyRotations.dump_dir}") sys.argv.remove("--dump") utils.configure_test_logging(sys.argv) unittest.main() python-tuf-5.1.0/tests/test_updater_ng.py000066400000000000000000000326041470074210500205420ustar00rootroot00000000000000# Copyright 2021, New York University and the TUF contributors # SPDX-License-Identifier: MIT OR Apache-2.0 """Test Updater class""" import logging import os import shutil import sys import tempfile import unittest from typing import Callable, ClassVar, List from unittest.mock import MagicMock, patch from securesystemslib.signer import Signer from tests import utils from tuf.api import exceptions from tuf.api.metadata import ( Metadata, Root, Snapshot, TargetFile, Targets, Timestamp, ) from tuf.ngclient import Updater, UpdaterConfig logger = logging.getLogger(__name__) class TestUpdater(unittest.TestCase): """Test the Updater class from 'tuf/ngclient/updater.py'.""" server_process_handler: ClassVar[utils.TestServerProcess] @classmethod def setUpClass(cls) -> None: cls.tmp_test_root_dir = tempfile.mkdtemp(dir=os.getcwd()) # Launch a SimpleHTTPServer # Test cases will request metadata and target files that have been # pre-generated in 'tuf/tests/repository_data', and are copied to # CWD/tmp_test_root_dir/* cls.server_process_handler = utils.TestServerProcess(log=logger) @classmethod def tearDownClass(cls) -> None: # Cleans resources, flush the logged lines (if any) and remove test dir cls.server_process_handler.clean() shutil.rmtree(cls.tmp_test_root_dir) def setUp(self) -> None: # Create tmp test dir inside of tmp test root dir to independently serve # new repository files for each test. We delete all tmp dirs at once in # tearDownClass after the server has released all resources. self.tmp_test_dir = tempfile.mkdtemp(dir=self.tmp_test_root_dir) # Copy the original repository files provided in the test folder so that # any modifications are restricted to the copies. # The 'repository_data' directory is expected to exist in 'tuf.tests/'. original_repository_files = os.path.join( utils.TESTS_DIR, "repository_data" ) original_repository = os.path.join( original_repository_files, "repository" ) original_keystore = os.path.join(original_repository_files, "keystore") original_client = os.path.join( original_repository_files, "client", "test_repository1", "metadata", "current", ) # Save references to the often-needed client repository directories. 
# Test cases need these references to access metadata and target files. self.repository_directory = os.path.join( self.tmp_test_dir, "repository" ) self.keystore_directory = os.path.join(self.tmp_test_dir, "keystore") self.client_directory = os.path.join(self.tmp_test_dir, "client") # Copy the original 'repository', 'client', and 'keystore' directories # to the temporary repository the test cases can use. shutil.copytree(original_repository, self.repository_directory) shutil.copytree(original_client, self.client_directory) shutil.copytree(original_keystore, self.keystore_directory) # 'path/to/tmp/repository' -> 'localhost:8001/tmp/repository'. repository_basepath = self.repository_directory[len(os.getcwd()) :] url_prefix = ( "http://" + utils.TEST_HOST_ADDRESS + ":" + str(self.server_process_handler.port) + repository_basepath.replace("\\", "/") ) self.metadata_url = f"{url_prefix}/metadata/" self.targets_url = f"{url_prefix}/targets/" self.dl_dir = tempfile.mkdtemp(dir=self.tmp_test_dir) # Creating a repository instance. The test cases will use this client # updater to refresh metadata, fetch target files, etc. self.updater = Updater( metadata_dir=self.client_directory, metadata_base_url=self.metadata_url, target_dir=self.dl_dir, target_base_url=self.targets_url, ) def tearDown(self) -> None: # Logs stdout and stderr from the server subprocess. self.server_process_handler.flush_log() def _modify_repository_root( self, modification_func: Callable[[Metadata], None], bump_version: bool = False, ) -> None: """Apply 'modification_func' to root and persist it.""" role_path = os.path.join( self.repository_directory, "metadata", "root.json" ) root = Metadata[Root].from_file(role_path) modification_func(root) if bump_version: root.signed.version += 1 root_key_path = os.path.join(self.keystore_directory, "root_key") uri = f"file2:{root_key_path}" role = root.signed.get_delegated_role(Root.type) key = root.signed.get_key(role.keyids[0]) signer = Signer.from_priv_key_uri(uri, key) root.sign(signer) root.to_file( os.path.join(self.repository_directory, "metadata", "root.json") ) root.to_file( os.path.join( self.repository_directory, "metadata", f"{root.signed.version}.root.json", ) ) def _assert_files(self, roles: List[str]) -> None: """Assert that local metadata files exist for 'roles'""" expected_files = [f"{role}.json" for role in roles] client_files = sorted(os.listdir(self.client_directory)) self.assertEqual(client_files, expected_files) def test_refresh_and_download(self) -> None: # Test refresh without consistent targets - targets without hash prefix.
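# (For contrast, assuming the consistent snapshot scheme of the TUF # specification: with consistent snapshots enabled, target files would be # requested as '<hash-digest>.<targetpath>' rather than the plain target # paths used below.)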
# top-level targets are already in local cache (but remove others) os.remove(os.path.join(self.client_directory, "role1.json")) os.remove(os.path.join(self.client_directory, "role2.json")) os.remove(os.path.join(self.client_directory, "1.root.json")) # top-level metadata is in local directory already self.updater.refresh() self._assert_files( [Root.type, Snapshot.type, Targets.type, Timestamp.type] ) # Get targetinfos, assert that cache does not contain files info1 = self.updater.get_targetinfo("file1.txt") assert isinstance(info1, TargetFile) self._assert_files( [Root.type, Snapshot.type, Targets.type, Timestamp.type] ) # Get targetinfo for 'file3.txt' listed in the delegated role1 info3 = self.updater.get_targetinfo("file3.txt") assert isinstance(info3, TargetFile) expected_files = [ "role1", Root.type, Snapshot.type, Targets.type, Timestamp.type, ] self._assert_files(expected_files) self.assertIsNone(self.updater.find_cached_target(info1)) self.assertIsNone(self.updater.find_cached_target(info3)) # Download files, assert that cache has correct files self.updater.download_target(info1) path = self.updater.find_cached_target(info1) self.assertEqual(path, os.path.join(self.dl_dir, info1.path)) self.assertIsNone(self.updater.find_cached_target(info3)) self.updater.download_target(info3) path = self.updater.find_cached_target(info1) self.assertEqual(path, os.path.join(self.dl_dir, info1.path)) path = self.updater.find_cached_target(info3) self.assertEqual(path, os.path.join(self.dl_dir, info3.path)) def test_refresh_with_only_local_root(self) -> None: os.remove(os.path.join(self.client_directory, "timestamp.json")) os.remove(os.path.join(self.client_directory, "snapshot.json")) os.remove(os.path.join(self.client_directory, "targets.json")) os.remove(os.path.join(self.client_directory, "role1.json")) os.remove(os.path.join(self.client_directory, "role2.json")) os.remove(os.path.join(self.client_directory, "1.root.json")) self._assert_files([Root.type]) self.updater.refresh() self._assert_files( [Root.type, Snapshot.type, Targets.type, Timestamp.type] ) # Get targetinfo for 'file3.txt' listed in the delegated role1 self.updater.get_targetinfo("file3.txt") expected_files = [ "role1", Root.type, Snapshot.type, Targets.type, Timestamp.type, ] self._assert_files(expected_files) def test_implicit_refresh_with_only_local_root(self) -> None: os.remove(os.path.join(self.client_directory, "timestamp.json")) os.remove(os.path.join(self.client_directory, "snapshot.json")) os.remove(os.path.join(self.client_directory, "targets.json")) os.remove(os.path.join(self.client_directory, "role1.json")) os.remove(os.path.join(self.client_directory, "role2.json")) os.remove(os.path.join(self.client_directory, "1.root.json")) self._assert_files(["root"]) # Get targetinfo for 'file3.txt' listed in the delegated role1 self.updater.get_targetinfo("file3.txt") expected_files = ["role1", "root", "snapshot", "targets", "timestamp"] self._assert_files(expected_files) def test_both_target_urls_not_set(self) -> None: # target_base_url = None and Updater._target_base_url = None updater = Updater(self.client_directory, self.metadata_url, self.dl_dir) info = TargetFile(1, {"sha256": ""}, "targetpath") with self.assertRaises(ValueError): updater.download_target(info) def test_no_target_dir_no_filepath(self) -> None: # filepath = None and Updater.target_dir = None updater = Updater(self.client_directory, self.metadata_url) info = TargetFile(1, {"sha256": ""}, "targetpath") with self.assertRaises(ValueError): 
updater.find_cached_target(info) with self.assertRaises(ValueError): updater.download_target(info) def test_external_targets_url(self) -> None: self.updater.refresh() info = self.updater.get_targetinfo("file1.txt") assert isinstance(info, TargetFile) self.updater.download_target(info, target_base_url=self.targets_url) def test_length_hash_mismatch(self) -> None: self.updater.refresh() targetinfo = self.updater.get_targetinfo("file1.txt") assert isinstance(targetinfo, TargetFile) length = targetinfo.length with self.assertRaises(exceptions.RepositoryError): targetinfo.length = 44 self.updater.download_target(targetinfo) with self.assertRaises(exceptions.RepositoryError): targetinfo.length = length targetinfo.hashes = {"sha256": "abcd"} self.updater.download_target(targetinfo) def test_updating_root(self) -> None: # Bump root version, resign and refresh self._modify_repository_root(lambda _: None, bump_version=True) self.updater.refresh() self.assertEqual(self.updater._trusted_set.root.version, 2) def test_missing_targetinfo(self) -> None: self.updater.refresh() # Get targetinfo for non-existing file self.assertIsNone(self.updater.get_targetinfo("file33.txt")) @patch.object(os, "replace", wraps=os.replace) @patch.object(os, "remove", wraps=os.remove) def test_persist_metadata_fails( self, wrapped_remove: MagicMock, wrapped_replace: MagicMock ) -> None: # Test that when the write succeeds (the file is created) but the replace # fails by raising OSError, the file is deleted. wrapped_replace.side_effect = OSError() with self.assertRaises(OSError): self.updater._persist_metadata("target", b"data") wrapped_replace.assert_called_once() wrapped_remove.assert_called_once() # Assert that the tempfile created during writing is eventually deleted; # in other words, no temporary file is left in the folder.
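# (This relies on the write-then-rename pattern exercised above: data is # first written to a named temporary file, whose default name starts with # "tmp", and then moved over the final filename with os.replace(); on # failure the temporary file should be removed rather than left behind.)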
for filename in os.listdir(self.updater._dir): self.assertFalse(filename.startswith("tmp")) def test_invalid_target_base_url(self) -> None: info = TargetFile(1, {"sha256": ""}, "targetpath") with self.assertRaises(exceptions.DownloadError): self.updater.download_target(info, target_base_url="invalid_url") def test_non_existing_target_file(self) -> None: info = TargetFile(1, {"sha256": ""}, "/non_existing_file.txt") # When a non-existing target file is given, download fails with # "404 Client Error: File not found for url" with self.assertRaises(exceptions.DownloadHTTPError): self.updater.download_target(info) def test_user_agent(self) -> None: # test default self.updater.refresh() session = next(iter(self.updater._fetcher._sessions.values())) ua = session.headers["User-Agent"] self.assertEqual(ua[:11], "python-tuf/") # test custom UA updater = Updater( self.client_directory, self.metadata_url, self.dl_dir, self.targets_url, config=UpdaterConfig(app_user_agent="MyApp/1.2.3"), ) updater.refresh() session = next(iter(updater._fetcher._sessions.values())) ua = session.headers["User-Agent"] self.assertEqual(ua[:23], "MyApp/1.2.3 python-tuf/") if __name__ == "__main__": utils.configure_test_logging(sys.argv) unittest.main() python-tuf-5.1.0/tests/test_updater_top_level_update.py000066400000000000000000000753501470074210500234740ustar00rootroot00000000000000# Copyright 2021, New York University and the TUF contributors # SPDX-License-Identifier: MIT OR Apache-2.0 """Test ngclient Updater top-level metadata update workflow""" import builtins import datetime import os import sys import tempfile import unittest from datetime import timezone from typing import Iterable, Optional from unittest.mock import MagicMock, Mock, call, patch from tests import utils from tests.repository_simulator import RepositorySimulator from tuf.api.exceptions import ( BadVersionNumberError, DownloadLengthMismatchError, ExpiredMetadataError, LengthOrHashMismatchError, UnsignedMetadataError, ) from tuf.api.metadata import ( SPECIFICATION_VERSION, TOP_LEVEL_ROLE_NAMES, DelegatedRole, Metadata, Root, Snapshot, Targets, Timestamp, ) from tuf.ngclient import Updater class TestRefresh(unittest.TestCase): """Test update of top-level metadata following 'Detailed client workflow' in the specification.""" # set dump_dir to trigger repository state dumps dump_dir: Optional[str] = None past_datetime = datetime.datetime.now(timezone.utc).replace( microsecond=0 ) - datetime.timedelta(days=5) def setUp(self) -> None: self.temp_dir = tempfile.TemporaryDirectory() self.metadata_dir = os.path.join(self.temp_dir.name, "metadata") self.targets_dir = os.path.join(self.temp_dir.name, "targets") os.mkdir(self.metadata_dir) os.mkdir(self.targets_dir) self.sim = RepositorySimulator() # bootstrap client with initial root metadata with open(os.path.join(self.metadata_dir, "root.json"), "bw") as f: f.write(self.sim.signed_roots[0]) if self.dump_dir is not None: # create test specific dump directory name = self.id().split(".")[-1] self.sim.dump_dir = os.path.join(self.dump_dir, name) os.mkdir(self.sim.dump_dir) def tearDown(self) -> None: self.temp_dir.cleanup() def _run_refresh(self) -> Updater: """Create a new Updater instance and refresh""" if self.dump_dir is not None: self.sim.write() updater = Updater( self.metadata_dir, "https://example.com/metadata/", self.targets_dir, "https://example.com/targets/", self.sim, ) updater.refresh() return updater def _init_updater(self) -> Updater: """Create a new Updater instance""" if self.dump_dir is not None:
self.sim.write() return Updater( self.metadata_dir, "https://example.com/metadata/", self.targets_dir, "https://example.com/targets/", self.sim, ) def _assert_files_exist(self, roles: Iterable[str]) -> None: """Assert that local metadata files exist for 'roles'""" expected_files = sorted([f"{role}.json" for role in roles]) local_metadata_files = sorted(os.listdir(self.metadata_dir)) self.assertListEqual(local_metadata_files, expected_files) def _assert_content_equals( self, role: str, version: Optional[int] = None ) -> None: """Assert that local file content is the expected one""" expected_content = self.sim.fetch_metadata(role, version) with open(os.path.join(self.metadata_dir, f"{role}.json"), "rb") as f: self.assertEqual(f.read(), expected_content) def _assert_version_equals(self, role: str, expected_version: int) -> None: """Assert that local metadata version is the expected one""" md = Metadata.from_file(os.path.join(self.metadata_dir, f"{role}.json")) self.assertEqual(md.signed.version, expected_version) def test_first_time_refresh(self) -> None: # Metadata dir contains only the mandatory initial root.json self._assert_files_exist([Root.type]) # Add one more root version to repository so that # refresh() updates from local trusted root (v1) to # remote root (v2) self.sim.root.version += 1 self.sim.publish_root() self._run_refresh() self._assert_files_exist(TOP_LEVEL_ROLE_NAMES) for role in TOP_LEVEL_ROLE_NAMES: version = 2 if role == Root.type else None self._assert_content_equals(role, version) def test_trusted_root_missing(self) -> None: os.remove(os.path.join(self.metadata_dir, "root.json")) with self.assertRaises(OSError): self._run_refresh() # Metadata dir is empty self.assertFalse(os.listdir(self.metadata_dir)) def test_trusted_root_expired(self) -> None: # Create an expired root version self.sim.root.expires = self.past_datetime self.sim.root.version += 1 self.sim.publish_root() # Update to latest root which is expired but still # saved as a local root. updater = self._init_updater() with self.assertRaises(ExpiredMetadataError): updater.refresh() self._assert_files_exist([Root.type]) self._assert_content_equals(Root.type, 2) # Local root metadata can be loaded even if expired updater = self._init_updater() # Create a non-expired root version and refresh self.sim.root.expires = self.sim.safe_expiry self.sim.root.version += 1 self.sim.publish_root() updater.refresh() # Root is successfully updated to latest version self._assert_files_exist(TOP_LEVEL_ROLE_NAMES) self._assert_content_equals(Root.type, 3) def test_trusted_root_unsigned(self) -> None: # Local trusted root is not signed root_path = os.path.join(self.metadata_dir, "root.json") md_root = Metadata.from_file(root_path) md_root.signatures.clear() md_root.to_file(root_path) with self.assertRaises(UnsignedMetadataError): self._run_refresh() # The update failed, no changes in metadata self._assert_files_exist([Root.type]) md_root_after = Metadata.from_file(root_path) self.assertEqual(md_root.to_bytes(), md_root_after.to_bytes()) def test_max_root_rotations(self) -> None: # Root must stop looking for new versions after a limited number of # intermediate files (max_root_rotations) were downloaded.
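# (Worked example of the bound checked below: the repository publishes # roots up to v6, the client starts from local root v1 with # max_root_rotations = 3, so it fetches at most v2, v3 and v4 and stops.)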
updater = self._init_updater() updater.config.max_root_rotations = 3 # Create some number of roots greater than 'max_root_rotations' while self.sim.root.version < updater.config.max_root_rotations + 3: self.sim.root.version += 1 self.sim.publish_root() md_root = Metadata.from_file( os.path.join(self.metadata_dir, "root.json") ) initial_root_version = md_root.signed.version updater.refresh() # Assert that root version was increased by no more # than 'max_root_rotations' self._assert_version_equals( Root.type, initial_root_version + updater.config.max_root_rotations ) def test_intermediate_root_incorrectly_signed(self) -> None: # Check for an arbitrary software attack # Intermediate root v2 is unsigned self.sim.root.version += 1 root_signers = self.sim.signers[Root.type].copy() self.sim.signers[Root.type].clear() self.sim.publish_root() # Final root v3 is correctly signed self.sim.root.version += 1 self.sim.signers[Root.type] = root_signers self.sim.publish_root() # Incorrectly signed intermediate root is detected with self.assertRaises(UnsignedMetadataError): self._run_refresh() # The update failed, latest root version is v1 self._assert_files_exist([Root.type]) self._assert_content_equals(Root.type, 1) def test_intermediate_root_expired(self) -> None: # The expiration of the new (intermediate) root metadata file # does not matter yet # Intermediate root v2 is expired self.sim.root.expires = self.past_datetime self.sim.root.version += 1 self.sim.publish_root() # Final root v3 is up to date self.sim.root.expires = self.sim.safe_expiry self.sim.root.version += 1 self.sim.publish_root() self._run_refresh() # Successfully updated to root v3 self._assert_files_exist(TOP_LEVEL_ROLE_NAMES) self._assert_content_equals(Root.type, 3) def test_final_root_incorrectly_signed(self) -> None: # Check for an arbitrary software attack self.sim.root.version += 1 # root v2 self.sim.signers[Root.type].clear() self.sim.publish_root() with self.assertRaises(UnsignedMetadataError): self._run_refresh() # The update failed, latest root version is v1 self._assert_files_exist([Root.type]) self._assert_content_equals(Root.type, 1) def test_new_root_same_version(self) -> None: # Check for a rollback attack # Repository serves a root file with the same version as the previous one self.sim.publish_root() with self.assertRaises(BadVersionNumberError): self._run_refresh() # The update failed, latest root version is v1 self._assert_files_exist([Root.type]) self._assert_content_equals(Root.type, 1) def test_new_root_nonconsecutive_version(self) -> None: # Repository serves a non-consecutive root version self.sim.root.version += 2 self.sim.publish_root() with self.assertRaises(BadVersionNumberError): self._run_refresh() # The update failed, latest root version is v1 self._assert_files_exist([Root.type]) self._assert_content_equals(Root.type, 1) def test_final_root_expired(self) -> None: # Check for a freeze attack # Final root is expired self.sim.root.expires = self.past_datetime self.sim.root.version += 1 self.sim.publish_root() with self.assertRaises(ExpiredMetadataError): self._run_refresh() # The update failed but the final root is persisted on the file system self._assert_files_exist([Root.type]) self._assert_content_equals(Root.type, 2) def test_new_timestamp_unsigned(self) -> None: # Check for an arbitrary software attack self.sim.signers[Timestamp.type].clear() with self.assertRaises(UnsignedMetadataError): self._run_refresh() self._assert_files_exist([Root.type]) @patch.object(datetime, "datetime", wraps=datetime.datetime) def
test_expired_timestamp_version_rollback(self, mock_time: Mock) -> None: """Verifies that local timestamp is used in rollback checks even if it is expired. The timestamp updates and rollback checks are performed with the following timing: - Timestamp v1 expiry set to day 7 - First updater refresh performed on day 0 - Repository publishes timestamp v2 on day 0 - Timestamp v2 expiry set to day 21 - Second updater refresh performed on day 18: assert that rollback check uses expired timestamp v1""" now = datetime.datetime.now(timezone.utc) self.sim.timestamp.expires = now + datetime.timedelta(days=7) self.sim.timestamp.version = 2 # Make a successful update of valid metadata which stores it in cache self._run_refresh() self.sim.timestamp.expires = now + datetime.timedelta(days=21) self.sim.timestamp.version = 1 mock_time.now.return_value = datetime.datetime.now( timezone.utc ) + datetime.timedelta(days=18) patcher = patch("datetime.datetime", mock_time) # Check that rollback protection is performed even if # the local timestamp has expired with patcher, self.assertRaises(BadVersionNumberError): self._run_refresh() self._assert_version_equals(Timestamp.type, 2) @patch.object(datetime, "datetime", wraps=datetime.datetime) def test_expired_timestamp_snapshot_rollback(self, mock_time: Mock) -> None: """Verifies that rollback protection is done even if local timestamp has expired. The snapshot updates and rollback protection checks are performed with the following timing: - Timestamp v1 expiry set to day 7 - Repository bumps snapshot to v3 on day 0 - First updater refresh performed on day 0 - Timestamp v2 expiry set to day 21 - Second updater refresh performed on day 18: assert that rollback protection is done with expired timestamp v1""" now = datetime.datetime.now(timezone.utc) self.sim.timestamp.expires = now + datetime.timedelta(days=7) # Bump the snapshot version number to 3 self.sim.update_snapshot() self.sim.update_snapshot() # Make a successful update of valid metadata which stores it in cache self._run_refresh() self.sim.snapshot.version = 1 # Snapshot version number is set to 2, which is still less than 3 self.sim.update_snapshot() self.sim.timestamp.expires = now + datetime.timedelta(days=21) mock_time.now.return_value = datetime.datetime.now( timezone.utc ) + datetime.timedelta(days=18) patcher = patch("datetime.datetime", mock_time) # Assert that rollback protection is done even if # the local timestamp has expired with patcher, self.assertRaises(BadVersionNumberError): self._run_refresh() self._assert_version_equals(Timestamp.type, 3) def test_new_timestamp_version_rollback(self) -> None: # Check for a rollback attack self.sim.timestamp.version = 2 self._run_refresh() self.sim.timestamp.version = 1 with self.assertRaises(BadVersionNumberError): self._run_refresh() self._assert_version_equals(Timestamp.type, 2) def test_new_timestamp_snapshot_rollback(self) -> None: # Check for a rollback attack.
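# (Rollback here means the snapshot version recorded in the new # timestamp's snapshot_meta is lower than in the trusted timestamp; per # the client workflow this must be rejected even though the timestamp # version itself increases.)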
self.sim.snapshot.version = 2 self.sim.update_timestamp() # timestamp v2 self._run_refresh() # Snapshot meta version is smaller than the previous one self.sim.timestamp.snapshot_meta.version = 1 self.sim.timestamp.version += 1 # timestamp v3 with self.assertRaises(BadVersionNumberError): self._run_refresh() self._assert_version_equals(Timestamp.type, 2) def test_new_timestamp_expired(self) -> None: # Check for a freeze attack self.sim.timestamp.expires = self.past_datetime self.sim.update_timestamp() with self.assertRaises(ExpiredMetadataError): self._run_refresh() self._assert_files_exist([Root.type]) def test_new_timestamp_fast_forward_recovery(self) -> None: """Test timestamp fast-forward recovery using key rotation. The timestamp recovery is made by the following steps: - Remove the timestamp key - Create and add a new key for timestamp - Bump and publish root - Roll back the timestamp version """ # attacker updates to a higher version self.sim.timestamp.version = 99999 # client refreshes the metadata and sees the new timestamp version self._run_refresh() self._assert_version_equals(Timestamp.type, 99999) # repository rotates timestamp keys, rolls back timestamp version self.sim.rotate_keys(Timestamp.type) self.sim.root.version += 1 self.sim.publish_root() self.sim.timestamp.version = 1 # client refreshes the metadata and sees the initial timestamp version self._run_refresh() self._assert_version_equals(Timestamp.type, 1) def test_new_snapshot_hash_mismatch(self) -> None: # Check against timestamp role's snapshot hash # Update timestamp with snapshot's hashes self.sim.compute_metafile_hashes_length = True self.sim.update_timestamp() # timestamp v2 self._run_refresh() # Modify snapshot contents without updating # timestamp's snapshot hash self.sim.snapshot.expires += datetime.timedelta(days=1) self.sim.snapshot.version += 1 # snapshot v2 self.sim.timestamp.snapshot_meta.version = self.sim.snapshot.version self.sim.timestamp.version += 1 # timestamp v3 # Hash mismatch error with self.assertRaises(LengthOrHashMismatchError): self._run_refresh() self._assert_version_equals(Timestamp.type, 3) self._assert_version_equals(Snapshot.type, 1) def test_new_snapshot_unsigned(self) -> None: # Check for an arbitrary software attack self.sim.signers[Snapshot.type].clear() with self.assertRaises(UnsignedMetadataError): self._run_refresh() self._assert_files_exist([Root.type, Timestamp.type]) def test_new_snapshot_version_mismatch(self) -> None: # Check against timestamp role's snapshot version # Increase snapshot version without updating timestamp self.sim.snapshot.version += 1 with self.assertRaises(BadVersionNumberError): self._run_refresh() self._assert_files_exist([Root.type, Timestamp.type]) def test_new_snapshot_version_rollback(self) -> None: # Check for a rollback attack self.sim.snapshot.version = 2 self.sim.update_timestamp() self._run_refresh() self.sim.snapshot.version = 1 self.sim.update_timestamp() with self.assertRaises(BadVersionNumberError): self._run_refresh() self._assert_version_equals(Snapshot.type, 2) def test_new_snapshot_fast_forward_recovery(self) -> None: """Test snapshot fast-forward recovery using key rotation. The snapshot recovery requires the snapshot and timestamp key rotation.
It is made by the following steps: - Remove the snapshot and timestamp keys - Create and add a new key for snapshot and timestamp - Roll back the snapshot version - Bump and publish root - Bump the timestamp """ # attacker updates to a higher version (bumping timestamp is required) self.sim.snapshot.version = 99999 self.sim.update_timestamp() # client refreshes the metadata and sees the new snapshot version self._run_refresh() self._assert_version_equals(Snapshot.type, 99999) # repository rotates snapshot & timestamp keys, rolls back snapshot self.sim.rotate_keys(Snapshot.type) self.sim.rotate_keys(Timestamp.type) self.sim.root.version += 1 self.sim.publish_root() self.sim.snapshot.version = 1 self.sim.update_timestamp() # client refreshes the metadata and sees the initial snapshot version self._run_refresh() self._assert_version_equals(Snapshot.type, 1) def test_new_snapshot_expired(self) -> None: # Check for a freeze attack self.sim.snapshot.expires = self.past_datetime self.sim.update_snapshot() with self.assertRaises(ExpiredMetadataError): self._run_refresh() self._assert_files_exist([Root.type, Timestamp.type]) def test_new_targets_hash_mismatch(self) -> None: # Check against snapshot role's targets hashes # Update snapshot with targets' hashes self.sim.compute_metafile_hashes_length = True self.sim.update_snapshot() self._run_refresh() # Modify targets contents without updating # snapshot's targets hashes self.sim.targets.version += 1 self.sim.snapshot.meta[ "targets.json" ].version = self.sim.targets.version self.sim.snapshot.version += 1 self.sim.update_timestamp() with self.assertRaises(LengthOrHashMismatchError): self._run_refresh() self._assert_version_equals(Snapshot.type, 3) self._assert_version_equals(Targets.type, 1) def test_new_targets_unsigned(self) -> None: # Check for an arbitrary software attack self.sim.signers[Targets.type].clear() with self.assertRaises(UnsignedMetadataError): self._run_refresh() self._assert_files_exist([Root.type, Timestamp.type, Snapshot.type]) def test_new_targets_version_mismatch(self) -> None: # Check against snapshot role's targets version # Increase targets version without updating snapshot self.sim.targets.version += 1 with self.assertRaises(BadVersionNumberError): self._run_refresh() self._assert_files_exist([Root.type, Timestamp.type, Snapshot.type]) def test_new_targets_expired(self) -> None: # Check for a freeze attack. self.sim.targets.expires = self.past_datetime self.sim.update_snapshot() with self.assertRaises(ExpiredMetadataError): self._run_refresh() self._assert_files_exist([Root.type, Timestamp.type, Snapshot.type]) def test_compute_metafile_hashes_length(self) -> None: self.sim.compute_metafile_hashes_length = True self.sim.update_snapshot() self._run_refresh() self._assert_version_equals(Timestamp.type, 2) self._assert_version_equals(Snapshot.type, 2) self.sim.compute_metafile_hashes_length = False self.sim.update_snapshot() self._run_refresh() self._assert_version_equals(Timestamp.type, 3) self._assert_version_equals(Snapshot.type, 3) def test_new_targets_fast_forward_recovery(self) -> None: """Test targets fast-forward recovery using key rotation.
The targets recovery is made by issuing new Snapshot keys, by following steps: - Remove the snapshot key - Create and add a new key for snapshot - Bump and publish root - Rollback the target version """ # attacker updates to a higher version self.sim.targets.version = 99999 self.sim.update_snapshot() # client refreshes the metadata and see the new targets version self._run_refresh() self._assert_version_equals(Targets.type, 99999) # repository rotates snapshot keys, rolls back targets version self.sim.rotate_keys(Snapshot.type) self.sim.root.version += 1 self.sim.publish_root() self.sim.targets.version = 1 self.sim.update_snapshot() # client refreshes the metadata version and see initial targets version self._run_refresh() self._assert_version_equals(Targets.type, 1) @patch.object(builtins, "open", wraps=builtins.open) def test_not_loading_targets_twice(self, wrapped_open: MagicMock) -> None: # Do not load targets roles more than once when traversing # the delegations tree # Add new delegated targets, update the snapshot spec_version = ".".join(SPECIFICATION_VERSION) targets = Targets(1, spec_version, self.sim.safe_expiry, {}, None) role = DelegatedRole("role1", [], 1, False, ["*"], None) self.sim.add_delegation("targets", role, targets) self.sim.update_snapshot() # Run refresh, top-level roles are loaded updater = self._run_refresh() # Clean up calls to open during refresh() wrapped_open.reset_mock() # First time looking for "somepath", only 'role1' must be loaded updater.get_targetinfo("somepath") wrapped_open.assert_called_once_with( os.path.join(self.metadata_dir, "role1.json"), "rb" ) wrapped_open.reset_mock() # Second call to get_targetinfo, all metadata is already loaded updater.get_targetinfo("somepath") wrapped_open.assert_not_called() def test_snapshot_rollback_with_local_snapshot_hash_mismatch(self) -> None: # Test triggering snapshot rollback check on a newly downloaded snapshot # when the local snapshot is loaded even when there is a hash mismatch # with timestamp.snapshot_meta. # By raising this flag on timestamp update the simulator would: # 1) compute the hash of the new modified version of snapshot # 2) assign the hash to timestamp.snapshot_meta # The purpose is to create a hash mismatch between timestamp.meta and # the local snapshot, but to have hash match between timestamp.meta and # the next snapshot version. self.sim.compute_metafile_hashes_length = True # Initialize all metadata and assign targets version higher than 1. self.sim.targets.version = 2 self.sim.update_snapshot() self._run_refresh() # The new targets must have a lower version than the local trusted one. self.sim.targets.version = 1 self.sim.update_snapshot() # During the snapshot update, the local snapshot will be loaded even if # there is a hash mismatch with timestamp.snapshot_meta, because it will # be considered as trusted. # Should fail as a new version of snapshot will be fetched which lowers # the snapshot.meta["targets.json"] version by 1 and throws an error. 
with self.assertRaises(BadVersionNumberError): self._run_refresh() @patch.object(builtins, "open", wraps=builtins.open) def test_load_metadata_from_cache(self, wrapped_open: MagicMock) -> None: # Add new delegated targets spec_version = ".".join(SPECIFICATION_VERSION) targets = Targets(1, spec_version, self.sim.safe_expiry, {}, None) role = DelegatedRole("role1", [], 1, False, ["*"], None) self.sim.add_delegation("targets", role, targets) self.sim.update_snapshot() # Make a successful update of valid metadata which stores it in cache updater = self._run_refresh() updater.get_targetinfo("non_existent_target") # Clean up calls to open during refresh() wrapped_open.reset_mock() # Clean up fetch tracker metadata self.sim.fetch_tracker.metadata.clear() # Create a new updater and perform a second update while # the metadata is already stored in cache (metadata dir) updater = Updater( self.metadata_dir, "https://example.com/metadata/", self.targets_dir, "https://example.com/targets/", self.sim, ) updater.get_targetinfo("non_existent_target") # Test that metadata is loaded from cache and not downloaded wrapped_open.assert_has_calls( [ call(os.path.join(self.metadata_dir, "root.json"), "rb"), call(os.path.join(self.metadata_dir, "timestamp.json"), "rb"), call(os.path.join(self.metadata_dir, "snapshot.json"), "rb"), call(os.path.join(self.metadata_dir, "targets.json"), "rb"), call(os.path.join(self.metadata_dir, "role1.json"), "rb"), ] ) expected_calls = [("root", 2), ("timestamp", None)] self.assertListEqual(self.sim.fetch_tracker.metadata, expected_calls) @patch.object(datetime, "datetime", wraps=datetime.datetime) def test_expired_metadata(self, mock_time: Mock) -> None: """Verifies that expired local timestamp/snapshot can be used for updating from remote. 
The updates and verifications are performed with the following
        timing:
         - Timestamp v1 expiry set to day 7
         - First updater refresh performed on day 0
         - Repository bumps snapshot and targets to v2 on day 0
         - Timestamp v2 expiry set to day 21
         - Second updater refresh performed on day 18,
           it is successful and timestamp/snapshot final versions are v2"""
        now = datetime.datetime.now(timezone.utc)
        self.sim.timestamp.expires = now + datetime.timedelta(days=7)

        # Make a successful update of valid metadata which stores it in cache
        self._run_refresh()

        self.sim.targets.version += 1
        self.sim.update_snapshot()
        self.sim.timestamp.expires = now + datetime.timedelta(days=21)

        # Mock time so that the local timestamp has expired
        # but the new timestamp has not
        mock_time.now.return_value = datetime.datetime.now(
            timezone.utc
        ) + datetime.timedelta(days=18)
        with patch("datetime.datetime", mock_time):
            self._run_refresh()

        # Assert that the final version of timestamp/snapshot is version 2,
        # which means a successful refresh was performed
        # with expired local metadata
        for role in ["timestamp", "snapshot", "targets"]:
            md = Metadata.from_file(
                os.path.join(self.metadata_dir, f"{role}.json")
            )
            self.assertEqual(md.signed.version, 2)

    def test_max_metadata_lengths(self) -> None:
        """Test that the client's configured max metadata lengths are respected"""

        # client has root v1 already: create a new one available for download
        self.sim.root.version += 1
        self.sim.publish_root()

        config_vars = [
            "root_max_length",
            "timestamp_max_length",
            "snapshot_max_length",
            "targets_max_length",
        ]
        # make sure going over any length limit raises DownloadLengthMismatchError
        for var_name in config_vars:
            updater = self._init_updater()
            setattr(updater.config, var_name, 100)
            with self.assertRaises(DownloadLengthMismatchError):
                updater.refresh()

        # All good with normal length limits
        updater = self._init_updater()
        updater.refresh()

    def test_timestamp_eq_versions_check(self) -> None:
        # Test that a modified timestamp with different content, but the same
        # version, doesn't replace the valid locally stored one.

        # Make a successful update of valid metadata which stores it in cache
        self._run_refresh()
        initial_timestamp_meta_ver = self.sim.timestamp.snapshot_meta.version

        # Change timestamp without bumping its version in order to test if a
        # new timestamp with the same version will be persisted.
        self.sim.timestamp.snapshot_meta.version = 100
        self._run_refresh()

        # If the local timestamp md file has the same snapshot_meta.version as
        # the initial one, then the new modified timestamp has not been
        # stored.
timestamp_path = os.path.join(self.metadata_dir, "timestamp.json") timestamp: Metadata[Timestamp] = Metadata.from_file(timestamp_path) self.assertEqual( initial_timestamp_meta_ver, timestamp.signed.snapshot_meta.version ) if __name__ == "__main__": if "--dump" in sys.argv: TestRefresh.dump_dir = tempfile.mkdtemp() print(f"Repository Simulator dumps in {TestRefresh.dump_dir}") sys.argv.remove("--dump") utils.configure_test_logging(sys.argv) unittest.main() python-tuf-5.1.0/tests/test_updater_validation.py000066400000000000000000000041101470074210500222570ustar00rootroot00000000000000# Copyright 2022, New York University and the TUF contributors # SPDX-License-Identifier: MIT OR Apache-2.0 """Test ngclient Updater validations.""" import os import sys import tempfile import unittest from tests import utils from tests.repository_simulator import RepositorySimulator from tuf.ngclient import Updater class TestUpdater(unittest.TestCase): """Test ngclient Updater input validation.""" def setUp(self) -> None: self.temp_dir = tempfile.TemporaryDirectory() self.metadata_dir = os.path.join(self.temp_dir.name, "metadata") self.targets_dir = os.path.join(self.temp_dir.name, "targets") os.mkdir(self.metadata_dir) os.mkdir(self.targets_dir) # Setup the repository, bootstrap client root.json self.sim = RepositorySimulator() with open(os.path.join(self.metadata_dir, "root.json"), "bw") as f: f.write(self.sim.signed_roots[0]) def tearDown(self) -> None: self.temp_dir.cleanup() def _new_updater(self) -> Updater: return Updater( self.metadata_dir, "https://example.com/metadata/", self.targets_dir, "https://example.com/targets/", fetcher=self.sim, ) def test_local_target_storage_fail(self) -> None: self.sim.add_target("targets", b"content", "targetpath") self.sim.targets.version += 1 self.sim.update_snapshot() updater = self._new_updater() target_info = updater.get_targetinfo("targetpath") assert target_info is not None with self.assertRaises(FileNotFoundError): updater.download_target(target_info, filepath="") def test_non_existing_metadata_dir(self) -> None: with self.assertRaises(FileNotFoundError): # Initialize Updater with non-existing metadata_dir Updater( "non_existing_metadata_dir", "https://example.com/metadata/", fetcher=self.sim, ) if __name__ == "__main__": utils.configure_test_logging(sys.argv) unittest.main() python-tuf-5.1.0/tests/test_utils.py000066400000000000000000000041121470074210500175430ustar00rootroot00000000000000# Copyright 2020, TUF contributors # SPDX-License-Identifier: MIT OR Apache-2.0 """ test_utils.py Martin Vrachev. October 21, 2020. See LICENSE-MIT OR LICENSE for licensing information. Provide tests for some of the functions in utils.py module. """ import logging import socket import sys import unittest from tests import utils logger = logging.getLogger(__name__) def can_connect(port: int) -> bool: """Check if a socket can connect on the given port""" try: sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) sock.connect(("localhost", port)) return True except Exception: # noqa: BLE001 return False finally: # The process will always enter in finally even after return. 
        if sock:
            sock.close()


class TestServerProcess(unittest.TestCase):
    """Test functionality provided in TestServerProcess from tests/utils.py."""

    def test_simple_server_startup(self) -> None:
        # Test normal case
        server_process_handler = utils.TestServerProcess(log=logger)

        # Make sure we can connect to the server
        self.assertTrue(can_connect(server_process_handler.port))
        server_process_handler.clean()

    def test_cleanup(self) -> None:
        # Test normal case
        server_process_handler = utils.TestServerProcess(
            log=logger, server="simple_server.py"
        )

        server_process_handler.clean()

        # Check if the process has successfully been killed.
        self.assertFalse(server_process_handler.is_process_running())

    def test_server_exit_before_timeout(self) -> None:
        with self.assertRaises(utils.TestServerProcessError):
            utils.TestServerProcess(logger, server="non_existing_server.py")

        # Test starting a server which immediately exits.
        with self.assertRaises(utils.TestServerProcessError):
            utils.TestServerProcess(logger, server="fast_server_exit.py")


if __name__ == "__main__":
    utils.configure_test_logging(sys.argv)
    unittest.main()
python-tuf-5.1.0/tests/utils.py000066400000000000000000000301301470074210500165030ustar00rootroot00000000000000
# Copyright 2020, TUF contributors
# SPDX-License-Identifier: MIT OR Apache-2.0

"""
utils.py

August 3, 2020.

Jussi Kukkonen

See LICENSE-MIT OR LICENSE for licensing information.

Provide common utilities for TUF tests.
"""

import argparse
import errno
import logging
import os
import queue
import socket
import subprocess
import sys
import threading
import time
import unittest
import warnings
from contextlib import contextmanager
from typing import IO, Any, Callable, Dict, Iterator, List, Optional

logger = logging.getLogger(__name__)

# May be used to reliably read other files in tests dir regardless of cwd
TESTS_DIR = os.path.dirname(os.path.realpath(__file__))

# Used when forming URLs on the client side
TEST_HOST_ADDRESS = "127.0.0.1"

# DataSet is only here so type hints can be used.
DataSet = Dict[str, Any]


# Test runner decorator: Runs the test as a set of N SubTests,
# (where N is number of items in dataset), feeding the actual test
# function one test case at a time
def run_sub_tests_with_dataset(
    dataset: DataSet,
) -> Callable[[Callable], Callable]:
    """Decorator starting a unittest.TestCase.subTest() for each of the
    cases in dataset"""

    def real_decorator(
        function: Callable[[unittest.TestCase, Any], None],
    ) -> Callable[[unittest.TestCase], None]:
        def wrapper(test_cls: unittest.TestCase) -> None:
            for case, data in dataset.items():
                with test_cls.subTest(case=case):
                    # Save case name for future reference
                    test_cls.case_name = case.replace(" ", "_")
                    function(test_cls, data)

        return wrapper

    return real_decorator


class TestServerProcessError(Exception):
    def __init__(self, value: str = "TestServerProcess") -> None:
        super().__init__()
        self.value = value

    def __str__(self) -> str:
        return repr(self.value)


@contextmanager
def ignore_deprecation_warnings(module: str) -> Iterator[None]:
    with warnings.catch_warnings():
        warnings.filterwarnings(
            "ignore", category=DeprecationWarning, module=module
        )
        yield


# Wait until host:port accepts connections.
# Raises TimeoutError if this does not happen within timeout seconds # There are major differences between operating systems on how this works # but the current blocking connect() seems to work fast on Linux and seems # to at least work on Windows (ECONNREFUSED unfortunately has a 2 second # timeout on Windows) def wait_for_server( host: str, server: str, port: int, timeout: int = 10 ) -> None: """Wait for server start until timeout is reached or server has started""" start = time.time() remaining_timeout = timeout succeeded = False while not succeeded and remaining_timeout > 0: try: sock: Optional[socket.socket] = socket.socket( socket.AF_INET, socket.SOCK_STREAM ) assert sock is not None sock.settimeout(remaining_timeout) sock.connect((host, port)) succeeded = True except socket.timeout: pass except OSError as e: # ECONNREFUSED is expected while the server is not started if e.errno not in [errno.ECONNREFUSED]: logger.warning( "Unexpected error while waiting for server: %s", str(e) ) # Avoid pegging a core just for this time.sleep(0.01) finally: if sock: sock.close() sock = None remaining_timeout = int(timeout - (time.time() - start)) if not succeeded: raise TimeoutError( "Could not connect to the " + server + " on port " + str(port) + "!" ) def configure_test_logging(argv: List[str]) -> None: """Configure logger level for a certain test file""" # parse arguments but only handle '-v': argv may contain # other things meant for unittest argument parser parser = argparse.ArgumentParser(add_help=False) parser.add_argument("-v", "--verbose", action="count", default=0) args, _ = parser.parse_known_args(argv) if args.verbose <= 1: # 0 and 1 both mean ERROR: this way '-v' makes unittest print test # names without increasing log level loglevel = logging.ERROR elif args.verbose == 2: loglevel = logging.WARNING elif args.verbose == 3: loglevel = logging.INFO else: loglevel = logging.DEBUG logging.basicConfig(level=loglevel) def cleanup_dir(path: str) -> None: """Delete all files inside a directory""" for filepath in [ os.path.join(path, filename) for filename in os.listdir(path) ]: os.remove(filepath) class TestServerProcess: """Helper class used to create a child process with the subprocess.Popen object and use a thread-safe Queue structure for logging. Args: log: Logger which will be used for logging. server: Path to the server to run in the subprocess. timeout: Time in seconds in which the server should start or otherwise TimeoutError error will be raised. popen_cwd: Current working directory used when instancing a subprocess.Popen object. extra_cmd_args: Additional arguments for the command which will start the subprocess. More precisely: "python -u ". If no list is provided, an empty list ("[]") will be assigned to it. """ def __init__( self, log: logging.Logger, server: str = os.path.join(TESTS_DIR, "simple_server.py"), timeout: int = 10, popen_cwd: str = ".", extra_cmd_args: Optional[List[str]] = None, ): self.server = server self.__logger = log # Stores popped messages from the queue. self.__logged_messages: List[str] = [] self.__server_process: Optional[subprocess.Popen] = None self._log_queue: Optional[queue.Queue] = None self.port = -1 if extra_cmd_args is None: extra_cmd_args = [] try: self._start_server(timeout, extra_cmd_args, popen_cwd) wait_for_server("localhost", self.server, self.port, timeout) except Exception as e: # Clean the resources and log the server errors if any exists. 
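            # For example: _wait_for_port() raises TestServerProcessError if
            # the server prints something other than the expected port
            # message, and wait_for_server() raises TimeoutError if the port
            # never accepts connections.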
            self.clean()
            raise e

    def _start_server(
        self, timeout: int, extra_cmd_args: List[str], popen_cwd: str
    ) -> None:
        """
        Start the server subprocess and a thread responsible for
        redirecting stdout/stderr to the Queue.
        Waits at most ``timeout`` seconds for the port message.
        """
        self._start_process(extra_cmd_args, popen_cwd)
        self._start_redirect_thread()

        self._wait_for_port(timeout)

        self.__logger.info("%s serving on %d", self.server, self.port)

    def _start_process(self, extra_cmd_args: List[str], popen_cwd: str) -> None:
        """Starts the process running the server."""

        # The "-u" option forces stdin, stdout and stderr to be unbuffered.
        command = [sys.executable, "-u", self.server, *extra_cmd_args]

        # Reusing one subprocess in multiple tests, but split up the logs
        # for each.
        self.__server_process = subprocess.Popen(
            command,
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT,
            cwd=popen_cwd,
        )

    def _start_redirect_thread(self) -> None:
        """Starts a thread redirecting the stdout/stderr to the Queue."""
        assert isinstance(self.__server_process, subprocess.Popen)

        # Run log_queue_worker() in a thread.
        # The thread will exit when the child process dies.
        self._log_queue = queue.Queue()
        log_thread = threading.Thread(
            target=self._log_queue_worker,
            args=(self.__server_process.stdout, self._log_queue),
        )

        # "daemon = True" means the thread won't interfere with the
        # process exit.
        log_thread.daemon = True
        log_thread.start()

    @staticmethod
    def _log_queue_worker(stream: IO, line_queue: queue.Queue) -> None:
        """
        Worker function to run in a separate thread.
        Reads from 'stream', puts lines in a Queue (Queue is thread-safe).
        """
        while True:
            # readline() is a blocking operation.
            # decode to push a string in the queue instead of 8-bit bytes.
            log_line = stream.readline().decode("utf-8")
            line_queue.put(log_line)

            if len(log_line) == 0:
                # This is the end of the stream, meaning the server process
                # has exited.
                stream.close()
                break

    def _wait_for_port(self, timeout: int) -> None:
        """
        Validates the first item from the Queue against the port message.
        If validation is successful, self.port is set.
        Raises TestServerProcessError if the process has exited or
        TimeoutError if no message was found within timeout seconds.
        """
        assert isinstance(self.__server_process, subprocess.Popen)
        assert isinstance(self._log_queue, queue.Queue)

        # We have hardcoded the message we expect on a successful server
        # startup. This message should be the first message sent by the
        # server!
        expected_msg = "bind succeeded, server port is: "
        try:
            line = self._log_queue.get(timeout=timeout)
            if len(line) == 0:
                # The process has exited.
                raise TestServerProcessError(
                    self.server
                    + " exited unexpectedly "
                    + "with code "
                    + str(self.__server_process.poll())
                    + "!"
                )

            if line.startswith(expected_msg):
                self.port = int(line[len(expected_msg) :])
            else:
                # An exception or some other message is printed from the
                # server.
                self.__logged_messages.append(line)
                # Check if more lines are logged.
                self.flush_log()
                raise TestServerProcessError(
                    self.server
                    + " did not print port "
                    + "message as first stdout line as expected!"
                )
        except queue.Empty as e:
            raise TimeoutError(
                "Failure during " + self.server + " startup!"
            ) from e

    def _kill_server_process(self) -> None:
        """Kills the server subprocess if it's running."""
        assert isinstance(self.__server_process, subprocess.Popen)
        if self.is_process_running():
            self.__logger.info(
                "Server process %d terminated", self.__server_process.pid
            )
            self.__server_process.kill()
            self.__server_process.wait()

    def flush_log(self) -> None:
        """Flushes the log lines from the logging queue."""
        assert isinstance(self._log_queue, queue.Queue)
        while True:
            # Get lines from log_queue
            try:
                line = self._log_queue.get(block=False)
                if len(line) > 0:
                    self.__logged_messages.append(line)
            except queue.Empty:
                # No more lines are logged in the queue.
                break

        if len(self.__logged_messages) > 0:
            title = "Test server (" + self.server + ") output:\n"
            message = [title, *self.__logged_messages]
            self.__logger.info("| ".join(message))
            self.__logged_messages = []

    def clean(self) -> None:
        """
        Kills the subprocess and closes the TempFile.
        Calls flush_log to log any information that has not yet been
        flushed.
        """
        # If there is anything logged, flush it before closing the resources.
        self.flush_log()

        self._kill_server_process()

    def is_process_running(self) -> bool:
        assert isinstance(self.__server_process, subprocess.Popen)
        return self.__server_process.poll() is None
python-tuf-5.1.0/tox.ini000066400000000000000000000042411470074210500151460ustar00rootroot00000000000000
# Tox (https://tox.readthedocs.io/en/latest/) is a tool for running tests
# in multiple virtualenvs. This configuration file will run the
# test suite on all supported python versions. To use it, "pip install tox"
# and then run "tox" from this directory.

[tox]
isolated_build=true
envlist = lint,docs,py
skipsdist = true

[testenv]
# TODO: Consider refactoring the tests to not require the aggregation script
# being invoked from the `tests` directory. This seems to be the convention and
# would make use of other testing tools such as coverage/coveralls easier.
changedir = tests

commands =
    python3 --version
    python3 -m coverage run aggregate_tests.py
    python3 -m coverage report -m --fail-under 97

deps =
    -r{toxinidir}/requirements/test.txt
    # Install TUF in editable mode, instead of tox default virtual environment
    # installation (see `skipsdist`), to get relative paths in coverage reports
    --editable {toxinidir}

install_command = python3 -m pip install {opts} {packages}

# Workaround https://github.com/tox-dev/tox/issues/2801 (python3 not allowed in Windows)
allowlist_externals = python3

# Develop test env to run tests against securesystemslib's main branch
# Must be invoked explicitly with, e.g.
`tox -e with-sslib-main` [testenv:with-sslib-main] commands_pre = python3 -m pip install --force-reinstall git+https://github.com/secure-systems-lab/securesystemslib.git@main#egg=securesystemslib[crypto,pynacl] commands = python3 -m coverage run aggregate_tests.py python3 -m coverage report -m [testenv:lint] changedir = {toxinidir} deps = -r{toxinidir}/requirements/lint.txt --editable {toxinidir} lint_dirs = tuf examples tests verify_release .github/scripts passenv = RUFF_OUTPUT_FORMAT commands = ruff check {[testenv:lint]lint_dirs} ruff format --diff {[testenv:lint]lint_dirs} mypy {[testenv:lint]lint_dirs} [testenv:fix] changedir = {toxinidir} deps = {[testenv:lint]deps} commands = ruff check --fix {[testenv:lint]lint_dirs} ruff format {[testenv:lint]lint_dirs} [testenv:docs] deps = -r{toxinidir}/requirements/docs.txt changedir = {toxinidir} commands = sphinx-build -b html docs docs/build/html -W python-tuf-5.1.0/tuf/000077500000000000000000000000001470074210500144305ustar00rootroot00000000000000python-tuf-5.1.0/tuf/__init__.py000066400000000000000000000002721470074210500165420ustar00rootroot00000000000000# Copyright New York University and the TUF contributors # SPDX-License-Identifier: MIT OR Apache-2.0 """TUF.""" # This value is used in the requests user agent. __version__ = "5.1.0" python-tuf-5.1.0/tuf/api/000077500000000000000000000000001470074210500152015ustar00rootroot00000000000000python-tuf-5.1.0/tuf/api/__init__.py000066400000000000000000000000001470074210500173000ustar00rootroot00000000000000python-tuf-5.1.0/tuf/api/_payload.py000066400000000000000000001757301470074210500173600ustar00rootroot00000000000000# Copyright the TUF contributors # SPDX-License-Identifier: MIT OR Apache-2.0 """Helper classes for low-level Metadata API.""" import abc import fnmatch import io import logging from dataclasses import dataclass from datetime import datetime, timezone from typing import ( IO, Any, ClassVar, Dict, Iterator, List, Optional, Tuple, TypeVar, Union, ) from securesystemslib import exceptions as sslib_exceptions from securesystemslib import hash as sslib_hash from securesystemslib.signer import Key, Signature from tuf.api.exceptions import LengthOrHashMismatchError, UnsignedMetadataError _ROOT = "root" _SNAPSHOT = "snapshot" _TARGETS = "targets" _TIMESTAMP = "timestamp" # We aim to support SPECIFICATION_VERSION and require the input metadata # files to have the same major version (the first number) as ours. SPECIFICATION_VERSION = ["1", "0", "31"] TOP_LEVEL_ROLE_NAMES = {_ROOT, _TIMESTAMP, _SNAPSHOT, _TARGETS} logger = logging.getLogger(__name__) # T is a Generic type constraint for container payloads T = TypeVar("T", "Root", "Timestamp", "Snapshot", "Targets") class Signed(metaclass=abc.ABCMeta): """A base class for the signed part of TUF metadata. Objects with base class Signed are usually included in a ``Metadata`` object on the signed attribute. This class provides attributes and methods that are common for all TUF metadata types (roles). *All parameters named below are not just constructor arguments but also instance attributes.* Args: version: Metadata version number. If None, then 1 is assigned. spec_version: Supported TUF specification version. If None, then the version currently supported by the library is assigned. expires: Metadata expiry date in UTC timezone. If None, then current date and time is assigned. unrecognized_fields: Dictionary of all attributes that are not managed by TUF Metadata API Raises: ValueError: Invalid arguments. 
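
    Example (illustrative; uses the concrete ``Timestamp`` subclass defined
    later in this module, since ``Signed`` itself is abstract)::

        from datetime import datetime, timedelta, timezone

        signed = Timestamp(
            expires=datetime.now(timezone.utc) + timedelta(days=7)
        )
        assert not signed.is_expired()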
""" # type is required for static reference without changing the API type: ClassVar[str] = "signed" # _type and type are identical: 1st replicates file format, 2nd passes lint @property def _type(self) -> str: return self.type @property def expires(self) -> datetime: """Get the metadata expiry date.""" return self._expires @expires.setter def expires(self, value: datetime) -> None: """Set the metadata expiry date. # Use 'datetime' module to e.g. expire in seven days from now obj.expires = now(timezone.utc) + timedelta(days=7) """ self._expires = value.replace(microsecond=0) if self._expires.tzinfo is None: # Naive datetime: just make it UTC self._expires = self._expires.replace(tzinfo=timezone.utc) elif self._expires.tzinfo != timezone.utc: raise ValueError(f"Expected tz UTC, not {self._expires.tzinfo}") # NOTE: Signed is a stupid name, because this might not be signed yet, but # we keep it to match spec terminology (I often refer to this as "payload", # or "inner metadata") def __init__( self, version: Optional[int], spec_version: Optional[str], expires: Optional[datetime], unrecognized_fields: Optional[Dict[str, Any]], ): if spec_version is None: spec_version = ".".join(SPECIFICATION_VERSION) # Accept semver (X.Y.Z) but also X.Y for legacy compatibility spec_list = spec_version.split(".") if len(spec_list) not in [2, 3] or not all( el.isdigit() for el in spec_list ): raise ValueError(f"Failed to parse spec_version {spec_version}") # major version must match if spec_list[0] != SPECIFICATION_VERSION[0]: raise ValueError(f"Unsupported spec_version {spec_version}") self.spec_version = spec_version self.expires = expires or datetime.now(timezone.utc) if version is None: version = 1 elif version <= 0: raise ValueError(f"version must be > 0, got {version}") self.version = version if unrecognized_fields is None: unrecognized_fields = {} self.unrecognized_fields = unrecognized_fields def __eq__(self, other: object) -> bool: if not isinstance(other, Signed): return False return ( self.type == other.type and self.version == other.version and self.spec_version == other.spec_version and self.expires == other.expires and self.unrecognized_fields == other.unrecognized_fields ) @abc.abstractmethod def to_dict(self) -> Dict[str, Any]: """Serialize and return a dict representation of self.""" raise NotImplementedError @classmethod @abc.abstractmethod def from_dict(cls, signed_dict: Dict[str, Any]) -> "Signed": """Deserialization helper, creates object from json/dict representation. """ raise NotImplementedError @classmethod def _common_fields_from_dict( cls, signed_dict: Dict[str, Any] ) -> Tuple[int, str, datetime]: """Return common fields of ``Signed`` instances from the passed dict representation, and returns an ordered list to be passed as leading positional arguments to a subclass constructor. See ``{Root, Timestamp, Snapshot, Targets}.from_dict`` methods for usage. """ _type = signed_dict.pop("_type") if _type != cls.type: raise ValueError(f"Expected type {cls.type}, got {_type}") version = signed_dict.pop("version") spec_version = signed_dict.pop("spec_version") expires_str = signed_dict.pop("expires") # Convert 'expires' TUF metadata string to a datetime object, which is # what the constructor expects and what we store. The inverse operation # is implemented in '_common_fields_to_dict'. 
expires = datetime.strptime(expires_str, "%Y-%m-%dT%H:%M:%SZ").replace( tzinfo=timezone.utc ) return version, spec_version, expires def _common_fields_to_dict(self) -> Dict[str, Any]: """Return a dict representation of common fields of ``Signed`` instances. See ``{Root, Timestamp, Snapshot, Targets}.to_dict`` methods for usage. """ return { "_type": self._type, "version": self.version, "spec_version": self.spec_version, "expires": self.expires.strftime("%Y-%m-%dT%H:%M:%SZ"), **self.unrecognized_fields, } def is_expired(self, reference_time: Optional[datetime] = None) -> bool: """Check metadata expiration against a reference time. Args: reference_time: Time to check expiration date against. A naive datetime in UTC expected. Default is current UTC date and time. Returns: ``True`` if expiration time is less than the reference time. """ if reference_time is None: reference_time = datetime.now(timezone.utc) return reference_time >= self.expires class Role: """Container that defines which keys are required to sign roles metadata. Role defines how many keys are required to successfully sign the roles metadata, and which keys are accepted. *All parameters named below are not just constructor arguments but also instance attributes.* Args: keyids: Roles signing key identifiers. threshold: Number of keys required to sign this role's metadata. unrecognized_fields: Dictionary of all attributes that are not managed by TUF Metadata API Raises: ValueError: Invalid arguments. """ def __init__( self, keyids: List[str], threshold: int, unrecognized_fields: Optional[Dict[str, Any]] = None, ): if len(set(keyids)) != len(keyids): raise ValueError(f"Nonunique keyids: {keyids}") if threshold < 1: raise ValueError("threshold should be at least 1!") self.keyids = keyids self.threshold = threshold if unrecognized_fields is None: unrecognized_fields = {} self.unrecognized_fields = unrecognized_fields def __eq__(self, other: object) -> bool: if not isinstance(other, Role): return False return ( self.keyids == other.keyids and self.threshold == other.threshold and self.unrecognized_fields == other.unrecognized_fields ) @classmethod def from_dict(cls, role_dict: Dict[str, Any]) -> "Role": """Create ``Role`` object from its json/dict representation. Raises: ValueError, KeyError: Invalid arguments. """ keyids = role_dict.pop("keyids") threshold = role_dict.pop("threshold") # All fields left in the role_dict are unrecognized. return cls(keyids, threshold, role_dict) def to_dict(self) -> Dict[str, Any]: """Return the dictionary representation of self.""" return { "keyids": self.keyids, "threshold": self.threshold, **self.unrecognized_fields, } @dataclass class VerificationResult: """Signature verification result for delegated role metadata. Attributes: threshold: Number of required signatures. signed: dict of keyid to Key, containing keys that have signed. unsigned: dict of keyid to Key, containing keys that have not signed. """ threshold: int signed: Dict[str, Key] unsigned: Dict[str, Key] def __bool__(self) -> bool: return self.verified @property def verified(self) -> bool: """True if threshold of signatures is met.""" return len(self.signed) >= self.threshold @property def missing(self) -> int: """Number of additional signatures required to reach threshold.""" return max(0, self.threshold - len(self.signed)) @dataclass class RootVerificationResult: """Signature verification result for root metadata. Root must be verified by itself and the previous root version. This dataclass represents both results. 
For the edge case of first version of root, these underlying results are identical. Note that `signed` and `unsigned` correctness requires the underlying VerificationResult keys to not conflict (no reusing the same keyid for different keys). Attributes: first: First underlying VerificationResult second: Second underlying VerificationResult """ first: VerificationResult second: VerificationResult def __bool__(self) -> bool: return self.verified @property def verified(self) -> bool: """True if threshold of signatures is met in both underlying VerificationResults. """ return self.first.verified and self.second.verified @property def signed(self) -> Dict[str, Key]: """Dictionary of all signing keys that have signed, from both VerificationResults. return a union of all signed (in python<3.9 this requires dict unpacking) """ return {**self.first.signed, **self.second.signed} @property def unsigned(self) -> Dict[str, Key]: """Dictionary of all signing keys that have not signed, from both VerificationResults. return a union of all unsigned (in python<3.9 this requires dict unpacking) """ return {**self.first.unsigned, **self.second.unsigned} class _DelegatorMixin(metaclass=abc.ABCMeta): """Class that implements verify_delegate() for Root and Targets""" @abc.abstractmethod def get_delegated_role(self, delegated_role: str) -> Role: """Return the role object for the given delegated role. Raises ValueError if delegated_role is not actually delegated. """ raise NotImplementedError @abc.abstractmethod def get_key(self, keyid: str) -> Key: """Return the key object for the given keyid. Raises ValueError if key is not found. """ raise NotImplementedError def get_verification_result( self, delegated_role: str, payload: bytes, signatures: Dict[str, Signature], ) -> VerificationResult: """Return signature threshold verification result for delegated role. NOTE: Unlike `verify_delegate()` this method does not raise, if the role metadata is not fully verified. Args: delegated_role: Name of the delegated role to verify payload: Signed payload bytes for the delegated role signatures: Signatures over payload bytes Raises: ValueError: no delegation was found for ``delegated_role``. """ role = self.get_delegated_role(delegated_role) signed = {} unsigned = {} for keyid in role.keyids: try: key = self.get_key(keyid) except ValueError: logger.info("No key for keyid %s", keyid) continue if keyid not in signatures: unsigned[keyid] = key logger.info("No signature for keyid %s", keyid) continue sig = signatures[keyid] try: key.verify_signature(sig, payload) signed[keyid] = key except sslib_exceptions.UnverifiedSignatureError: unsigned[keyid] = key logger.info("Key %s failed to verify %s", keyid, delegated_role) return VerificationResult(role.threshold, signed, unsigned) def verify_delegate( self, delegated_role: str, payload: bytes, signatures: Dict[str, Signature], ) -> None: """Verify signature threshold for delegated role. Verify that there are enough valid ``signatures`` over ``payload``, to meet the threshold of keys for ``delegated_role``, as defined by the delegator (``self``). Args: delegated_role: Name of the delegated role to verify payload: Signed payload bytes for the delegated role signatures: Signatures over payload bytes Raises: UnsignedMetadataError: ``delegated_role`` was not signed with required threshold of keys for ``role_name``. ValueError: no delegation was found for ``delegated_role``. 
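
        Example (illustrative; assumes ``payload`` holds the serialized
        bytes of a timestamp metadata file and ``sigs`` its signature
        dictionary)::

            # raises UnsignedMetadataError unless enough timestamp keys
            # have signed payload
            root.verify_delegate(Timestamp.type, payload, sigs)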
""" result = self.get_verification_result( delegated_role, payload, signatures ) if not result: raise UnsignedMetadataError( f"{delegated_role} was signed by {len(result.signed)}/" f"{result.threshold} keys" ) class Root(Signed, _DelegatorMixin): """A container for the signed part of root metadata. Parameters listed below are also instance attributes. Args: version: Metadata version number. Default is 1. spec_version: Supported TUF specification version. Default is the version currently supported by the library. expires: Metadata expiry date. Default is current date and time. keys: Dictionary of keyids to Keys. Defines the keys used in ``roles``. Default is empty dictionary. roles: Dictionary of role names to Roles. Defines which keys are required to sign the metadata for a specific role. Default is a dictionary of top level roles without keys and threshold of 1. consistent_snapshot: ``True`` if repository supports consistent snapshots. Default is True. unrecognized_fields: Dictionary of all attributes that are not managed by TUF Metadata API Raises: ValueError: Invalid arguments. """ type = _ROOT def __init__( self, version: Optional[int] = None, spec_version: Optional[str] = None, expires: Optional[datetime] = None, keys: Optional[Dict[str, Key]] = None, roles: Optional[Dict[str, Role]] = None, consistent_snapshot: Optional[bool] = True, unrecognized_fields: Optional[Dict[str, Any]] = None, ): super().__init__(version, spec_version, expires, unrecognized_fields) self.consistent_snapshot = consistent_snapshot self.keys = keys if keys is not None else {} if roles is None: roles = {r: Role([], 1) for r in TOP_LEVEL_ROLE_NAMES} elif set(roles) != TOP_LEVEL_ROLE_NAMES: raise ValueError("Role names must be the top-level metadata roles") self.roles = roles def __eq__(self, other: object) -> bool: if not isinstance(other, Root): return False return ( super().__eq__(other) and self.keys == other.keys and self.roles == other.roles and self.consistent_snapshot == other.consistent_snapshot ) @classmethod def from_dict(cls, signed_dict: Dict[str, Any]) -> "Root": """Create ``Root`` object from its json/dict representation. Raises: ValueError, KeyError, TypeError: Invalid arguments. """ common_args = cls._common_fields_from_dict(signed_dict) consistent_snapshot = signed_dict.pop("consistent_snapshot", None) keys = signed_dict.pop("keys") roles = signed_dict.pop("roles") for keyid, key_dict in keys.items(): keys[keyid] = Key.from_dict(keyid, key_dict) for role_name, role_dict in roles.items(): roles[role_name] = Role.from_dict(role_dict) # All fields left in the signed_dict are unrecognized. return cls(*common_args, keys, roles, consistent_snapshot, signed_dict) def to_dict(self) -> Dict[str, Any]: """Return the dict representation of self.""" root_dict = self._common_fields_to_dict() keys = {keyid: key.to_dict() for (keyid, key) in self.keys.items()} roles = {} for role_name, role in self.roles.items(): roles[role_name] = role.to_dict() if self.consistent_snapshot is not None: root_dict["consistent_snapshot"] = self.consistent_snapshot root_dict.update( { "keys": keys, "roles": roles, } ) return root_dict def add_key(self, key: Key, role: str) -> None: """Add new signing key for delegated role ``role``. Args: key: Signing key to be added for ``role``. role: Name of the role, for which ``key`` is added. Raises: ValueError: If the argument order is wrong or if ``role`` doesn't exist. """ # Verify that our users are not using the old argument order. 
if isinstance(role, Key): raise ValueError("Role must be a string, not a Key instance") if role not in self.roles: raise ValueError(f"Role {role} doesn't exist") if key.keyid not in self.roles[role].keyids: self.roles[role].keyids.append(key.keyid) self.keys[key.keyid] = key def revoke_key(self, keyid: str, role: str) -> None: """Revoke key from ``role`` and updates the key store. Args: keyid: Identifier of the key to be removed for ``role``. role: Name of the role, for which a signing key is removed. Raises: ValueError: If ``role`` doesn't exist or if ``role`` doesn't include the key. """ if role not in self.roles: raise ValueError(f"Role {role} doesn't exist") if keyid not in self.roles[role].keyids: raise ValueError(f"Key with id {keyid} is not used by {role}") self.roles[role].keyids.remove(keyid) for keyinfo in self.roles.values(): if keyid in keyinfo.keyids: return del self.keys[keyid] def get_delegated_role(self, delegated_role: str) -> Role: """Return the role object for the given delegated role. Raises ValueError if delegated_role is not actually delegated. """ if delegated_role not in self.roles: raise ValueError(f"Delegated role {delegated_role} not found") return self.roles[delegated_role] def get_key(self, keyid: str) -> Key: if keyid not in self.keys: raise ValueError(f"Key {keyid} not found") return self.keys[keyid] def get_root_verification_result( self, previous: Optional["Root"], payload: bytes, signatures: Dict[str, Signature], ) -> RootVerificationResult: """Return signature threshold verification result for two root roles. Verify root metadata with two roles (`self` and optionally `previous`). If the repository has no root role versions yet, `previous` can be left None. In all other cases, `previous` must be the previous version of the Root. NOTE: Unlike `verify_delegate()` this method does not raise, if the root metadata is not fully verified. Args: previous: The previous `Root` to verify payload with, or None payload: Signed payload bytes for root signatures: Signatures over payload bytes Raises: ValueError: no delegation was found for ``root`` or given Root versions are not sequential. """ if previous is None: previous = self elif self.version != previous.version + 1: versions = f"v{previous.version} and v{self.version}" raise ValueError( f"Expected sequential root versions, got {versions}." ) return RootVerificationResult( previous.get_verification_result(Root.type, payload, signatures), self.get_verification_result(Root.type, payload, signatures), ) class BaseFile: """A base class of ``MetaFile`` and ``TargetFile``. Encapsulates common static methods for length and hash verification. 
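
    Example (illustrative, via the ``MetaFile`` subclass defined below)::

        meta = MetaFile.from_data(1, b"content", ["sha256"])
        meta.verify_length_and_hashes(b"content")  # passes
        meta.verify_length_and_hashes(b"changed")  # raises LengthOrHashMismatchError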
""" @staticmethod def _verify_hashes( data: Union[bytes, IO[bytes]], expected_hashes: Dict[str, str] ) -> None: """Verify that the hash of ``data`` matches ``expected_hashes``.""" is_bytes = isinstance(data, bytes) for algo, exp_hash in expected_hashes.items(): try: if is_bytes: digest_object = sslib_hash.digest(algo) digest_object.update(data) else: # if data is not bytes, assume it is a file object digest_object = sslib_hash.digest_fileobject(data, algo) except ( sslib_exceptions.UnsupportedAlgorithmError, sslib_exceptions.FormatError, ) as e: raise LengthOrHashMismatchError( f"Unsupported algorithm '{algo}'" ) from e observed_hash = digest_object.hexdigest() if observed_hash != exp_hash: raise LengthOrHashMismatchError( f"Observed hash {observed_hash} does not match " f"expected hash {exp_hash}" ) @staticmethod def _verify_length( data: Union[bytes, IO[bytes]], expected_length: int ) -> None: """Verify that the length of ``data`` matches ``expected_length``.""" if isinstance(data, bytes): observed_length = len(data) else: # if data is not bytes, assume it is a file object data.seek(0, io.SEEK_END) observed_length = data.tell() if observed_length != expected_length: raise LengthOrHashMismatchError( f"Observed length {observed_length} does not match " f"expected length {expected_length}" ) @staticmethod def _validate_hashes(hashes: Dict[str, str]) -> None: if not hashes: raise ValueError("Hashes must be a non empty dictionary") for key, value in hashes.items(): if not (isinstance(key, str) and isinstance(value, str)): raise TypeError("Hashes items must be strings") @staticmethod def _validate_length(length: int) -> None: if length < 0: raise ValueError(f"Length must be >= 0, got {length}") @staticmethod def _get_length_and_hashes( data: Union[bytes, IO[bytes]], hash_algorithms: Optional[List[str]] ) -> Tuple[int, Dict[str, str]]: """Calculate length and hashes of ``data``.""" if isinstance(data, bytes): length = len(data) else: data.seek(0, io.SEEK_END) length = data.tell() hashes = {} if hash_algorithms is None: hash_algorithms = [sslib_hash.DEFAULT_HASH_ALGORITHM] for algorithm in hash_algorithms: try: if isinstance(data, bytes): digest_object = sslib_hash.digest(algorithm) digest_object.update(data) else: digest_object = sslib_hash.digest_fileobject( data, algorithm ) except ( sslib_exceptions.UnsupportedAlgorithmError, sslib_exceptions.FormatError, ) as e: raise ValueError(f"Unsupported algorithm '{algorithm}'") from e hashes[algorithm] = digest_object.hexdigest() return (length, hashes) class MetaFile(BaseFile): """A container with information about a particular metadata file. *All parameters named below are not just constructor arguments but also instance attributes.* Args: version: Version of the metadata file. length: Length of the metadata file in bytes. hashes: Dictionary of hash algorithm names to hashes of the metadata file content. unrecognized_fields: Dictionary of all attributes that are not managed by TUF Metadata API Raises: ValueError, TypeError: Invalid arguments. 
""" def __init__( self, version: int = 1, length: Optional[int] = None, hashes: Optional[Dict[str, str]] = None, unrecognized_fields: Optional[Dict[str, Any]] = None, ): if version <= 0: raise ValueError(f"Metafile version must be > 0, got {version}") if length is not None: self._validate_length(length) if hashes is not None: self._validate_hashes(hashes) self.version = version self.length = length self.hashes = hashes if unrecognized_fields is None: unrecognized_fields = {} self.unrecognized_fields = unrecognized_fields def __eq__(self, other: object) -> bool: if not isinstance(other, MetaFile): return False return ( self.version == other.version and self.length == other.length and self.hashes == other.hashes and self.unrecognized_fields == other.unrecognized_fields ) @classmethod def from_dict(cls, meta_dict: Dict[str, Any]) -> "MetaFile": """Create ``MetaFile`` object from its json/dict representation. Raises: ValueError, KeyError: Invalid arguments. """ version = meta_dict.pop("version") length = meta_dict.pop("length", None) hashes = meta_dict.pop("hashes", None) # All fields left in the meta_dict are unrecognized. return cls(version, length, hashes, meta_dict) @classmethod def from_data( cls, version: int, data: Union[bytes, IO[bytes]], hash_algorithms: List[str], ) -> "MetaFile": """Creates MetaFile object from bytes. This constructor should only be used if hashes are wanted. By default, MetaFile(ver) should be used. Args: version: Version of the metadata file. data: Metadata bytes that the metafile represents. hash_algorithms: Hash algorithms to create the hashes with. If not specified, the securesystemslib default hash algorithm is used. Raises: ValueError: The hash algorithms list contains an unsupported algorithm. """ length, hashes = cls._get_length_and_hashes(data, hash_algorithms) return cls(version, length, hashes) def to_dict(self) -> Dict[str, Any]: """Return the dictionary representation of self.""" res_dict: Dict[str, Any] = { "version": self.version, **self.unrecognized_fields, } if self.length is not None: res_dict["length"] = self.length if self.hashes is not None: res_dict["hashes"] = self.hashes return res_dict def verify_length_and_hashes(self, data: Union[bytes, IO[bytes]]) -> None: """Verify that the length and hashes of ``data`` match expected values. Args: data: File object or its content in bytes. Raises: LengthOrHashMismatchError: Calculated length or hashes do not match expected values or hash algorithm is not supported. """ if self.length is not None: self._verify_length(data, self.length) if self.hashes is not None: self._verify_hashes(data, self.hashes) class Timestamp(Signed): """A container for the signed part of timestamp metadata. TUF file format uses a dictionary to contain the snapshot information: this is not the case with ``Timestamp.snapshot_meta`` which is a ``MetaFile``. *All parameters named below are not just constructor arguments but also instance attributes.* Args: version: Metadata version number. Default is 1. spec_version: Supported TUF specification version. Default is the version currently supported by the library. expires: Metadata expiry date. Default is current date and time. unrecognized_fields: Dictionary of all attributes that are not managed by TUF Metadata API snapshot_meta: Meta information for snapshot metadata. Default is a MetaFile with version 1. Raises: ValueError: Invalid arguments. 
""" type = _TIMESTAMP def __init__( self, version: Optional[int] = None, spec_version: Optional[str] = None, expires: Optional[datetime] = None, snapshot_meta: Optional[MetaFile] = None, unrecognized_fields: Optional[Dict[str, Any]] = None, ): super().__init__(version, spec_version, expires, unrecognized_fields) self.snapshot_meta = snapshot_meta or MetaFile(1) def __eq__(self, other: object) -> bool: if not isinstance(other, Timestamp): return False return ( super().__eq__(other) and self.snapshot_meta == other.snapshot_meta ) @classmethod def from_dict(cls, signed_dict: Dict[str, Any]) -> "Timestamp": """Create ``Timestamp`` object from its json/dict representation. Raises: ValueError, KeyError: Invalid arguments. """ common_args = cls._common_fields_from_dict(signed_dict) meta_dict = signed_dict.pop("meta") snapshot_meta = MetaFile.from_dict(meta_dict["snapshot.json"]) # All fields left in the timestamp_dict are unrecognized. return cls(*common_args, snapshot_meta, signed_dict) def to_dict(self) -> Dict[str, Any]: """Return the dict representation of self.""" res_dict = self._common_fields_to_dict() res_dict["meta"] = {"snapshot.json": self.snapshot_meta.to_dict()} return res_dict class Snapshot(Signed): """A container for the signed part of snapshot metadata. Snapshot contains information about all target Metadata files. *All parameters named below are not just constructor arguments but also instance attributes.* Args: version: Metadata version number. Default is 1. spec_version: Supported TUF specification version. Default is the version currently supported by the library. expires: Metadata expiry date. Default is current date and time. unrecognized_fields: Dictionary of all attributes that are not managed by TUF Metadata API meta: Dictionary of targets filenames to ``MetaFile`` objects. Default is a dictionary with a Metafile for "snapshot.json" version 1. Raises: ValueError: Invalid arguments. """ type = _SNAPSHOT def __init__( self, version: Optional[int] = None, spec_version: Optional[str] = None, expires: Optional[datetime] = None, meta: Optional[Dict[str, MetaFile]] = None, unrecognized_fields: Optional[Dict[str, Any]] = None, ): super().__init__(version, spec_version, expires, unrecognized_fields) self.meta = meta if meta is not None else {"targets.json": MetaFile(1)} def __eq__(self, other: object) -> bool: if not isinstance(other, Snapshot): return False return super().__eq__(other) and self.meta == other.meta @classmethod def from_dict(cls, signed_dict: Dict[str, Any]) -> "Snapshot": """Create ``Snapshot`` object from its json/dict representation. Raises: ValueError, KeyError: Invalid arguments. """ common_args = cls._common_fields_from_dict(signed_dict) meta_dicts = signed_dict.pop("meta") meta = {} for meta_path, meta_dict in meta_dicts.items(): meta[meta_path] = MetaFile.from_dict(meta_dict) # All fields left in the snapshot_dict are unrecognized. return cls(*common_args, meta, signed_dict) def to_dict(self) -> Dict[str, Any]: """Return the dict representation of self.""" snapshot_dict = self._common_fields_to_dict() meta_dict = {} for meta_path, meta_info in self.meta.items(): meta_dict[meta_path] = meta_info.to_dict() snapshot_dict["meta"] = meta_dict return snapshot_dict class DelegatedRole(Role): """A container with information about a delegated role. 
    A delegation can happen in two ways:

        - ``paths`` is set: delegates targets matching any path pattern in
          ``paths``
        - ``path_hash_prefixes`` is set: delegates targets whose target path
          hash starts with any of the prefixes in ``path_hash_prefixes``

        ``paths`` and ``path_hash_prefixes`` are mutually exclusive:
        both cannot be set, at least one of them must be set.

    *All parameters named below are not just constructor arguments but also
    instance attributes.*

    Args:
        name: Delegated role name.
        keyids: Delegated role signing key identifiers.
        threshold: Number of keys required to sign this role's metadata.
        terminating: ``True`` if this delegation terminates a target lookup.
        paths: Path patterns. See note above.
        path_hash_prefixes: Hash prefixes. See note above.
        unrecognized_fields: Dictionary of all attributes that are not managed
            by TUF Metadata API.

    Raises:
        ValueError: Invalid arguments.
    """

    def __init__(
        self,
        name: str,
        keyids: List[str],
        threshold: int,
        terminating: bool,
        paths: Optional[List[str]] = None,
        path_hash_prefixes: Optional[List[str]] = None,
        unrecognized_fields: Optional[Dict[str, Any]] = None,
    ):
        super().__init__(keyids, threshold, unrecognized_fields)
        self.name = name
        self.terminating = terminating
        exclusive_vars = [paths, path_hash_prefixes]
        if sum(1 for var in exclusive_vars if var is not None) != 1:
            raise ValueError(
                "Only one of (paths, path_hash_prefixes) must be set"
            )

        if paths is not None and any(not isinstance(p, str) for p in paths):
            raise ValueError("Paths must be strings")
        if path_hash_prefixes is not None and any(
            not isinstance(p, str) for p in path_hash_prefixes
        ):
            raise ValueError("Path_hash_prefixes must be strings")

        self.paths = paths
        self.path_hash_prefixes = path_hash_prefixes

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, DelegatedRole):
            return False

        return (
            super().__eq__(other)
            and self.name == other.name
            and self.terminating == other.terminating
            and self.paths == other.paths
            and self.path_hash_prefixes == other.path_hash_prefixes
        )

    @classmethod
    def from_dict(cls, role_dict: Dict[str, Any]) -> "DelegatedRole":
        """Create ``DelegatedRole`` object from its json/dict representation.

        Raises:
            ValueError, KeyError, TypeError: Invalid arguments.
        """
        name = role_dict.pop("name")
        keyids = role_dict.pop("keyids")
        threshold = role_dict.pop("threshold")
        terminating = role_dict.pop("terminating")
        paths = role_dict.pop("paths", None)
        path_hash_prefixes = role_dict.pop("path_hash_prefixes", None)
        # All fields left in the role_dict are unrecognized.
        return cls(
            name,
            keyids,
            threshold,
            terminating,
            paths,
            path_hash_prefixes,
            role_dict,
        )

    def to_dict(self) -> Dict[str, Any]:
        """Return the dict representation of self."""
        base_role_dict = super().to_dict()
        res_dict = {
            "name": self.name,
            "terminating": self.terminating,
            **base_role_dict,
        }
        if self.paths is not None:
            res_dict["paths"] = self.paths
        elif self.path_hash_prefixes is not None:
            res_dict["path_hash_prefixes"] = self.path_hash_prefixes
        return res_dict

    @staticmethod
    def _is_target_in_pathpattern(targetpath: str, pathpattern: str) -> bool:
        """Determine whether ``targetpath`` matches the ``pathpattern``."""
        # We need to make sure that targetpath and pathpattern are pointing to
        # the same directory as fnmatch doesn't treat "/" as a special symbol.
        target_parts = targetpath.split("/")
        pattern_parts = pathpattern.split("/")
        if len(target_parts) != len(pattern_parts):
            return False

        # Every part in the pathpattern could include a glob pattern, that's
        # why each of the target and pathpattern parts should match.
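        # For example (illustrative): pattern "foo/*.txt" matches
        # "foo/bar.txt", but not "foo/sub/bar.txt" (different number of
        # parts) and not "other/bar.txt" ("other" does not match "foo").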
for target_dir, pattern_dir in zip(target_parts, pattern_parts): if not fnmatch.fnmatch(target_dir, pattern_dir): return False return True def is_delegated_path(self, target_filepath: str) -> bool: """Determine whether the given ``target_filepath`` is in one of the paths that ``DelegatedRole`` is trusted to provide. The ``target_filepath`` and the ``DelegatedRole`` paths are expected to be in their canonical forms, so e.g. "a/b" instead of "a//b" . Only "/" is supported as target path separator. Leading separators are not handled as special cases (see `TUF specification on targetpath `_). Args: target_filepath: URL path to a target file, relative to a base targets URL. """ if self.path_hash_prefixes is not None: # Calculate the hash of the filepath # to determine in which bin to find the target. digest_object = sslib_hash.digest(algorithm="sha256") digest_object.update(target_filepath.encode("utf-8")) target_filepath_hash = digest_object.hexdigest() for path_hash_prefix in self.path_hash_prefixes: if target_filepath_hash.startswith(path_hash_prefix): return True elif self.paths is not None: for pathpattern in self.paths: # A delegated role path may be an explicit path or glob # pattern (Unix shell-style wildcards). if self._is_target_in_pathpattern(target_filepath, pathpattern): return True return False class SuccinctRoles(Role): """Succinctly defines a hash bin delegation graph. A ``SuccinctRoles`` object describes a delegation graph that covers all targets, distributing them uniformly over the delegated roles (i.e. bins) in the graph. The total number of bins is 2 to the power of the passed ``bit_length``. Bin names are the concatenation of the passed ``name_prefix`` and a zero-padded hex representation of the bin index separated by a hyphen. The passed ``keyids`` and ``threshold`` is used for each bin, and each bin is 'terminating'. For details: https://github.com/theupdateframework/taps/blob/master/tap15.md Args: keyids: Signing key identifiers for any bin metadata. threshold: Number of keys required to sign any bin metadata. bit_length: Number of bits between 1 and 32. name_prefix: Prefix of all bin names. unrecognized_fields: Dictionary of all attributes that are not managed by TUF Metadata API. Raises: ValueError, TypeError, AttributeError: Invalid arguments. """ def __init__( self, keyids: List[str], threshold: int, bit_length: int, name_prefix: str, unrecognized_fields: Optional[Dict[str, Any]] = None, ) -> None: super().__init__(keyids, threshold, unrecognized_fields) if bit_length <= 0 or bit_length > 32: raise ValueError("bit_length must be between 1 and 32") if not isinstance(name_prefix, str): raise ValueError("name_prefix must be a string") self.bit_length = bit_length self.name_prefix = name_prefix # Calculate the suffix_len value based on the total number of bins in # hex. If bit_length = 10 then number_of_bins = 1024 or bin names will # have a suffix between "000" and "3ff" in hex and suffix_len will be 3 # meaning the third bin will have a suffix of "003". self.number_of_bins = 2**bit_length # suffix_len is calculated based on "number_of_bins - 1" as the name # of the last bin contains the number "number_of_bins -1" as a suffix. 
self.suffix_len = len(f"{self.number_of_bins-1:x}") def __eq__(self, other: object) -> bool: if not isinstance(other, SuccinctRoles): return False return ( super().__eq__(other) and self.bit_length == other.bit_length and self.name_prefix == other.name_prefix ) @classmethod def from_dict(cls, role_dict: Dict[str, Any]) -> "SuccinctRoles": """Create ``SuccinctRoles`` object from its json/dict representation. Raises: ValueError, KeyError, AttributeError, TypeError: Invalid arguments. """ keyids = role_dict.pop("keyids") threshold = role_dict.pop("threshold") bit_length = role_dict.pop("bit_length") name_prefix = role_dict.pop("name_prefix") # All fields left in the role_dict are unrecognized. return cls(keyids, threshold, bit_length, name_prefix, role_dict) def to_dict(self) -> Dict[str, Any]: """Return the dict representation of self.""" base_role_dict = super().to_dict() return { "bit_length": self.bit_length, "name_prefix": self.name_prefix, **base_role_dict, } def get_role_for_target(self, target_filepath: str) -> str: """Calculate the name of the delegated role responsible for ``target_filepath``. The target at path ``target_filepath`` is assigned to a bin by casting the left-most ``bit_length`` bits of the file path hash digest to int, using it as bin index between 0 and ``2**bit_length - 1``. Args: target_filepath: URL path to a target file, relative to a base targets URL. """ hasher = sslib_hash.digest(algorithm="sha256") hasher.update(target_filepath.encode("utf-8")) # We can't ever need more than 4 bytes (32 bits). hash_bytes = hasher.digest()[:4] # Right shift hash bytes, so that we only have the leftmost # bit_length bits that we care about. shift_value = 32 - self.bit_length bin_number = int.from_bytes(hash_bytes, byteorder="big") >> shift_value # Add zero padding if necessary and convert the suffix to hex. suffix = f"{bin_number:0{self.suffix_len}x}" return f"{self.name_prefix}-{suffix}" def get_roles(self) -> Iterator[str]: """Yield the names of all different delegated roles one by one.""" for i in range(self.number_of_bins): suffix = f"{i:0{self.suffix_len}x}" yield f"{self.name_prefix}-{suffix}" def is_delegated_role(self, role_name: str) -> bool: """Determine whether the given ``role_name`` is in one of the delegated roles that ``SuccinctRoles`` represents. Args: role_name: The name of the role to check against. """ desired_prefix = self.name_prefix + "-" if not role_name.startswith(desired_prefix): return False suffix = role_name[len(desired_prefix) :] if len(suffix) != self.suffix_len: return False try: # make sure suffix is a hex value num = int(suffix, 16) except ValueError: return False return 0 <= num < self.number_of_bins class Delegations: """A container object storing information about all delegations. *All parameters named below are not just constructor arguments but also instance attributes.* Args: keys: Dictionary of keyids to Keys. Defines the keys used in ``roles``. roles: Ordered dictionary of role names to DelegatedRole instances. It defines which keys are required to sign the metadata for a specific role. The roles order also defines the order that role delegations are considered during target searches. succinct_roles: Contains succinct information about hash bin delegations. Note that succinct roles is not a TUF specification feature yet and setting `succinct_roles` to a value makes the resulting metadata non-compliant. The metadata will not be accepted as valid by specification compliant clients such as those built with python-tuf <= 1.1.0.
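# -----------------------------------------------------------------------
# Illustrative aside (a sketch, not part of this module): the bin-index
# arithmetic of ``get_role_for_target`` reproduced with hashlib.
import hashlib

from tuf.api.metadata import SuccinctRoles

_bins = SuccinctRoles([], 1, bit_length=8, name_prefix="bin")  # 256 bins
_digest = hashlib.sha256(b"path/to/file").digest()
_index = int.from_bytes(_digest[:4], "big") >> (32 - 8)
assert _bins.get_role_for_target("path/to/file") == f"bin-{_index:02x}"
assert _bins.is_delegated_role(f"bin-{_index:02x}")
# -----------------------------------------------------------------------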
For more information see: https://github.com/theupdateframework/taps/blob/master/tap15.md unrecognized_fields: Dictionary of all attributes that are not managed by TUF Metadata API. Exactly one of ``roles`` and ``succinct_roles`` must be set. Raises: ValueError: Invalid arguments. """ def __init__( self, keys: Dict[str, Key], roles: Optional[Dict[str, DelegatedRole]] = None, succinct_roles: Optional[SuccinctRoles] = None, unrecognized_fields: Optional[Dict[str, Any]] = None, ): self.keys = keys if sum(1 for v in [roles, succinct_roles] if v is not None) != 1: raise ValueError("Exactly one of roles and succinct_roles must be set") if roles is not None: for role in roles: if not role or role in TOP_LEVEL_ROLE_NAMES: raise ValueError( "Delegated roles cannot be empty or use top-level " "role names" ) self.roles = roles self.succinct_roles = succinct_roles if unrecognized_fields is None: unrecognized_fields = {} self.unrecognized_fields = unrecognized_fields def __eq__(self, other: object) -> bool: if not isinstance(other, Delegations): return False all_attributes_check = ( self.keys == other.keys and self.roles == other.roles and self.succinct_roles == other.succinct_roles and self.unrecognized_fields == other.unrecognized_fields ) if self.roles is not None and other.roles is not None: all_attributes_check = ( all_attributes_check # Order of the delegated roles matters (see issue #1788). and list(self.roles.items()) == list(other.roles.items()) ) return all_attributes_check @classmethod def from_dict(cls, delegations_dict: Dict[str, Any]) -> "Delegations": """Create ``Delegations`` object from its json/dict representation. Raises: ValueError, KeyError, TypeError: Invalid arguments. """ keys = delegations_dict.pop("keys") keys_res = {} for keyid, key_dict in keys.items(): keys_res[keyid] = Key.from_dict(keyid, key_dict) roles = delegations_dict.pop("roles", None) roles_res: Optional[Dict[str, DelegatedRole]] = None if roles is not None: roles_res = {} for role_dict in roles: new_role = DelegatedRole.from_dict(role_dict) if new_role.name in roles_res: raise ValueError(f"Duplicate role {new_role.name}") roles_res[new_role.name] = new_role succinct_roles_dict = delegations_dict.pop("succinct_roles", None) succinct_roles_info = None if succinct_roles_dict is not None: succinct_roles_info = SuccinctRoles.from_dict(succinct_roles_dict) # All fields left in the delegations_dict are unrecognized. return cls(keys_res, roles_res, succinct_roles_info, delegations_dict) def to_dict(self) -> Dict[str, Any]: """Return the dict representation of self.""" keys = {keyid: key.to_dict() for keyid, key in self.keys.items()} res_dict: Dict[str, Any] = { "keys": keys, **self.unrecognized_fields, } if self.roles is not None: roles = [role_obj.to_dict() for role_obj in self.roles.values()] res_dict["roles"] = roles elif self.succinct_roles is not None: res_dict["succinct_roles"] = self.succinct_roles.to_dict() return res_dict def get_roles_for_target( self, target_filepath: str ) -> Iterator[Tuple[str, bool]]: """Given ``target_filepath`` get names and terminating status of all delegated roles that are responsible for it. Args: target_filepath: URL path to a target file, relative to a base targets URL. """ if self.roles is not None: for role in self.roles.values(): if role.is_delegated_path(target_filepath): yield role.name, role.terminating elif self.succinct_roles is not None: # We consider all succinct_roles as terminating. # For more information read TAP 15.
yield self.succinct_roles.get_role_for_target(target_filepath), True class TargetFile(BaseFile): """A container with information about a particular target file. *All parameters named below are not just constructor arguments but also instance attributes.* Args: length: Length of the target file in bytes. hashes: Dictionary of hash algorithm names to hashes of the target file content. path: URL path to a target file, relative to a base targets URL. unrecognized_fields: Dictionary of all attributes that are not managed by TUF Metadata API Raises: ValueError, TypeError: Invalid arguments. """ def __init__( self, length: int, hashes: Dict[str, str], path: str, unrecognized_fields: Optional[Dict[str, Any]] = None, ): self._validate_length(length) self._validate_hashes(hashes) self.length = length self.hashes = hashes self.path = path if unrecognized_fields is None: unrecognized_fields = {} self.unrecognized_fields = unrecognized_fields @property def custom(self) -> Any: # noqa: ANN401 """Get implementation specific data related to the target. python-tuf does not use or validate this data. """ return self.unrecognized_fields.get("custom") def __eq__(self, other: object) -> bool: if not isinstance(other, TargetFile): return False return ( self.length == other.length and self.hashes == other.hashes and self.path == other.path and self.unrecognized_fields == other.unrecognized_fields ) @classmethod def from_dict(cls, target_dict: Dict[str, Any], path: str) -> "TargetFile": """Create ``TargetFile`` object from its json/dict representation. Raises: ValueError, KeyError, TypeError: Invalid arguments. """ length = target_dict.pop("length") hashes = target_dict.pop("hashes") # All fields left in the target_dict are unrecognized. return cls(length, hashes, path, target_dict) def to_dict(self) -> Dict[str, Any]: """Return the JSON-serializable dictionary representation of self.""" return { "length": self.length, "hashes": self.hashes, **self.unrecognized_fields, } @classmethod def from_file( cls, target_file_path: str, local_path: str, hash_algorithms: Optional[List[str]] = None, ) -> "TargetFile": """Create ``TargetFile`` object from a file. Args: target_file_path: URL path to a target file, relative to a base targets URL. local_path: Local path to target file content. hash_algorithms: Hash algorithms to calculate hashes with. If not specified the securesystemslib default hash algorithm is used. Raises: FileNotFoundError: The file doesn't exist. ValueError: The hash algorithms list contains an unsupported algorithm. """ with open(local_path, "rb") as file: return cls.from_data(target_file_path, file, hash_algorithms) @classmethod def from_data( cls, target_file_path: str, data: Union[bytes, IO[bytes]], hash_algorithms: Optional[List[str]] = None, ) -> "TargetFile": """Create ``TargetFile`` object from bytes. Args: target_file_path: URL path to a target file, relative to a base targets URL. data: Target file content. hash_algorithms: Hash algorithms to create the hashes with. If not specified the securesystemslib default hash algorithm is used. Raises: ValueError: The hash algorithms list contains an unsupported algorithm. """ length, hashes = cls._get_length_and_hashes(data, hash_algorithms) return cls(length, hashes, target_file_path) def verify_length_and_hashes(self, data: Union[bytes, IO[bytes]]) -> None: """Verify that length and hashes of ``data`` match expected values. Args: data: Target file object or its content in bytes. 
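# -----------------------------------------------------------------------
# Illustrative aside (a sketch, not part of this module): building a
# ``TargetFile`` from bytes and checking content against it.
from tuf.api.exceptions import LengthOrHashMismatchError
from tuf.api.metadata import TargetFile

_tf = TargetFile.from_data("file1.txt", b"hello world", ["sha256"])
_tf.verify_length_and_hashes(b"hello world")  # matching content: no error
try:
    _tf.verify_length_and_hashes(b"tampered")
except LengthOrHashMismatchError:
    pass  # mismatching content is rejected
# -----------------------------------------------------------------------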
Raises: LengthOrHashMismatchError: Calculated length or hashes do not match expected values or hash algorithm is not supported. """ self._verify_length(data, self.length) self._verify_hashes(data, self.hashes) def get_prefixed_paths(self) -> List[str]: """ Return hash-prefixed URL path fragments for the target file path. """ paths = [] parent, sep, name = self.path.rpartition("/") for hash_value in self.hashes.values(): paths.append(f"{parent}{sep}{hash_value}.{name}") return paths class Targets(Signed, _DelegatorMixin): """A container for the signed part of targets metadata. Targets contains verifying information about target files and also delegates responsibility to other Targets roles. *All parameters named below are not just constructor arguments but also instance attributes.* Args: version: Metadata version number. Default is 1. spec_version: Supported TUF specification version. Default is the version currently supported by the library. expires: Metadata expiry date. Default is current date and time. targets: Dictionary of target filenames to TargetFiles. Default is an empty dictionary. delegations: Defines how this Targets delegates responsibility to other Targets Metadata files. Default is None. unrecognized_fields: Dictionary of all attributes that are not managed by TUF Metadata API Raises: ValueError: Invalid arguments. """ type = _TARGETS def __init__( self, version: Optional[int] = None, spec_version: Optional[str] = None, expires: Optional[datetime] = None, targets: Optional[Dict[str, TargetFile]] = None, delegations: Optional[Delegations] = None, unrecognized_fields: Optional[Dict[str, Any]] = None, ) -> None: super().__init__(version, spec_version, expires, unrecognized_fields) self.targets = targets if targets is not None else {} self.delegations = delegations def __eq__(self, other: object) -> bool: if not isinstance(other, Targets): return False return ( super().__eq__(other) and self.targets == other.targets and self.delegations == other.delegations ) @classmethod def from_dict(cls, signed_dict: Dict[str, Any]) -> "Targets": """Create ``Targets`` object from its json/dict representation. Raises: ValueError, KeyError, TypeError: Invalid arguments. """ common_args = cls._common_fields_from_dict(signed_dict) targets = signed_dict.pop(_TARGETS) try: delegations_dict = signed_dict.pop("delegations") except KeyError: delegations = None else: delegations = Delegations.from_dict(delegations_dict) res_targets = {} for target_path, target_info in targets.items(): res_targets[target_path] = TargetFile.from_dict( target_info, target_path ) # All fields left in the targets_dict are unrecognized. return cls(*common_args, res_targets, delegations, signed_dict) def to_dict(self) -> Dict[str, Any]: """Return the dict representation of self.""" targets_dict = self._common_fields_to_dict() targets = {} for target_path, target_file_obj in self.targets.items(): targets[target_path] = target_file_obj.to_dict() targets_dict[_TARGETS] = targets if self.delegations is not None: targets_dict["delegations"] = self.delegations.to_dict() return targets_dict def add_key(self, key: Key, role: Optional[str] = None) -> None: """Add new signing key for delegated role ``role``. If succinct_roles is used then the ``role`` argument is not required. Args: key: Signing key to be added for ``role``. role: Name of the role, for which ``key`` is added. Raises: ValueError: If the argument order is wrong or if there are no delegated roles or if ``role`` is not delegated by this Target. 
""" # Verify that our users are not using the old argument order. if isinstance(role, Key): raise ValueError("Role must be a string, not a Key instance") if self.delegations is None: raise ValueError(f"Delegated role {role} doesn't exist") if self.delegations.roles is not None: if role not in self.delegations.roles: raise ValueError(f"Delegated role {role} doesn't exist") if key.keyid not in self.delegations.roles[role].keyids: self.delegations.roles[role].keyids.append(key.keyid) elif self.delegations.succinct_roles is not None: if key.keyid not in self.delegations.succinct_roles.keyids: self.delegations.succinct_roles.keyids.append(key.keyid) self.delegations.keys[key.keyid] = key def revoke_key(self, keyid: str, role: Optional[str] = None) -> None: """Revokes key from delegated role ``role`` and updates the delegations key store. If succinct_roles is used then the ``role`` argument is not required. Args: keyid: Identifier of the key to be removed for ``role``. role: Name of the role, for which a signing key is removed. Raises: ValueError: If there are no delegated roles or if ``role`` is not delegated by this ``Target`` or if key is not used by ``role`` or if key with id ``keyid`` is not used by succinct roles. """ if self.delegations is None: raise ValueError(f"Delegated role {role} doesn't exist") if self.delegations.roles is not None: if role not in self.delegations.roles: raise ValueError(f"Delegated role {role} doesn't exist") if keyid not in self.delegations.roles[role].keyids: raise ValueError(f"Key with id {keyid} is not used by {role}") self.delegations.roles[role].keyids.remove(keyid) for keyinfo in self.delegations.roles.values(): if keyid in keyinfo.keyids: return elif self.delegations.succinct_roles is not None: if keyid not in self.delegations.succinct_roles.keyids: raise ValueError( f"Key with id {keyid} is not used by succinct_roles" ) self.delegations.succinct_roles.keyids.remove(keyid) del self.delegations.keys[keyid] def get_delegated_role(self, delegated_role: str) -> Role: """Return the role object for the given delegated role. Raises ValueError if delegated_role is not actually delegated. """ if self.delegations is None: raise ValueError("No delegations found") role: Optional[Role] = None if self.delegations.roles is not None: role = self.delegations.roles.get(delegated_role) elif self.delegations.succinct_roles is not None: succinct = self.delegations.succinct_roles if succinct.is_delegated_role(delegated_role): role = succinct if not role: raise ValueError(f"Delegated role {delegated_role} not found") return role def get_key(self, keyid: str) -> Key: if self.delegations is None: raise ValueError("No delegations found") if keyid not in self.delegations.keys: raise ValueError(f"Key {keyid} not found") return self.delegations.keys[keyid] python-tuf-5.1.0/tuf/api/dsse.py000066400000000000000000000104251470074210500165130ustar00rootroot00000000000000"""Low-level TUF DSSE API. (experimental!)""" import json from typing import Generic, Type, cast from securesystemslib.dsse import Envelope as BaseSimpleEnvelope # Expose all payload classes to use API independently of ``tuf.api.metadata``. 
from tuf.api._payload import ( # noqa: F401 _ROOT, _SNAPSHOT, _TARGETS, _TIMESTAMP, SPECIFICATION_VERSION, TOP_LEVEL_ROLE_NAMES, BaseFile, DelegatedRole, Delegations, MetaFile, Role, Root, RootVerificationResult, Signed, Snapshot, SuccinctRoles, T, TargetFile, Targets, Timestamp, VerificationResult, ) from tuf.api.serialization import DeserializationError, SerializationError class SimpleEnvelope(Generic[T], BaseSimpleEnvelope): """Dead Simple Signing Envelope (DSSE) for TUF payloads. * Sign with ``self.sign()`` (inherited). * Verify with ``verify_delegate`` on a ``Root`` or ``Targets`` object:: delegator.verify_delegate( role_name, envelope.pae(), # Note how we don't pass ``envelope.payload``! envelope.signatures, ) Attributes: payload: Serialized payload bytes. payload_type: Payload string identifier. signatures: Ordered dictionary of keyids to ``Signature`` objects. """ DEFAULT_PAYLOAD_TYPE = "application/vnd.tuf+json" @classmethod def from_bytes(cls, data: bytes) -> "SimpleEnvelope[T]": """Load envelope from JSON bytes. NOTE: Unlike ``tuf.api.metadata.Metadata.from_bytes``, this method does not deserialize the contained payload. Use ``self.get_signed`` to deserialize the payload into a ``Signed`` object. Args: data: envelope JSON bytes. Raises: tuf.api.serialization.DeserializationError: data cannot be deserialized. Returns: TUF ``SimpleEnvelope`` object. """ try: envelope_dict = json.loads(data.decode()) envelope = SimpleEnvelope.from_dict(envelope_dict) except Exception as e: raise DeserializationError from e return envelope def to_bytes(self) -> bytes: """Return envelope as JSON bytes. NOTE: Unlike ``tuf.api.metadata.Metadata.to_bytes``, this method does not serialize the payload. Use ``SimpleEnvelope.from_signed`` to serialize a ``Signed`` object and wrap it in a SimpleEnvelope. Raises: tuf.api.serialization.SerializationError: self cannot be serialized. """ try: envelope_dict = self.to_dict() json_bytes = json.dumps(envelope_dict).encode() except Exception as e: raise SerializationError from e return json_bytes @classmethod def from_signed(cls, signed: T) -> "SimpleEnvelope[T]": """Serialize payload as JSON bytes and wrap in envelope. Args: signed: ``Signed`` object. Raises: tuf.api.serialization.SerializationError: The signed object cannot be serialized. """ try: signed_dict = signed.to_dict() json_bytes = json.dumps(signed_dict).encode() except Exception as e: raise SerializationError from e return cls(json_bytes, cls.DEFAULT_PAYLOAD_TYPE, {}) def get_signed(self) -> T: """Extract and deserialize payload JSON bytes from envelope. Raises: tuf.api.serialization.DeserializationError: The signed object cannot be deserialized. """ try: payload_dict = json.loads(self.payload.decode()) # TODO: can we move this to tuf.api._payload? _type = payload_dict["_type"] if _type == _TARGETS: inner_cls: Type[Signed] = Targets elif _type == _SNAPSHOT: inner_cls = Snapshot elif _type == _TIMESTAMP: inner_cls = Timestamp elif _type == _ROOT: inner_cls = Root else: raise ValueError(f'unrecognized role type "{_type}"') except Exception as e: raise DeserializationError from e return cast(T, inner_cls.from_dict(payload_dict)) python-tuf-5.1.0/tuf/api/exceptions.py000066400000000000000000000037571470074210500177470ustar00rootroot00000000000000# Copyright New York University and the TUF contributors # SPDX-License-Identifier: MIT OR Apache-2.0 """ Define TUF exceptions used inside the new modern implementation.
The names chosen for TUF exception classes should end in 'Error', except where there is a good reason not to; in such cases the class docstring should state the reason. """ #### Repository errors #### from securesystemslib.exceptions import StorageError # noqa: F401 class RepositoryError(Exception): """An error with a repository's state, such as a missing file. It covers all exceptions that come from the repository side when looking from the perspective of users of metadata API or ngclient. """ class UnsignedMetadataError(RepositoryError): """An error about a metadata object with an insufficient threshold of signatures. """ class BadVersionNumberError(RepositoryError): """An error for metadata that contains an invalid version number.""" class EqualVersionNumberError(BadVersionNumberError): """An error for metadata containing a previously verified version number.""" class ExpiredMetadataError(RepositoryError): """Indicate that a TUF Metadata file has expired.""" class LengthOrHashMismatchError(RepositoryError): """An error while checking the length and hash values of an object.""" #### Download Errors #### class DownloadError(Exception): """An error occurred while attempting to download a file.""" class DownloadLengthMismatchError(DownloadError): """Indicate that a mismatch of lengths was seen while downloading a file.""" class SlowRetrievalError(DownloadError): """Indicate that downloading a file took an unreasonably long time.""" class DownloadHTTPError(DownloadError): """ Returned by FetcherInterface implementations for HTTP errors. Args: message: The HTTP error message status_code: The HTTP status code """ def __init__(self, message: str, status_code: int): super().__init__(message) self.status_code = status_code python-tuf-5.1.0/tuf/api/metadata.py000066400000000000000000000345251470074210500173430ustar00rootroot00000000000000# Copyright New York University and the TUF contributors # SPDX-License-Identifier: MIT OR Apache-2.0 """The low-level Metadata API. The low-level Metadata API in ``tuf.api.metadata`` module contains: * Safe de/serialization of metadata to and from files. * Access to and modification of signed metadata content. * Signing metadata and verifying signatures. Metadata API implements functionality at the metadata file level; it does not provide TUF repository or client functionality on its own (but can be used to implement them). The API design is based on the file format defined in the `TUF specification <https://theupdateframework.github.io/specification/latest/>`_ and the object attributes generally follow the JSON format used in the specification. The above principle means that a ``Metadata`` object represents a single metadata file, and has a ``signed`` attribute that is an instance of one of the four top level signed classes (``Root``, ``Timestamp``, ``Snapshot`` and ``Targets``). To make Python type annotations useful ``Metadata`` can be type constrained: e.g. the signed attribute of ``Metadata[Root]`` is known to be ``Root``. Currently Metadata API supports JSON as the file format. A basic example of repository implementation using the Metadata API is available in `examples/repository <https://github.com/theupdateframework/python-tuf/tree/develop/examples/repository>`_. """ import logging import tempfile from typing import Any, Dict, Generic, Optional, Type, cast from securesystemslib.signer import Signature, Signer from securesystemslib.storage import FilesystemBackend, StorageBackendInterface # Expose payload classes via ``tuf.api.metadata`` to maintain the API, # even if they are unused in the local scope.
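# -----------------------------------------------------------------------
# Illustrative aside (a sketch, not part of this module): client code
# typically branches on the two error families defined in
# ``tuf.api.exceptions``. ``do_update`` is a hypothetical callable.
from tuf.api import exceptions

def _handle_update_example(do_update) -> None:
    """Run one update step, mapping TUF errors to user-facing output."""
    try:
        do_update()
    except exceptions.DownloadError as e:
        print(f"network or download problem: {e!r}")
    except exceptions.RepositoryError as e:
        print(f"rejected repository metadata: {e!r}")
# -----------------------------------------------------------------------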
from tuf.api._payload import ( # noqa: F401 _ROOT, _SNAPSHOT, _TARGETS, _TIMESTAMP, SPECIFICATION_VERSION, TOP_LEVEL_ROLE_NAMES, BaseFile, DelegatedRole, Delegations, Key, LengthOrHashMismatchError, MetaFile, Role, Root, RootVerificationResult, Signed, Snapshot, SuccinctRoles, T, TargetFile, Targets, Timestamp, VerificationResult, ) from tuf.api.exceptions import UnsignedMetadataError from tuf.api.serialization import ( MetadataDeserializer, MetadataSerializer, SignedSerializer, ) logger = logging.getLogger(__name__) class Metadata(Generic[T]): """A container for signed TUF metadata. Provides methods to convert to and from dictionary, read and write to and from file and to create and verify metadata signatures. ``Metadata[T]`` is a generic container type where T can be any one type of [``Root``, ``Timestamp``, ``Snapshot``, ``Targets``]. The purpose of this is to allow static type checking of the signed attribute in code using Metadata:: root_md = Metadata[Root].from_file("root.json") # root_md type is now Metadata[Root]. This means signed and its # attributes like consistent_snapshot are now statically typed and the # types can be verified by static type checkers and shown by IDEs print(root_md.signed.consistent_snapshot) Using a type constraint is not required but not doing so means T is not a specific type so static typing cannot happen. Note that the type constraint ``[Root]`` is not validated at runtime (as pure annotations are not available then). New Metadata instances can be created from scratch with:: one_day = datetime.now(timezone.utc) + timedelta(days=1) timestamp = Metadata(Timestamp(expires=one_day)) Apart from ``expires`` all of the arguments to the inner constructors have reasonable default values for new metadata. *All parameters named below are not just constructor arguments but also instance attributes.* Args: signed: Actual metadata payload, i.e. one of ``Targets``, ``Snapshot``, ``Timestamp`` or ``Root``. signatures: Ordered dictionary of keyids to ``Signature`` objects, each signing the canonical serialized representation of ``signed``. Default is an empty dictionary. unrecognized_fields: Dictionary of all attributes that are not managed by TUF Metadata API. These fields are NOT signed and it's preferable if unrecognized fields are added to the Signed derivative classes. """ def __init__( self, signed: T, signatures: Optional[Dict[str, Signature]] = None, unrecognized_fields: Optional[Dict[str, Any]] = None, ): self.signed: T = signed self.signatures = signatures if signatures is not None else {} if unrecognized_fields is None: unrecognized_fields = {} self.unrecognized_fields = unrecognized_fields def __eq__(self, other: object) -> bool: if not isinstance(other, Metadata): return False return ( self.signatures == other.signatures # Order of the signatures matters (see issue #1788). and list(self.signatures.items()) == list(other.signatures.items()) and self.signed == other.signed and self.unrecognized_fields == other.unrecognized_fields ) @property def signed_bytes(self) -> bytes: """Default canonical json byte representation of ``self.signed``.""" # Use local scope import to avoid circular import errors from tuf.api.serialization.json import CanonicalJSONSerializer return CanonicalJSONSerializer().serialize(self.signed) @classmethod def from_dict(cls, metadata: Dict[str, Any]) -> "Metadata[T]": """Create ``Metadata`` object from its json/dict representation. Args: metadata: TUF metadata in dict representation. 
Raises: ValueError, KeyError, TypeError: Invalid arguments. Side Effect: Destroys the metadata dict passed by reference. Returns: TUF ``Metadata`` object. """ # Dispatch to contained metadata class on metadata _type field. _type = metadata["signed"]["_type"] if _type == _TARGETS: inner_cls: Type[Signed] = Targets elif _type == _SNAPSHOT: inner_cls = Snapshot elif _type == _TIMESTAMP: inner_cls = Timestamp elif _type == _ROOT: inner_cls = Root else: raise ValueError(f'unrecognized metadata type "{_type}"') # Make sure signatures are unique signatures: Dict[str, Signature] = {} for sig_dict in metadata.pop("signatures"): sig = Signature.from_dict(sig_dict) if sig.keyid in signatures: raise ValueError( f"Multiple signatures found for keyid {sig.keyid}" ) signatures[sig.keyid] = sig return cls( # Specific type T is not known at static type check time: use cast signed=cast(T, inner_cls.from_dict(metadata.pop("signed"))), signatures=signatures, # All fields left in the metadata dict are unrecognized. unrecognized_fields=metadata, ) @classmethod def from_file( cls, filename: str, deserializer: Optional[MetadataDeserializer] = None, storage_backend: Optional[StorageBackendInterface] = None, ) -> "Metadata[T]": """Load TUF metadata from file storage. Args: filename: Path to read the file from. deserializer: ``MetadataDeserializer`` subclass instance that implements the desired wireline format deserialization. Per default a ``JSONDeserializer`` is used. storage_backend: Object that implements ``securesystemslib.storage.StorageBackendInterface``. Default is ``FilesystemBackend`` (i.e. a local file). Raises: StorageError: The file cannot be read. tuf.api.serialization.DeserializationError: The file cannot be deserialized. Returns: TUF ``Metadata`` object. """ if storage_backend is None: storage_backend = FilesystemBackend() with storage_backend.get(filename) as file_obj: return cls.from_bytes(file_obj.read(), deserializer) @classmethod def from_bytes( cls, data: bytes, deserializer: Optional[MetadataDeserializer] = None, ) -> "Metadata[T]": """Load TUF metadata from raw data. Args: data: Metadata content. deserializer: ``MetadataDeserializer`` implementation to use. Default is ``JSONDeserializer``. Raises: tuf.api.serialization.DeserializationError: The file cannot be deserialized. Returns: TUF ``Metadata`` object. """ if deserializer is None: # Use local scope import to avoid circular import errors from tuf.api.serialization.json import JSONDeserializer deserializer = JSONDeserializer() return deserializer.deserialize(data) def to_bytes( self, serializer: Optional[MetadataSerializer] = None ) -> bytes: """Return the serialized TUF file format as bytes. Note that if bytes are first deserialized into ``Metadata`` and then serialized with ``to_bytes()``, the two are not required to be identical even though the signatures are guaranteed to stay valid. If byte-for-byte equivalence is required (which is the case when content hashes are used in other metadata), the original content should be used instead of re-serializing. Args: serializer: ``MetadataSerializer`` instance that implements the desired serialization format. Default is ``JSONSerializer``. Raises: tuf.api.serialization.SerializationError: The metadata object cannot be serialized. 
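# -----------------------------------------------------------------------
# Illustrative aside (a sketch, not part of this module): a serialize/
# deserialize round trip. Assumes a local "root.json" exists.
from tuf.api.metadata import Metadata, Root

_md = Metadata[Root].from_file("root.json")
_data = _md.to_bytes()  # compact JSON by default
assert Metadata.from_bytes(_data) == _md  # signatures stay valid too
# -----------------------------------------------------------------------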
""" if serializer is None: # Use local scope import to avoid circular import errors from tuf.api.serialization.json import JSONSerializer serializer = JSONSerializer(compact=True) return serializer.serialize(self) def to_dict(self) -> Dict[str, Any]: """Return the dict representation of self.""" signatures = [sig.to_dict() for sig in self.signatures.values()] return { "signatures": signatures, "signed": self.signed.to_dict(), **self.unrecognized_fields, } def to_file( self, filename: str, serializer: Optional[MetadataSerializer] = None, storage_backend: Optional[StorageBackendInterface] = None, ) -> None: """Write TUF metadata to file storage. Note that if a file is first deserialized into ``Metadata`` and then serialized with ``to_file()``, the two files are not required to be identical even though the signatures are guaranteed to stay valid. If byte-for-byte equivalence is required (which is the case when file hashes are used in other metadata), the original file should be used instead of re-serializing. Args: filename: Path to write the file to. serializer: ``MetadataSerializer`` instance that implements the desired serialization format. Default is ``JSONSerializer``. storage_backend: ``StorageBackendInterface`` implementation. Default is ``FilesystemBackend`` (i.e. a local file). Raises: tuf.api.serialization.SerializationError: The metadata object cannot be serialized. StorageError: The file cannot be written. """ if storage_backend is None: storage_backend = FilesystemBackend() bytes_data = self.to_bytes(serializer) with tempfile.TemporaryFile() as temp_file: temp_file.write(bytes_data) storage_backend.put(temp_file, filename) # Signatures. def sign( self, signer: Signer, append: bool = False, signed_serializer: Optional[SignedSerializer] = None, ) -> Signature: """Create signature over ``signed`` and assigns it to ``signatures``. Args: signer: A ``securesystemslib.signer.Signer`` object that provides a signing implementation to generate the signature. append: ``True`` if the signature should be appended to the list of signatures or replace any existing signatures. The default behavior is to replace signatures. signed_serializer: ``SignedSerializer`` that implements the desired serialization format. Default is ``CanonicalJSONSerializer``. Raises: tuf.api.serialization.SerializationError: ``signed`` cannot be serialized. UnsignedMetadataError: Signing errors. Returns: ``securesystemslib.signer.Signature`` object that was added into signatures. """ if signed_serializer is None: bytes_data = self.signed_bytes else: bytes_data = signed_serializer.serialize(self.signed) try: signature = signer.sign(bytes_data) except Exception as e: raise UnsignedMetadataError(f"Failed to sign: {e}") from e if not append: self.signatures.clear() self.signatures[signature.keyid] = signature return signature def verify_delegate( self, delegated_role: str, delegated_metadata: "Metadata", signed_serializer: Optional[SignedSerializer] = None, ) -> None: """Verify that ``delegated_metadata`` is signed with the required threshold of keys for ``delegated_role``. .. deprecated:: 3.1.0 Please use ``Root.verify_delegate()`` or ``Targets.verify_delegate()``. 
""" if self.signed.type not in ["root", "targets"]: raise TypeError("Call is valid only on delegator metadata") if signed_serializer is None: payload = delegated_metadata.signed_bytes else: payload = signed_serializer.serialize(delegated_metadata.signed) self.signed.verify_delegate( delegated_role, payload, delegated_metadata.signatures ) python-tuf-5.1.0/tuf/api/serialization/000077500000000000000000000000001470074210500200565ustar00rootroot00000000000000python-tuf-5.1.0/tuf/api/serialization/__init__.py000066400000000000000000000035031470074210500221700ustar00rootroot00000000000000# Copyright New York University and the TUF contributors # SPDX-License-Identifier: MIT OR Apache-2.0 """``tuf.api.serialization`` module provides abstract base classes and concrete implementations to serialize and deserialize TUF metadata. Any custom de/serialization implementations should inherit from the abstract base classes defined in this module. The implementations can use the ``to_dict()``/``from_dict()`` implementations available in the Metadata API objects. - Metadata de/serializers are used to convert to and from wireline formats. - Signed serializers are used to canonicalize data for cryptographic signatures generation and verification. """ import abc from typing import TYPE_CHECKING from tuf.api.exceptions import RepositoryError if TYPE_CHECKING: from tuf.api.metadata import Metadata, Signed class SerializationError(RepositoryError): """Error during serialization.""" class DeserializationError(RepositoryError): """Error during deserialization.""" class MetadataDeserializer(metaclass=abc.ABCMeta): """Abstract base class for deserialization of Metadata objects.""" @abc.abstractmethod def deserialize(self, raw_data: bytes) -> "Metadata": """Deserialize bytes to Metadata object.""" raise NotImplementedError class MetadataSerializer(metaclass=abc.ABCMeta): """Abstract base class for serialization of Metadata objects.""" @abc.abstractmethod def serialize(self, metadata_obj: "Metadata") -> bytes: """Serialize Metadata object to bytes.""" raise NotImplementedError class SignedSerializer(metaclass=abc.ABCMeta): """Abstract base class for serialization of Signed objects.""" @abc.abstractmethod def serialize(self, signed_obj: "Signed") -> bytes: """Serialize Signed object to bytes.""" raise NotImplementedError python-tuf-5.1.0/tuf/api/serialization/json.py000066400000000000000000000070601470074210500214040ustar00rootroot00000000000000# Copyright New York University and the TUF contributors # SPDX-License-Identifier: MIT OR Apache-2.0 """``tuf.api.serialization.json`` module provides concrete implementations to serialize and deserialize TUF role metadata to and from the JSON wireline format for transportation, and to serialize the 'signed' part of TUF role metadata to the OLPC Canonical JSON format for signature generation and verification. """ import json from typing import Optional from securesystemslib.formats import encode_canonical # ... to allow de/serializing Metadata and Signed objects here, while also # creating default de/serializers there (see metadata local scope imports). # NOTE: A less desirable alternative would be to add more abstraction layers. 
from tuf.api.metadata import Metadata, Signed from tuf.api.serialization import ( DeserializationError, MetadataDeserializer, MetadataSerializer, SerializationError, SignedSerializer, ) class JSONDeserializer(MetadataDeserializer): """Provides JSON to Metadata deserialize method.""" def deserialize(self, raw_data: bytes) -> Metadata: """Deserialize utf-8 encoded JSON bytes into Metadata object.""" try: json_dict = json.loads(raw_data.decode("utf-8")) metadata_obj = Metadata.from_dict(json_dict) except Exception as e: raise DeserializationError("Failed to deserialize JSON") from e return metadata_obj class JSONSerializer(MetadataSerializer): """Provides Metadata to JSON serialize method. Args: compact: A boolean indicating if the JSON bytes generated in 'serialize' should be compact by excluding whitespace. validate: Check that the metadata object can be deserialized again without change of contents and thus find common mistakes. This validation might slow down serialization significantly. """ def __init__(self, compact: bool = False, validate: bool = False): self.compact = compact self.validate = validate def serialize(self, metadata_obj: Metadata) -> bytes: """Serialize Metadata object into utf-8 encoded JSON bytes.""" try: indent = None if self.compact else 1 separators = (",", ":") if self.compact else (",", ": ") json_bytes = json.dumps( metadata_obj.to_dict(), indent=indent, separators=separators, sort_keys=True, ).encode("utf-8") if self.validate: try: new_md_obj = JSONDeserializer().deserialize(json_bytes) if metadata_obj != new_md_obj: raise ValueError( "Metadata changes if you serialize and deserialize." ) except Exception as e: raise ValueError("Metadata cannot be validated!") from e except Exception as e: raise SerializationError("Failed to serialize JSON") from e return json_bytes class CanonicalJSONSerializer(SignedSerializer): """Provides Signed to OLPC Canonical JSON serialize method.""" def serialize(self, signed_obj: Signed) -> bytes: """Serialize Signed object into utf-8 encoded OLPC Canonical JSON bytes. """ try: signed_dict = signed_obj.to_dict() canonical_bytes = encode_canonical(signed_dict).encode("utf-8") except Exception as e: raise SerializationError from e return canonical_bytes python-tuf-5.1.0/tuf/ngclient/000077500000000000000000000000001470074210500162335ustar00rootroot00000000000000python-tuf-5.1.0/tuf/ngclient/README.md000066400000000000000000000017141470074210500175150ustar00rootroot00000000000000## Next-gen TUF client for Python This package provides modules for TUF client implementers. **tuf.ngclient.Updater** is a class that implements the client workflow described in the TUF specification (see https://theupdateframework.github.io/specification/latest/#detailed-client-workflow) **tuf.ngclient.FetcherInterface** is an abstract class that client implementers can subclass in order to reuse their own networking/download libraries -- a Requests-based implementation is used by default.
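A minimal usage sketch (paths and URLs below are placeholders, and the client is expected to have stored trusted root metadata in the metadata directory beforehand):

```python
from tuf.ngclient import Updater

updater = Updater(
    metadata_dir="/path/to/metadata",
    metadata_base_url="https://example.com/metadata/",
    target_dir="/path/to/downloads",
    target_base_url="https://example.com/targets/",
)
updater.refresh()
info = updater.get_targetinfo("file1.txt")
if info is not None:
    local_path = updater.download_target(info)
```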
This package: * Aims to be a clean, easy-to-validate reference client implementation written in modern Python * At the same time aims to be the library of choice for anyone implementing a TUF client in Python: light-weight, easy to integrate and with minimal required dependencies * Is still under development but is planned to become the default client in this implementation (i.e., the older `tuf.client` will be deprecated in the future) python-tuf-5.1.0/tuf/ngclient/__init__.py000066400000000000000000000013761470074210500203530ustar00rootroot00000000000000# Copyright New York University and the TUF contributors # SPDX-License-Identifier: MIT OR Apache-2.0 """TUF client public API.""" from tuf.api.metadata import TargetFile # requests_fetcher is public but comes from _internal for now (because # sigstore-python 1.0 still uses the module from there). requests_fetcher # can be moved out of _internal once sigstore-python 1.0 is not relevant. from tuf.ngclient._internal.requests_fetcher import RequestsFetcher from tuf.ngclient.config import UpdaterConfig from tuf.ngclient.fetcher import FetcherInterface from tuf.ngclient.updater import Updater __all__ = [ # noqa: PLE0604 FetcherInterface.__name__, RequestsFetcher.__name__, TargetFile.__name__, Updater.__name__, UpdaterConfig.__name__, ] python-tuf-5.1.0/tuf/ngclient/_internal/000077500000000000000000000000001470074210500202065ustar00rootroot00000000000000python-tuf-5.1.0/tuf/ngclient/_internal/__init__.py000066400000000000000000000000001470074210500223050ustar00rootroot00000000000000python-tuf-5.1.0/tuf/ngclient/_internal/requests_fetcher.py000066400000000000000000000126701470074210500241410ustar00rootroot00000000000000# Copyright 2021, New York University and the TUF contributors # SPDX-License-Identifier: MIT OR Apache-2.0 """Provides an implementation of ``FetcherInterface`` using the Requests HTTP library. """ # requests_fetcher is public but comes from _internal for now (because # sigstore-python 1.0 still uses the module from there). requests_fetcher # can be moved out of _internal once sigstore-python 1.0 is not relevant. import logging from typing import Dict, Iterator, Optional, Tuple from urllib import parse # Imports import requests import tuf from tuf.api import exceptions from tuf.ngclient.fetcher import FetcherInterface # Globals logger = logging.getLogger(__name__) # Classes class RequestsFetcher(FetcherInterface): """An implementation of ``FetcherInterface`` based on the requests library. Attributes: socket_timeout: Timeout in seconds, used for both initial connection delay and the maximum delay between bytes received. chunk_size: Chunk size in bytes used when downloading. """ def __init__( self, socket_timeout: int = 30, chunk_size: int = 400000, app_user_agent: Optional[str] = None, ) -> None: # http://docs.python-requests.org/en/master/user/advanced/#session-objects: # # "The Session object allows you to persist certain parameters across # requests. It also persists cookies across all requests made from the # Session instance, and will use urllib3's connection pooling. So if # you're making several requests to the same host, the underlying TCP # connection will be reused, which can result in a significant # performance increase (see HTTP persistent connection)." # # NOTE: We use a separate requests.Session per scheme+hostname # combination, in order to reuse connections to the same hostname to # improve efficiency, but avoiding sharing state between different # hosts-scheme combinations to minimize subtle security issues. 
# Some cookies may not be HTTP-safe. self._sessions: Dict[Tuple[str, str], requests.Session] = {} # Default settings self.socket_timeout: int = socket_timeout # seconds self.chunk_size: int = chunk_size # bytes self.app_user_agent = app_user_agent def _fetch(self, url: str) -> Iterator[bytes]: """Fetch the contents of HTTP/HTTPS url from a remote server. Args: url: URL string that represents a file location. Raises: exceptions.SlowRetrievalError: Timeout occurs while receiving data. exceptions.DownloadHTTPError: HTTP error code is received. Returns: Bytes iterator """ # Get a customized session for each new scheme+hostname combination. session = self._get_session(url) # Get the requests.Response object for this URL. # # Defer downloading the response body with stream=True. # Always set the timeout. This timeout value is interpreted by # requests as: # - connect timeout (max delay before first byte is received) # - read (gap) timeout (max delay between bytes received) try: response = session.get( url, stream=True, timeout=self.socket_timeout ) except requests.exceptions.Timeout as e: raise exceptions.SlowRetrievalError from e # Check response status. try: response.raise_for_status() except requests.HTTPError as e: response.close() status = e.response.status_code raise exceptions.DownloadHTTPError(str(e), status) from e return self._chunks(response) def _chunks(self, response: "requests.Response") -> Iterator[bytes]: """A generator function to be returned by fetch. This way the caller of fetch can differentiate between connection and actual data download. """ try: yield from response.iter_content(self.chunk_size) except ( requests.exceptions.ConnectionError, requests.exceptions.Timeout, ) as e: raise exceptions.SlowRetrievalError from e finally: response.close() def _get_session(self, url: str) -> requests.Session: """Return a different customized requests.Session per scheme+hostname combination. Raises: exceptions.DownloadError: When there is a problem parsing the url. """ # Use a different requests.Session per scheme+hostname combination, to # reuse connections while minimizing subtle security issues. parsed_url = parse.urlparse(url) if not parsed_url.scheme: raise exceptions.DownloadError(f"Failed to parse URL {url}") session_index = (parsed_url.scheme, parsed_url.hostname or "") session = self._sessions.get(session_index) if not session: session = requests.Session() self._sessions[session_index] = session ua = f"python-tuf/{tuf.__version__} {session.headers['User-Agent']}" if self.app_user_agent is not None: ua = f"{self.app_user_agent} {ua}" session.headers["User-Agent"] = ua logger.debug("Made new session %s", session_index) else: logger.debug("Reusing session %s", session_index) return session python-tuf-5.1.0/tuf/ngclient/_internal/trusted_metadata_set.py000066400000000000000000000471511470074210500247730ustar00rootroot00000000000000# Copyright the TUF contributors # SPDX-License-Identifier: MIT OR Apache-2.0 """Trusted collection of client-side TUF Metadata. ``TrustedMetadataSet`` keeps track of the current valid set of metadata for the client, and handles almost every step of the "Detailed client workflow" ( https://theupdateframework.github.io/specification/latest#detailed-client-workflow) in the TUF specification: the remaining steps are related to filesystem and network IO, which are not handled here.
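# -----------------------------------------------------------------------
# Illustrative aside (a sketch, not part of this module): constructing
# the fetcher directly, e.g. to brand the User-Agent, and handing it to
# ``Updater``. URLs and paths below are placeholders.
from tuf.ngclient import RequestsFetcher, Updater

_fetcher = RequestsFetcher(socket_timeout=10, app_user_agent="my-app/1.0")
_updater = Updater(
    metadata_dir="/path/to/metadata",
    metadata_base_url="https://example.com/metadata/",
    fetcher=_fetcher,
)
# -----------------------------------------------------------------------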
Loaded metadata can be accessed via index access with rolename as key (``trusted_set[Root.type]``) or, in the case of top-level metadata, using the helper properties (``trusted_set.root``). Signatures are verified and discarded upon inclusion into the trusted set. The rules that ``TrustedMetadataSet`` follows for top-level metadata are * Metadata must be loaded in order: root -> timestamp -> snapshot -> targets -> (delegated targets). * Metadata can be loaded even if it is expired (or in the snapshot case if the meta info does not match): this is called "intermediate metadata". * Intermediate metadata can _only_ be used to load newer versions of the same metadata: As an example an expired root can be used to load a new root. * Metadata is loadable only if metadata before it in loading order is loaded (and is not intermediate): As an example timestamp can be loaded if a final (non-expired) root has been loaded. * Metadata is not loadable if any metadata after it in loading order has been loaded: As an example new roots cannot be loaded if timestamp is loaded. Exceptions are raised if metadata fails to load in any way. Example of loading root, timestamp and snapshot: >>> # Load local root (RepositoryErrors here stop the update) >>> with open(root_path, "rb") as f: >>> trusted_set = TrustedMetadataSet(f.read(), EnvelopeType.METADATA) >>> >>> # update root from remote until no more are available >>> with download(Root.type, trusted_set.root.version + 1) as f: >>> trusted_set.update_root(f.read()) >>> >>> # load local timestamp, then update from remote >>> try: >>> with open(timestamp_path, "rb") as f: >>> trusted_set.update_timestamp(f.read()) >>> except (RepositoryError, OSError): >>> pass # failure to load a local file is ok >>> >>> with download(Timestamp.type) as f: >>> trusted_set.update_timestamp(f.read()) >>> >>> # load local snapshot, then update from remote if needed >>> try: >>> with open(snapshot_path, "rb") as f: >>> trusted_set.update_snapshot(f.read()) >>> except (RepositoryError, OSError): >>> # local snapshot is not valid, load from remote >>> # (RepositoryErrors here stop the update) >>> with download(Snapshot.type, version) as f: >>> trusted_set.update_snapshot(f.read()) """ import datetime import logging from collections import abc from typing import Dict, Iterator, Optional, Tuple, Type, Union, cast from securesystemslib.signer import Signature from tuf.api import exceptions from tuf.api.dsse import SimpleEnvelope from tuf.api.metadata import ( Metadata, Root, Signed, Snapshot, T, Targets, Timestamp, ) from tuf.ngclient.config import EnvelopeType logger = logging.getLogger(__name__) Delegator = Union[Root, Targets] class TrustedMetadataSet(abc.Mapping): """Internal class to keep track of trusted metadata in ``Updater``. ``TrustedMetadataSet`` ensures that the collection of metadata in it is valid and trusted through the whole client update workflow. It provides easy ways to update the metadata with the caller making decisions on what is updated. """ def __init__(self, root_data: bytes, envelope_type: EnvelopeType): """Initialize ``TrustedMetadataSet`` by loading trusted root metadata. Args: root_data: Trusted root metadata as bytes. Note that this metadata will only be verified by itself: it is the source of trust for all metadata in the ``TrustedMetadataSet`` envelope_type: Configures deserialization and verification mode of TUF metadata. Raises: RepositoryError: Metadata failed to load or verify. The actual error type and content will contain more details. 
""" self._trusted_set: Dict[str, Signed] = {} self.reference_time = datetime.datetime.now(datetime.timezone.utc) if envelope_type is EnvelopeType.SIMPLE: self._load_data = _load_from_simple_envelope else: self._load_data = _load_from_metadata # Load and validate the local root metadata. Valid initial trusted root # metadata is required logger.debug("Updating initial trusted root") self._load_trusted_root(root_data) def __getitem__(self, role: str) -> Signed: """Return current ``Signed`` for ``role``.""" return self._trusted_set[role] def __len__(self) -> int: """Return number of ``Signed`` objects in ``TrustedMetadataSet``.""" return len(self._trusted_set) def __iter__(self) -> Iterator[Signed]: """Return iterator over ``Signed`` objects in ``TrustedMetadataSet``. """ return iter(self._trusted_set.values()) # Helper properties for top level metadata @property def root(self) -> Root: """Get current root.""" return cast(Root, self._trusted_set[Root.type]) @property def timestamp(self) -> Timestamp: """Get current timestamp.""" return cast(Timestamp, self._trusted_set[Timestamp.type]) @property def snapshot(self) -> Snapshot: """Get current snapshot.""" return cast(Snapshot, self._trusted_set[Snapshot.type]) @property def targets(self) -> Targets: """Get current top-level targets.""" return cast(Targets, self._trusted_set[Targets.type]) # Methods for updating metadata def update_root(self, data: bytes) -> Root: """Verify and load ``data`` as new root metadata. Note that an expired intermediate root is considered valid: expiry is only checked for the final root in ``update_timestamp()``. Args: data: Unverified new root metadata as bytes Raises: RuntimeError: This function is called after updating timestamp. RepositoryError: Metadata failed to load or verify. The actual error type and content will contain more details. Returns: Deserialized and verified ``Root`` object """ if Timestamp.type in self._trusted_set: raise RuntimeError("Cannot update root after timestamp") logger.debug("Updating root") new_root, new_root_bytes, new_root_signatures = self._load_data( Root, data, self.root ) if new_root.version != self.root.version + 1: raise exceptions.BadVersionNumberError( f"Expected root version {self.root.version + 1}" f" instead got version {new_root.version}" ) # Verify that new root is signed by itself new_root.verify_delegate(Root.type, new_root_bytes, new_root_signatures) self._trusted_set[Root.type] = new_root logger.debug("Updated root v%d", new_root.version) return new_root def update_timestamp(self, data: bytes) -> Timestamp: """Verify and load ``data`` as new timestamp metadata. Note that an intermediate timestamp is allowed to be expired: ``TrustedMetadataSet`` will throw an ``ExpiredMetadataError`` in this case but the intermediate timestamp will be loaded. This way a newer timestamp can still be loaded (and the intermediate timestamp will be used for rollback protection). Expired timestamp will prevent loading snapshot metadata. Args: data: Unverified new timestamp metadata as bytes Raises: RuntimeError: This function is called after updating snapshot. RepositoryError: Metadata failed to load or verify as final timestamp. The actual error type and content will contain more details. Returns: Deserialized and verified ``Timestamp`` object """ if Snapshot.type in self._trusted_set: raise RuntimeError("Cannot update timestamp after snapshot") # client workflow 5.3.10: Make sure final root is not expired. 
if self.root.is_expired(self.reference_time): raise exceptions.ExpiredMetadataError("Final root.json is expired") # No need to check for 5.3.11 (fast forward attack recovery): # timestamp/snapshot can not yet be loaded at this point new_timestamp, _, _ = self._load_data(Timestamp, data, self.root) # If an existing trusted timestamp is updated, # check for a rollback attack if Timestamp.type in self._trusted_set: # Prevent rolling back timestamp version if new_timestamp.version < self.timestamp.version: raise exceptions.BadVersionNumberError( f"New timestamp version {new_timestamp.version} must" f" be >= {self.timestamp.version}" ) # Keep using old timestamp if versions are equal. if new_timestamp.version == self.timestamp.version: raise exceptions.EqualVersionNumberError # Prevent rolling back snapshot version snapshot_meta = self.timestamp.snapshot_meta new_snapshot_meta = new_timestamp.snapshot_meta if new_snapshot_meta.version < snapshot_meta.version: raise exceptions.BadVersionNumberError( f"New snapshot version must be >= {snapshot_meta.version}" f", got version {new_snapshot_meta.version}" ) # expiry not checked to allow old timestamp to be used for rollback # protection of new timestamp: expiry is checked in update_snapshot() self._trusted_set[Timestamp.type] = new_timestamp logger.debug("Updated timestamp v%d", new_timestamp.version) # timestamp is loaded: raise if it is not valid _final_ timestamp self._check_final_timestamp() return new_timestamp def _check_final_timestamp(self) -> None: """Raise if timestamp is expired.""" if self.timestamp.is_expired(self.reference_time): raise exceptions.ExpiredMetadataError("timestamp.json is expired") def update_snapshot( self, data: bytes, trusted: Optional[bool] = False ) -> Snapshot: """Verify and load ``data`` as new snapshot metadata. Note that an intermediate snapshot is allowed to be expired and version is allowed to not match timestamp meta version: ``TrustedMetadataSet`` will throw an ``ExpiredMetadataError``/``BadVersionNumberError`` in these cases but the intermediate snapshot will be loaded. This way a newer snapshot can still be loaded (and the intermediate snapshot will be used for rollback protection). Expired snapshot or snapshot that does not match timestamp meta version will prevent loading targets. Args: data: Unverified new snapshot metadata as bytes trusted: ``True`` if data has at some point been verified by ``TrustedMetadataSet`` as a valid snapshot. Purpose of trusted is to allow loading of locally stored snapshot as intermediate snapshot even if hashes in current timestamp meta no longer match data. Default is False. Raises: RuntimeError: This function is called before updating timestamp or after updating targets. RepositoryError: Data failed to load or verify as final snapshot. The actual error type and content will contain more details. Returns: Deserialized and verified ``Snapshot`` object """ if Timestamp.type not in self._trusted_set: raise RuntimeError("Cannot update snapshot before timestamp") if Targets.type in self._trusted_set: raise RuntimeError("Cannot update snapshot after targets") logger.debug("Updating snapshot") # Snapshot cannot be loaded if final timestamp is expired self._check_final_timestamp() snapshot_meta = self.timestamp.snapshot_meta # Verify non-trusted data against the hashes in timestamp, if any. # Trusted snapshot data has already been verified once. 
if not trusted: snapshot_meta.verify_length_and_hashes(data) new_snapshot, _, _ = self._load_data(Snapshot, data, self.root) # version not checked against meta version to allow old snapshot to be # used in rollback protection: it is checked when targets is updated # If an existing trusted snapshot is updated, check for rollback attack if Snapshot.type in self._trusted_set: for filename, fileinfo in self.snapshot.meta.items(): new_fileinfo = new_snapshot.meta.get(filename) # Prevent removal of any metadata in meta if new_fileinfo is None: raise exceptions.RepositoryError( f"New snapshot is missing info for '{filename}'" ) # Prevent rollback of any metadata versions if new_fileinfo.version < fileinfo.version: raise exceptions.BadVersionNumberError( f"Expected {filename} version " f"{fileinfo.version}, got {new_fileinfo.version}." ) # expiry not checked to allow old snapshot to be used for rollback # protection of new snapshot: it is checked when targets is updated self._trusted_set[Snapshot.type] = new_snapshot logger.debug("Updated snapshot v%d", new_snapshot.version) # snapshot is loaded, but we raise if it's not valid _final_ snapshot self._check_final_snapshot() return new_snapshot def _check_final_snapshot(self) -> None: """Raise if snapshot is expired or meta version does not match.""" if self.snapshot.is_expired(self.reference_time): raise exceptions.ExpiredMetadataError("snapshot.json is expired") snapshot_meta = self.timestamp.snapshot_meta if self.snapshot.version != snapshot_meta.version: raise exceptions.BadVersionNumberError( f"Expected snapshot version {snapshot_meta.version}, " f"got {self.snapshot.version}" ) def update_targets(self, data: bytes) -> Targets: """Verify and load ``data`` as new top-level targets metadata. Args: data: Unverified new targets metadata as bytes Raises: RepositoryError: Metadata failed to load or verify. The actual error type and content will contain more details. Returns: Deserialized and verified ``Targets`` object """ return self.update_delegated_targets(data, Targets.type, Root.type) def update_delegated_targets( self, data: bytes, role_name: str, delegator_name: str ) -> Targets: """Verify and load ``data`` as new metadata for target ``role_name``. Args: data: Unverified new metadata as bytes role_name: Role name of the new metadata delegator_name: Name of the role delegating to the new metadata Raises: RuntimeError: This function is called before updating snapshot. RepositoryError: Metadata failed to load or verify. The actual error type and content will contain more details.
Returns: Deserialized and verified ``Targets`` object """ if Snapshot.type not in self._trusted_set: raise RuntimeError("Cannot load targets before snapshot") # Targets cannot be loaded if final snapshot is expired or its version # does not match meta version in timestamp self._check_final_snapshot() delegator: Optional[Delegator] = self.get(delegator_name) if delegator is None: raise RuntimeError("Cannot load targets before delegator") logger.debug("Updating %s delegated by %s", role_name, delegator_name) # Verify against the hashes in snapshot, if any meta = self.snapshot.meta.get(f"{role_name}.json") if meta is None: raise exceptions.RepositoryError( f"Snapshot does not contain information for '{role_name}'" ) meta.verify_length_and_hashes(data) new_delegate, _, _ = self._load_data( Targets, data, delegator, role_name ) version = new_delegate.version if version != meta.version: raise exceptions.BadVersionNumberError( f"Expected {role_name} v{meta.version}, got v{version}." ) if new_delegate.is_expired(self.reference_time): raise exceptions.ExpiredMetadataError(f"New {role_name} is expired") self._trusted_set[role_name] = new_delegate logger.debug("Updated %s v%d", role_name, version) return new_delegate def _load_trusted_root(self, data: bytes) -> None: """Verify and load ``data`` as trusted root metadata. Note that an expired initial root is considered valid: expiry is only checked for the final root in ``update_timestamp()``. """ new_root, new_root_bytes, new_root_signatures = self._load_data( Root, data ) new_root.verify_delegate(Root.type, new_root_bytes, new_root_signatures) self._trusted_set[Root.type] = new_root logger.debug("Loaded trusted root v%d", new_root.version) def _load_from_metadata( role: Type[T], data: bytes, delegator: Optional[Delegator] = None, role_name: Optional[str] = None, ) -> Tuple[T, bytes, Dict[str, Signature]]: """Load traditional metadata bytes, and extract and verify payload. If no delegator is passed, verification is skipped. Returns a tuple of deserialized payload, signed payload bytes, and signatures. """ md = Metadata[T].from_bytes(data) if md.signed.type != role.type: raise exceptions.RepositoryError( f"Expected '{role.type}', got '{md.signed.type}'" ) if delegator: if role_name is None: role_name = role.type delegator.verify_delegate(role_name, md.signed_bytes, md.signatures) return md.signed, md.signed_bytes, md.signatures def _load_from_simple_envelope( role: Type[T], data: bytes, delegator: Optional[Delegator] = None, role_name: Optional[str] = None, ) -> Tuple[T, bytes, Dict[str, Signature]]: """Load simple envelope bytes, and extract and verify payload. If no delegator is passed, verification is skipped. Returns a tuple of deserialized payload, signed payload bytes, and signatures. 
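    Note that signatures are verified over the DSSE pre-authentication
    encoding (``envelope.pae()``), not over the raw payload bytes.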
""" envelope = SimpleEnvelope[T].from_bytes(data) if envelope.payload_type != SimpleEnvelope.DEFAULT_PAYLOAD_TYPE: raise exceptions.RepositoryError( f"Expected '{SimpleEnvelope.DEFAULT_PAYLOAD_TYPE}', " f"got '{envelope.payload_type}'" ) if delegator: if role_name is None: role_name = role.type delegator.verify_delegate( role_name, envelope.pae(), envelope.signatures ) signed = envelope.get_signed() if signed.type != role.type: raise exceptions.RepositoryError( f"Expected '{role.type}', got '{signed.type}'" ) return signed, envelope.pae(), envelope.signatures python-tuf-5.1.0/tuf/ngclient/config.py000066400000000000000000000041711470074210500200550ustar00rootroot00000000000000# Copyright 2021, New York University and the TUF contributors # SPDX-License-Identifier: MIT OR Apache-2.0 """Configuration options for ``Updater`` class.""" from dataclasses import dataclass from enum import Flag, unique from typing import Optional @unique class EnvelopeType(Flag): """Configures deserialization and verification mode of TUF metadata. Args: METADATA: Traditional canonical JSON -based TUF Metadata. SIMPLE: Dead Simple Signing Envelope. (experimental) """ METADATA = 1 SIMPLE = 2 @dataclass class UpdaterConfig: """Used to store ``Updater`` configuration. Args: max_root_rotations: Maximum number of root rotations. max_delegations: Maximum number of delegations. root_max_length: Maxmimum length of a root metadata file. timestamp_max_length: Maximum length of a timestamp metadata file. snapshot_max_length: Maximum length of a snapshot metadata file. targets_max_length: Maximum length of a targets metadata file. prefix_targets_with_hash: When `consistent snapshots `_ are used, target download URLs are formed by prefixing the filename with a hash digest of file content by default. This can be overridden by setting ``prefix_targets_with_hash`` to ``False``. envelope_type: Configures deserialization and verification mode of TUF metadata. Per default, it is treated as traditional canonical JSON -based TUF Metadata. app_user_agent: Application user agent, e.g. "MyApp/1.0.0". This will be prefixed to ngclient user agent when the default fetcher is used. """ max_root_rotations: int = 256 max_delegations: int = 32 root_max_length: int = 512000 # bytes timestamp_max_length: int = 16384 # bytes snapshot_max_length: int = 2000000 # bytes targets_max_length: int = 5000000 # bytes prefix_targets_with_hash: bool = True envelope_type: EnvelopeType = EnvelopeType.METADATA app_user_agent: Optional[str] = None python-tuf-5.1.0/tuf/ngclient/fetcher.py000066400000000000000000000110611470074210500202240ustar00rootroot00000000000000# Copyright 2021, New York University and the TUF contributors # SPDX-License-Identifier: MIT OR Apache-2.0 """Provides an interface for network IO abstraction.""" # Imports import abc import logging import tempfile from contextlib import contextmanager from typing import IO, Iterator from tuf.api import exceptions logger = logging.getLogger(__name__) # Classes class FetcherInterface(metaclass=abc.ABCMeta): """Defines an interface for abstract network download. By providing a concrete implementation of the abstract interface, users of the framework can plug-in their preferred/customized network stack. Implementations of FetcherInterface only need to implement ``_fetch()``. The public API of the class is already implemented. """ @abc.abstractmethod def _fetch(self, url: str) -> Iterator[bytes]: """Fetch the contents of HTTP/HTTPS ``url`` from a remote server. 
Implementations must raise ``DownloadHTTPError`` if they receive an HTTP error code. Implementations may raise any errors but the ones that are not ``DownloadErrors`` will be wrapped in a ``DownloadError`` by ``fetch()``. Args: url: URL string that represents a file location. Raises: exceptions.DownloadHTTPError: HTTP error code was received. Returns: Bytes iterator """ raise NotImplementedError # pragma: no cover def fetch(self, url: str) -> Iterator[bytes]: """Fetch the contents of HTTP/HTTPS ``url`` from a remote server. Args: url: URL string that represents a file location. Raises: exceptions.DownloadError: An error occurred during download. exceptions.DownloadHTTPError: An HTTP error code was received. Returns: Bytes iterator """ # Ensure that fetch() only raises DownloadErrors, regardless of the # fetcher implementation try: return self._fetch(url) except exceptions.DownloadError as e: raise e except Exception as e: raise exceptions.DownloadError(f"Failed to download {url}") from e @contextmanager def download_file(self, url: str, max_length: int) -> Iterator[IO]: """Download file from given ``url``. It is recommended to use ``download_file()`` within a ``with`` block to guarantee that allocated file resources will always be released even if download fails. Args: url: URL string that represents the location of the file. max_length: Upper bound of file size in bytes. Raises: exceptions.DownloadError: An error occurred during download. exceptions.DownloadLengthMismatchError: Downloaded bytes exceed ``max_length``. exceptions.DownloadHTTPError: An HTTP error code was received. Yields: ``TemporaryFile`` object that points to the contents of ``url``. """ logger.debug("Downloading: %s", url) number_of_bytes_received = 0 with tempfile.TemporaryFile() as temp_file: chunks = self.fetch(url) for chunk in chunks: number_of_bytes_received += len(chunk) if number_of_bytes_received > max_length: raise exceptions.DownloadLengthMismatchError( f"Downloaded {number_of_bytes_received} bytes exceeding" f" the maximum allowed length of {max_length}" ) temp_file.write(chunk) logger.debug( "Downloaded %d out of %d bytes", number_of_bytes_received, max_length, ) temp_file.seek(0) yield temp_file def download_bytes(self, url: str, max_length: int) -> bytes: """Download bytes from given ``url``. Returns the downloaded bytes, otherwise like ``download_file()``. Args: url: URL string that represents the location of the file. max_length: Upper bound of data size in bytes. Raises: exceptions.DownloadError: An error occurred during download. exceptions.DownloadLengthMismatchError: Downloaded bytes exceed ``max_length``. exceptions.DownloadHTTPError: An HTTP error code was received. Returns: Content of the file in bytes. """ with self.download_file(url, max_length) as dl_file: return dl_file.read() python-tuf-5.1.0/tuf/ngclient/updater.py000066400000000000000000000476371470074210500202720ustar00rootroot00000000000000# Copyright 2020, New York University and the TUF contributors # SPDX-License-Identifier: MIT OR Apache-2.0 """Client update workflow implementation. The ``Updater`` class provides an implementation of the `TUF client workflow `_. ``Updater`` provides an API to query available targets and to download them in a secure manner: All downloaded files are verified by signed metadata. High-level description of ``Updater`` functionality: * Initializing an ``Updater`` loads and validates the trusted local root metadata: This root metadata is used as the source of trust for all other metadata. 
* ``refresh()`` can optionally be called to update and load all top-level metadata as described in the specification, using both locally cached metadata and metadata downloaded from the remote repository. If refresh is not done explicitly, it will happen automatically during the first target info lookup. * ``Updater`` can be used to download targets. For each target: * ``Updater.get_targetinfo()`` is first used to find information about a specific target. This will load new targets metadata as needed (from local cache or remote repository). * ``Updater.find_cached_target()`` can optionally be used to check if a target file is already locally cached. * ``Updater.download_target()`` downloads a target file and ensures it is verified correct by the metadata. Note that applications using ``Updater`` should be 'single instance' applications: running multiple instances that use the same cache directories at the same time is not supported. A simple example of using the Updater to implement a Python TUF client that downloads target files is available in `examples/client `_. """ import contextlib import logging import os import shutil import tempfile from typing import Optional, Set, cast from urllib import parse from tuf.api import exceptions from tuf.api.metadata import Root, Snapshot, TargetFile, Targets, Timestamp from tuf.ngclient._internal import requests_fetcher, trusted_metadata_set from tuf.ngclient.config import EnvelopeType, UpdaterConfig from tuf.ngclient.fetcher import FetcherInterface logger = logging.getLogger(__name__) class Updater: """Creates a new ``Updater`` instance and loads trusted root metadata. Args: metadata_dir: Local metadata directory. Directory must be writable and it must contain a trusted root.json file metadata_base_url: Base URL for all remote metadata downloads target_dir: Local targets directory. Directory must be writable. It will be used as the default target download directory by ``find_cached_target()`` and ``download_target()`` target_base_url: ``Optional``; Default base URL for all remote target downloads. Can be individually set in ``download_target()`` fetcher: ``Optional``; ``FetcherInterface`` implementation used to download both metadata and targets. Default is ``RequestsFetcher`` config: ``Optional``; ``UpdaterConfig`` could be used to setup common configuration options. 
Raises: OSError: Local root.json cannot be read RepositoryError: Local root.json is invalid """ def __init__( self, metadata_dir: str, metadata_base_url: str, target_dir: Optional[str] = None, target_base_url: Optional[str] = None, fetcher: Optional[FetcherInterface] = None, config: Optional[UpdaterConfig] = None, ): self._dir = metadata_dir self._metadata_base_url = _ensure_trailing_slash(metadata_base_url) self.target_dir = target_dir if target_base_url is None: self._target_base_url = None else: self._target_base_url = _ensure_trailing_slash(target_base_url) self.config = config or UpdaterConfig() if fetcher is not None: self._fetcher = fetcher else: self._fetcher = requests_fetcher.RequestsFetcher( app_user_agent=self.config.app_user_agent ) supported_envelopes = [EnvelopeType.METADATA, EnvelopeType.SIMPLE] if self.config.envelope_type not in supported_envelopes: raise ValueError( f"config: envelope_type must be one of {supported_envelopes}, " f"got '{self.config.envelope_type}'" ) # Read trusted local root metadata data = self._load_local_metadata(Root.type) self._trusted_set = trusted_metadata_set.TrustedMetadataSet( data, self.config.envelope_type ) def refresh(self) -> None: """Refresh top-level metadata. Downloads, verifies, and loads metadata for the top-level roles in the specified order (root -> timestamp -> snapshot -> targets) implementing all the checks required in the TUF client workflow. A ``refresh()`` can be done only once during the lifetime of an Updater. If ``refresh()`` has not been explicitly called before the first ``get_targetinfo()`` call, it will be done implicitly at that time. The metadata for delegated roles is not updated by ``refresh()``: that happens on demand during ``get_targetinfo()``. However, if the repository uses `consistent_snapshot `_, then all metadata downloaded by the Updater will use the same consistent repository state. Raises: OSError: New metadata could not be written to disk RepositoryError: Metadata failed to verify in some way DownloadError: Download of a metadata file failed in some way """ self._load_root() self._load_timestamp() self._load_snapshot() self._load_targets(Targets.type, Root.type) def _generate_target_file_path(self, targetinfo: TargetFile) -> str: if self.target_dir is None: raise ValueError("target_dir must be set if filepath is not given") # Use URL encoded target path as filename filename = parse.quote(targetinfo.path, "") return os.path.join(self.target_dir, filename) def get_targetinfo(self, target_path: str) -> Optional[TargetFile]: """Return ``TargetFile`` instance with information for ``target_path``. The return value can be used as an argument to ``download_target()`` and ``find_cached_target()``. If ``refresh()`` has not been called before calling ``get_targetinfo()``, the refresh will be done implicitly. As a side-effect this method downloads all the additional (delegated targets) metadata it needs to return the target information. Args: target_path: `path-relative-URL string `_ that uniquely identifies the target within the repository. Raises: OSError: New metadata could not be written to disk RepositoryError: Metadata failed to verify in some way DownloadError: Download of a metadata file failed in some way Returns: ``TargetFile`` instance or ``None``. 
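        Example (a sketch: assumes an ``updater`` constructed with a
        target directory, and a hypothetical target path "file.txt")::

            info = updater.get_targetinfo("file.txt")
            if info is None:
                print("file.txt not found in repository")
            else:
                path = updater.find_cached_target(info)
                if path is None:
                    path = updater.download_target(info)
                print(f"file.txt available at {path}")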
""" if Targets.type not in self._trusted_set: self.refresh() return self._preorder_depth_first_walk(target_path) def find_cached_target( self, targetinfo: TargetFile, filepath: Optional[str] = None, ) -> Optional[str]: """Check whether a local file is an up to date target. Args: targetinfo: ``TargetFile`` from ``get_targetinfo()``. filepath: Local path to file. If ``None``, a file path is generated based on ``target_dir`` constructor argument. Raises: ValueError: Incorrect arguments Returns: Local file path if the file is an up to date target file. ``None`` if file is not found or it is not up to date. """ if filepath is None: filepath = self._generate_target_file_path(targetinfo) try: with open(filepath, "rb") as target_file: targetinfo.verify_length_and_hashes(target_file) return filepath except (OSError, exceptions.LengthOrHashMismatchError): return None def download_target( self, targetinfo: TargetFile, filepath: Optional[str] = None, target_base_url: Optional[str] = None, ) -> str: """Download the target file specified by ``targetinfo``. Args: targetinfo: ``TargetFile`` from ``get_targetinfo()``. filepath: Local path to download into. If ``None``, the file is downloaded into directory defined by ``target_dir`` constructor argument using a generated filename. If file already exists, it is overwritten. target_base_url: Base URL used to form the final target download URL. Default is the value provided in ``Updater()`` Raises: ValueError: Invalid arguments DownloadError: Download of the target file failed in some way RepositoryError: Downloaded target failed to be verified in some way OSError: Failed to write target to file Returns: Local path to downloaded file """ if filepath is None: filepath = self._generate_target_file_path(targetinfo) if target_base_url is None: if self._target_base_url is None: raise ValueError( "target_base_url must be set in either " "download_target() or constructor" ) target_base_url = self._target_base_url else: target_base_url = _ensure_trailing_slash(target_base_url) target_filepath = targetinfo.path consistent_snapshot = self._trusted_set.root.consistent_snapshot if consistent_snapshot and self.config.prefix_targets_with_hash: hashes = list(targetinfo.hashes.values()) dirname, sep, basename = target_filepath.rpartition("/") target_filepath = f"{dirname}{sep}{hashes[0]}.{basename}" full_url = f"{target_base_url}{target_filepath}" with self._fetcher.download_file( full_url, targetinfo.length ) as target_file: targetinfo.verify_length_and_hashes(target_file) target_file.seek(0) with open(filepath, "wb") as destination_file: shutil.copyfileobj(target_file, destination_file) logger.debug("Downloaded target %s", targetinfo.path) return filepath def _download_metadata( self, rolename: str, length: int, version: Optional[int] = None ) -> bytes: """Download a metadata file and return it as bytes.""" encoded_name = parse.quote(rolename, "") if version is None: url = f"{self._metadata_base_url}{encoded_name}.json" else: url = f"{self._metadata_base_url}{version}.{encoded_name}.json" return self._fetcher.download_bytes(url, length) def _load_local_metadata(self, rolename: str) -> bytes: encoded_name = parse.quote(rolename, "") with open(os.path.join(self._dir, f"{encoded_name}.json"), "rb") as f: return f.read() def _persist_metadata(self, rolename: str, data: bytes) -> None: """Write metadata to disk atomically to avoid data loss.""" temp_file_name: Optional[str] = None try: # encode the rolename to avoid issues with e.g. 
path separators encoded_name = parse.quote(rolename, "") filename = os.path.join(self._dir, f"{encoded_name}.json") with tempfile.NamedTemporaryFile( dir=self._dir, delete=False ) as temp_file: temp_file_name = temp_file.name temp_file.write(data) os.replace(temp_file.name, filename) except OSError as e: # remove tempfile if we managed to create one, # then let the exception happen if temp_file_name is not None: with contextlib.suppress(FileNotFoundError): os.remove(temp_file_name) raise e def _load_root(self) -> None: """Load remote root metadata. Sequentially load and persist on local disk every newer root metadata version available on the remote. """ # Update the root role lower_bound = self._trusted_set.root.version + 1 upper_bound = lower_bound + self.config.max_root_rotations for next_version in range(lower_bound, upper_bound): try: data = self._download_metadata( Root.type, self.config.root_max_length, next_version, ) self._trusted_set.update_root(data) self._persist_metadata(Root.type, data) except exceptions.DownloadHTTPError as exception: if exception.status_code not in {403, 404}: raise # 404/403 means current root is newest available break def _load_timestamp(self) -> None: """Load local and remote timestamp metadata.""" try: data = self._load_local_metadata(Timestamp.type) self._trusted_set.update_timestamp(data) except (OSError, exceptions.RepositoryError) as e: # Local timestamp does not exist or is invalid logger.debug("Local timestamp not valid as final: %s", e) # Load from remote (whether local load succeeded or not) data = self._download_metadata( Timestamp.type, self.config.timestamp_max_length ) try: self._trusted_set.update_timestamp(data) except exceptions.EqualVersionNumberError: # If the new timestamp version is the same as current, discard the # new timestamp. This is normal and it shouldn't raise any error. 
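            # (Nothing is persisted in this case: the equal-version
            # timestamp already stored on disk remains the trusted copy.)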
return self._persist_metadata(Timestamp.type, data) def _load_snapshot(self) -> None: """Load local (and if needed remote) snapshot metadata.""" try: data = self._load_local_metadata(Snapshot.type) self._trusted_set.update_snapshot(data, trusted=True) logger.debug("Local snapshot is valid: not downloading new one") except (OSError, exceptions.RepositoryError) as e: # Local snapshot does not exist or is invalid: update from remote logger.debug("Local snapshot not valid as final: %s", e) snapshot_meta = self._trusted_set.timestamp.snapshot_meta length = snapshot_meta.length or self.config.snapshot_max_length version = None if self._trusted_set.root.consistent_snapshot: version = snapshot_meta.version data = self._download_metadata(Snapshot.type, length, version) self._trusted_set.update_snapshot(data) self._persist_metadata(Snapshot.type, data) def _load_targets(self, role: str, parent_role: str) -> Targets: """Load local (and if needed remote) metadata for ``role``.""" # Avoid loading 'role' more than once during "get_targetinfo" if role in self._trusted_set: return cast(Targets, self._trusted_set[role]) try: data = self._load_local_metadata(role) delegated_targets = self._trusted_set.update_delegated_targets( data, role, parent_role ) logger.debug("Local %s is valid: not downloading new one", role) return delegated_targets except (OSError, exceptions.RepositoryError) as e: # Local 'role' does not exist or is invalid: update from remote logger.debug("Failed to load local %s: %s", role, e) snapshot = self._trusted_set.snapshot metainfo = snapshot.meta.get(f"{role}.json") if metainfo is None: raise exceptions.RepositoryError( f"Role {role} was delegated but is not part of snapshot" ) from None length = metainfo.length or self.config.targets_max_length version = None if self._trusted_set.root.consistent_snapshot: version = metainfo.version data = self._download_metadata(role, length, version) delegated_targets = self._trusted_set.update_delegated_targets( data, role, parent_role ) self._persist_metadata(role, data) return delegated_targets def _preorder_depth_first_walk( self, target_filepath: str ) -> Optional[TargetFile]: """ Interrogates the tree of target delegations in order of appearance (which implicitly order trustworthiness), and returns the matching target found in the most trusted role. """ # List of delegations to be interrogated. A (role, parent role) pair # is needed to load and verify the delegated targets metadata. delegations_to_visit = [(Targets.type, Root.type)] visited_role_names: Set[str] = set() # Preorder depth-first traversal of the graph of target delegations. while ( len(visited_role_names) <= self.config.max_delegations and len(delegations_to_visit) > 0 ): # Pop the role name from the top of the stack. role_name, parent_role = delegations_to_visit.pop(-1) # Skip any visited current role to prevent cycles. if role_name in visited_role_names: logger.debug("Skipping visited current role %s", role_name) continue # The metadata for 'role_name' must be downloaded/updated before # its targets, delegations, and child roles can be inspected. targets = self._load_targets(role_name, parent_role) target = targets.targets.get(target_filepath) if target is not None: logger.debug("Found target in current role %s", role_name) return target # After preorder check, add current role to set of visited roles. visited_role_names.add(role_name) if targets.delegations is not None: child_roles_to_visit = [] # NOTE: This may be a slow operation if there are many # delegated roles. 
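            # get_roles_for_target() yields (rolename, terminating) pairs
            # for each delegation whose paths or path_hash_prefixes match
            # target_filepath, in the order the delegations were listed.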
for ( child_name, terminating, ) in targets.delegations.get_roles_for_target(target_filepath): logger.debug("Adding child role %s", child_name) child_roles_to_visit.append((child_name, role_name)) if terminating: logger.debug("Not backtracking to other roles") delegations_to_visit = [] break # Push 'child_roles_to_visit' in reverse order of appearance # onto 'delegations_to_visit'. Roles are popped from the end of # the list. child_roles_to_visit.reverse() delegations_to_visit.extend(child_roles_to_visit) if len(delegations_to_visit) > 0: logger.debug( "%d roles left to visit, but allowed at most %d delegations", len(delegations_to_visit), self.config.max_delegations, ) # If this point is reached then target is not found, return None return None def _ensure_trailing_slash(url: str) -> str: """Return url guaranteed to end in a slash.""" return url if url.endswith("/") else f"{url}/" python-tuf-5.1.0/tuf/py.typed000066400000000000000000000000001470074210500161150ustar00rootroot00000000000000python-tuf-5.1.0/tuf/repository/000077500000000000000000000000001470074210500166475ustar00rootroot00000000000000python-tuf-5.1.0/tuf/repository/__init__.py000066400000000000000000000010021470074210500207510ustar00rootroot00000000000000# Copyright 2021-2022 python-tuf contributors # SPDX-License-Identifier: MIT OR Apache-2.0 """Repository API: A helper library for repository implementations This module is intended to make any "metadata editing" applications easier to implement: this includes repository applications, CI integration components as well as developer and signing tools. The repository module is not considered part of the stable python-tuf API yet. """ from tuf.repository._repository import AbortEdit, Repository # noqa: F401 python-tuf-5.1.0/tuf/repository/_repository.py000066400000000000000000000234111470074210500216000ustar00rootroot00000000000000# Copyright 2021-2022 python-tuf contributors # SPDX-License-Identifier: MIT OR Apache-2.0 """Repository Abstraction for metadata management""" import logging from abc import ABC, abstractmethod from contextlib import contextmanager, suppress from copy import deepcopy from typing import Dict, Generator, Optional, Tuple from tuf.api.exceptions import UnsignedMetadataError from tuf.api.metadata import ( Metadata, MetaFile, Root, Signed, Snapshot, Targets, Timestamp, ) logger = logging.getLogger(__name__) class AbortEdit(Exception): # noqa: N818 """Raise to exit the edit() contextmanager without saving changes""" class Repository(ABC): """Abstract class for metadata modifying implementations NOTE: The repository module is not considered part of the python-tuf stable API yet. This class is intended to be a base class used in any metadata editing application, whether it is a real repository server or a developer tool. Implementations must implement open() and close(), and can then use the edit() contextmanager to implement actual operations. Note that signing an already existing version of metadata (as could be done for threshold signing) does not fit into this model of open()+close() or edit(). A few operations (snapshot and timestamp) are already implemented in this base class. """ @abstractmethod def open(self, role: str) -> Metadata: """Load a role's metadata from storage or cache and return it. If role has no metadata, create first version from scratch.
""" raise NotImplementedError @abstractmethod def close(self, role: str, md: Metadata) -> None: """Write roles metadata into storage Update expiry and version and replace signatures with ones from all available keys. Keep snapshot_info and targets_infos updated. """ raise NotImplementedError @property def targets_infos(self) -> Dict[str, MetaFile]: """Returns the MetaFiles for current targets metadatas This property is used by do_snapshot() to update Snapshot.meta: Repository implementations should override this property to enable do_snapshot(). Note that there is a difference between this return value and Snapshot.meta: This dictionary reflects the targets metadata that currently exists in the repository but Snapshot.meta also includes metadata that used to exist, but no longer exists, in the repository. """ raise NotImplementedError @property def snapshot_info(self) -> MetaFile: """Returns the MetaFile for current snapshot metadata This property is used by do_timestamp() to update Timestamp.meta: Repository implementations should override this property to enable do_timestamp(). """ raise NotImplementedError @contextmanager def edit(self, role: str) -> Generator[Signed, None, None]: """Context manager for editing a role's metadata Context manager takes care of loading the roles metadata (or creating new metadata), updating expiry and version. The caller can do other changes to the Signed object and when the context manager exits, a new version of the roles metadata is stored. Context manager user can raise AbortEdit from inside the with-block to cancel the edit: in this case none of the changes are stored. """ md = self.open(role) with suppress(AbortEdit): yield md.signed self.close(role, md) @contextmanager def edit_root(self) -> Generator[Root, None, None]: """Context manager for editing root metadata. See edit()""" with self.edit(Root.type) as root: if not isinstance(root, Root): raise RuntimeError("Unexpected root type") yield root @contextmanager def edit_timestamp(self) -> Generator[Timestamp, None, None]: """Context manager for editing timestamp metadata. See edit()""" with self.edit(Timestamp.type) as timestamp: if not isinstance(timestamp, Timestamp): raise RuntimeError("Unexpected timestamp type") yield timestamp @contextmanager def edit_snapshot(self) -> Generator[Snapshot, None, None]: """Context manager for editing snapshot metadata. See edit()""" with self.edit(Snapshot.type) as snapshot: if not isinstance(snapshot, Snapshot): raise RuntimeError("Unexpected snapshot type") yield snapshot @contextmanager def edit_targets( self, rolename: str = Targets.type ) -> Generator[Targets, None, None]: """Context manager for editing targets metadata. 
See edit()""" with self.edit(rolename) as targets: if not isinstance(targets, Targets): raise RuntimeError(f"Unexpected targets ({rolename}) type") yield targets def root(self) -> Root: """Read current root metadata""" root = self.open(Root.type).signed if not isinstance(root, Root): raise RuntimeError("Unexpected root type") return root def timestamp(self) -> Timestamp: """Read current timestamp metadata""" timestamp = self.open(Timestamp.type).signed if not isinstance(timestamp, Timestamp): raise RuntimeError("Unexpected timestamp type") return timestamp def snapshot(self) -> Snapshot: """Read current snapshot metadata""" snapshot = self.open(Snapshot.type).signed if not isinstance(snapshot, Snapshot): raise RuntimeError("Unexpected snapshot type") return snapshot def targets(self, rolename: str = Targets.type) -> Targets: """Read current targets metadata""" targets = self.open(rolename).signed if not isinstance(targets, Targets): raise RuntimeError("Unexpected targets type") return targets def do_snapshot( self, force: bool = False ) -> Tuple[bool, Dict[str, MetaFile]]: """Update snapshot meta information Updates the snapshot meta information according to current targets metadata state and the current snapshot meta information. Arguments: force: should new snapshot version be created even if meta information would not change? Returns: Tuple of - True if snapshot was created, False if not - MetaFiles for targets versions removed from snapshot meta """ # Snapshot update is needed if # * any targets files are not yet in snapshot or # * any targets version is incorrect update_version = force removed: Dict[str, MetaFile] = {} root = self.root() snapshot_md = self.open(Snapshot.type) try: root.verify_delegate( Snapshot.type, snapshot_md.signed_bytes, snapshot_md.signatures, ) except UnsignedMetadataError: update_version = True with self.edit_snapshot() as snapshot: for keyname, new_meta in self.targets_infos.items(): if keyname not in snapshot.meta: update_version = True snapshot.meta[keyname] = deepcopy(new_meta) continue old_meta = snapshot.meta[keyname] if new_meta.version < old_meta.version: raise ValueError(f"{keyname} version rollback") if new_meta.version > old_meta.version: update_version = True snapshot.meta[keyname] = deepcopy(new_meta) removed[keyname] = old_meta if not update_version: # prevent edit_snapshot() from storing a new version raise AbortEdit("Skip snapshot: No targets version changes") if not update_version: # this is reachable as edit_snapshot() handles AbortEdit logger.debug("Snapshot update not needed") # type: ignore[unreachable] else: logger.debug("Snapshot v%d", snapshot.version) return update_version, removed def do_timestamp( self, force: bool = False ) -> Tuple[bool, Optional[MetaFile]]: """Update timestamp meta information Updates timestamp according to current snapshot state Returns: Tuple of - True if timestamp was created, False if not - MetaFile for snapshot version removed from timestamp (if any) """ update_version = force removed = None root = self.root() timestamp_md = self.open(Timestamp.type) try: root.verify_delegate( Timestamp.type, timestamp_md.signed_bytes, timestamp_md.signatures, ) except UnsignedMetadataError: update_version = True with self.edit_timestamp() as timestamp: if self.snapshot_info.version < timestamp.snapshot_meta.version: raise ValueError("snapshot version rollback") if self.snapshot_info.version > timestamp.snapshot_meta.version: update_version = True removed = timestamp.snapshot_meta timestamp.snapshot_meta = 
deepcopy(self.snapshot_info) if not update_version: raise AbortEdit("Skip timestamp: No snapshot version changes") if not update_version: # this is reachable as edit_timestamp() handles AbortEdit logger.debug("Timestamp update not needed") # type: ignore[unreachable] else: logger.debug("Timestamp v%d", timestamp.version) return update_version, removed python-tuf-5.1.0/verify_release000077500000000000000000000224751470074210500165760ustar00rootroot00000000000000#!/usr/bin/env python # Copyright 2022, TUF contributors # SPDX-License-Identifier: MIT OR Apache-2.0 """verify_release - verify that published release matches a locally built one Builds a release from current commit and verifies that the release artifacts on GitHub and PyPI match the built release artifacts. """ import argparse import json import os import subprocess import sys from filecmp import cmp from tempfile import TemporaryDirectory from typing import Optional try: import build as _ # type: ignore[import-not-found] # noqa: F401 import requests except ImportError: print("Error: verify_release requires modules 'requests' and 'build':") print(" pip install requests build") sys.exit(1) # Project variables # Note that only these project artifacts are supported: # [f"{PYPI_PROJECT}-{VER}-none-any.whl", f"{PYPI_PROJECT}-{VER}.tar.gz"] GITHUB_ORG = "theupdateframework" GITHUB_PROJECT = "python-tuf" PYPI_PROJECT = "tuf" HTTP_TIMEOUT = 5 def build(build_dir: str) -> str: """Build release locally. Return version as string""" orig_dir = os.path.dirname(os.path.abspath(__file__)) with TemporaryDirectory() as src_dir: # fresh git clone: this prevents uncommitted files from affecting build git_cmd = ["git", "clone", "--quiet", orig_dir, src_dir] subprocess.run(git_cmd, stdout=subprocess.DEVNULL, check=True) # patch env to constrain build backend version as we do in cd.yml env = os.environ.copy() env["PIP_CONSTRAINT"] = "requirements/build.txt" build_cmd = ["python3", "-m", "build", "--outdir", build_dir, src_dir] subprocess.run( build_cmd, stdout=subprocess.DEVNULL, check=True, env=env ) for filename in os.listdir(build_dir): prefix, postfix = f"{PYPI_PROJECT}-", ".tar.gz" if filename.startswith(prefix) and filename.endswith(postfix): return filename[len(prefix) : -len(postfix)] raise RuntimeError("Build version not found") def get_git_version() -> str: """Return version string from git describe""" cmd = ["git", "describe"] process = subprocess.run(cmd, text=True, capture_output=True, check=True) if not process.stdout.startswith("v") or not process.stdout.endswith("\n"): raise RuntimeError(f"Unexpected git version {process.stdout}") return process.stdout[1:-1] def get_github_version() -> str: """Return version string of latest GitHub release""" release_json = f"https://api.github.com/repos/{GITHUB_ORG}/{GITHUB_PROJECT}/releases/latest" releases = json.loads( requests.get(release_json, timeout=HTTP_TIMEOUT).content ) return releases["tag_name"][1:] def get_pypi_pip_version() -> str: """Return latest version string available on PyPI according to pip""" # pip can't tell us what the newest available version is... 
So we download # newest tarball and figure out the version from the filename with TemporaryDirectory() as pypi_dir: cmd = ["pip", "download", "--no-deps", "--dest", pypi_dir] source_download = [*cmd, "--no-binary", PYPI_PROJECT, PYPI_PROJECT] subprocess.run(source_download, stdout=subprocess.DEVNULL, check=True) for filename in os.listdir(pypi_dir): prefix, postfix = f"{PYPI_PROJECT}-", ".tar.gz" if filename.startswith(prefix) and filename.endswith(postfix): return filename[len(prefix) : -len(postfix)] raise RuntimeError("PyPI version not found") def verify_github_release(version: str, compare_dir: str) -> bool: """Verify that given GitHub version artifacts match expected artifacts""" base_url = ( f"https://github.com/{GITHUB_ORG}/{GITHUB_PROJECT}/releases/download" ) tar = f"{PYPI_PROJECT}-{version}.tar.gz" wheel = f"{PYPI_PROJECT}-{version}-py3-none-any.whl" with TemporaryDirectory() as github_dir: for filename in [tar, wheel]: url = f"{base_url}/v{version}/{filename}" response = requests.get(url, stream=True, timeout=HTTP_TIMEOUT) with open(os.path.join(github_dir, filename), "wb") as f: for data in response.iter_content(): f.write(data) return cmp( os.path.join(github_dir, tar), os.path.join(compare_dir, tar), shallow=False, ) and cmp( os.path.join(github_dir, wheel), os.path.join(compare_dir, wheel), shallow=False, ) def verify_pypi_release(version: str, compare_dir: str) -> bool: """Verify that given PyPI version artifacts match expected artifacts""" tar = f"{PYPI_PROJECT}-{version}.tar.gz" wheel = f"{PYPI_PROJECT}-{version}-py3-none-any.whl" with TemporaryDirectory() as pypi_dir: cmd = ["pip", "download", "--no-deps", "--dest", pypi_dir] target = f"{PYPI_PROJECT}=={version}" binary_download = [*cmd, target] source_download = [*cmd, "--no-binary", PYPI_PROJECT, target] subprocess.run(binary_download, stdout=subprocess.DEVNULL, check=True) subprocess.run(source_download, stdout=subprocess.DEVNULL, check=True) return cmp( os.path.join(pypi_dir, wheel), os.path.join(compare_dir, wheel), shallow=False, ) and cmp( os.path.join(pypi_dir, tar), os.path.join(compare_dir, tar), shallow=False, ) def sign_release_artifacts( version: str, build_dir: str, key_id: Optional[str] = None ) -> None: """Sign built release artifacts with gpg and write signature files to cwd""" sdist = f"{PYPI_PROJECT}-{version}.tar.gz" wheel = f"{PYPI_PROJECT}-{version}-py3-none-any.whl" cmd = ["gpg", "--detach-sign", "--armor"] if key_id is not None: cmd += ["--local-user", key_id] for filename in [sdist, wheel]: artifact_path = os.path.join(build_dir, filename) signature_path = f"{filename}.asc" subprocess.run( [*cmd, "--output", signature_path, artifact_path], check=True ) if not os.path.exists(signature_path): raise RuntimeError("Signing failed, signature not found") def finished(s: str) -> None: """Displays a finished message.""" # clear line sys.stdout.write("\033[K") print(f"* {s}") def progress(s: str) -> None: """Displays a progress message.""" # clear line sys.stdout.write("\033[K") # carriage return but no newline: next print will overwrite this one print(f" {s}...", end="\r", flush=True) def main() -> int: # noqa: D103 parser = argparse.ArgumentParser() parser.add_argument( "--skip-pypi", action="store_true", dest="skip_pypi", help="Skip PyPI release check.", ) parser.add_argument( "--sign", nargs="?", const=True, metavar="<key id>", dest="sign", help="Sign release artifacts with 'gpg'. If no <key id> is passed,"
Resulting '*.asc' files are written" " to CWD.", ) args = parser.parse_args() success = True with TemporaryDirectory() as build_dir: progress("Building release") build_version = build(build_dir) finished(f"Built release {build_version}") git_version = get_git_version() if not git_version.startswith(build_version): raise RuntimeError( f"Git version is {git_version}, expected {build_version}" ) if git_version != build_version: finished(f"WARNING: Git describes version as {git_version}") progress("Checking GitHub latest version") github_version = get_github_version() if github_version != build_version: finished(f"WARNING: GitHub latest version is {github_version}") if not args.skip_pypi: progress("Checking PyPI latest version") pypi_version = get_pypi_pip_version() if pypi_version != build_version: finished(f"WARNING: PyPI latest version is {pypi_version}") progress("Downloading release from PyPI") if not verify_pypi_release(build_version, build_dir): # This is expected while build is not reproducible finished("ERROR: PyPI artifacts do not match built release") success = False else: finished("PyPI artifacts match the built release") progress("Downloading release from GitHub") if not verify_github_release(build_version, build_dir): # This is expected while build is not reproducible finished("ERROR: GitHub artifacts do not match built release") success = False else: finished("GitHub artifacts match the built release") # NOTE: 'gpg' might prompt for password or ask if it should # override files... if args.sign: progress("Signing built release with gpg") if success: key_id = args.sign if args.sign is not True else None sign_release_artifacts(build_version, build_dir, key_id) finished("Created signatures in cwd (see '*.asc' files)") else: finished("WARNING: Skipped signing of non-matching artifacts") return 0 if success else 1 if __name__ == "__main__": sys.exit(main())
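# Example invocations (illustrative; the gpg key id below is made up):
#   ./verify_release                  # build, then compare GitHub and PyPI
#   ./verify_release --skip-pypi      # compare GitHub artifacts only
#   ./verify_release --sign 1234ABCD  # also sign artifacts with gpg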