gittuf-0.9.0/.gitattributes
docs/cli/*.md linguist-generated=true

gittuf-0.9.0/.github/ISSUE_TEMPLATE/bug.yml
name: Bug Report
description: File a bug report
labels: ["bug"]
assignees:
  - adityasaky
body:
  - type: markdown
    attributes:
      value: |
        Thanks for taking the time to fill out this bug report!
  - type: textarea
    id: what-happened
    attributes:
      label: What happened?
      description: Also tell us, what did you expect to happen? Please include the version or revision of gittuf.
    validations:
      required: true
  - type: textarea
    id: logs
    attributes:
      label: Relevant log output
      description: Please copy and paste the stack trace if it's available. This will be automatically formatted into code, so no need for backticks.
      render: Shell
  - type: checkboxes
    id: terms
    attributes:
      label: Code of Conduct
      description: By submitting this issue, you agree to follow our [Code of Conduct](https://github.com/gittuf/community/blob/main/CODE-OF-CONDUCT.md)
      options:
        - label: I agree to follow this project's Code of Conduct
          required: true

gittuf-0.9.0/.github/ISSUE_TEMPLATE/issue.yml
name: Discussion / Feature Request
description: Open a discussion for gittuf workflows or a feature request
labels: ["discussion"]
body:
  - type: markdown
    attributes:
      value: |
        Thanks for taking the time to file this issue!
  - type: textarea
    id: Description
    attributes:
      label: Add a description
      description: Please provide some details about what you'd like to discuss. If relevant, please include the version or revision of gittuf.
    validations:
      required: true
  - type: textarea
    id: logs
    attributes:
      label: Relevant log output if the discussion pertains to existing gittuf functionality
      description: Please copy and paste the stack trace if it's available. This will be automatically formatted into code, so no need for backticks.
render: Shell - type: checkboxes id: terms attributes: label: Code of Conduct description: By submitting this issue, you agree to follow our [Code of Conduct](https://github.com/gittuf/community/blob/main/CODE-OF-CONDUCT.md) options: - label: I agree to follow this project's Code of Conduct required: true gittuf-0.9.0/.github/dependabot.yml000066400000000000000000000013541475150141000172130ustar00rootroot00000000000000version: 2 updates: # Monitor Go dependencies - package-ecosystem: "gomod" directory: "/" schedule: interval: "daily" time: "14:00" # Bundle updates into one PR groups: all: applies-to: version-updates patterns: - "*" commit-message: prefix: "chore" include: "scope" open-pull-requests-limit: 10 # Monitor GitHub Actions - package-ecosystem: "github-actions" directory: "/" schedule: interval: "daily" time: "14:00" # Bundle updates into one PR groups: all: applies-to: version-updates patterns: - "*" commit-message: prefix: "chore" include: "scope" open-pull-requests-limit: 10 gittuf-0.9.0/.github/workflows/000077500000000000000000000000001475150141000164155ustar00rootroot00000000000000gittuf-0.9.0/.github/workflows/check-docs.sh000077500000000000000000000002121475150141000207520ustar00rootroot00000000000000#!/bin/bash set -euo pipefail make generate if [[ $(git --no-pager diff) ]] ; then echo "Please re-generate CLI docs" exit 1 fi gittuf-0.9.0/.github/workflows/ci.yml000066400000000000000000000012461475150141000175360ustar00rootroot00000000000000name: tests on: push: branches: ['main'] paths-ignore: - "docs/*" - "**.md" pull_request: paths-ignore: - "docs/*" - "**.md" permissions: read-all jobs: test: strategy: matrix: os: [ubuntu-latest, macos-latest, windows-latest] go-version: ['1.23'] runs-on: ${{ matrix.os }} steps: - name: Checkout code uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 - name: Install Go uses: actions/setup-go@f111f3307d8850f501ac008e886eec1fd1932a34 with: go-version: ${{ matrix.go-version }} cache: true - name: Test run: go test -timeout 20m ./... gittuf-0.9.0/.github/workflows/codeql.yml000066400000000000000000000067171475150141000204220ustar00rootroot00000000000000# For most projects, this workflow file will not need changing; you simply need # to commit it to your repository. # # You may wish to alter this file to override the set of languages analyzed, # or to provide custom queries or build logic. # # ******** NOTE ******** # We have attempted to detect the languages in your repository. Please check # the `language` matrix defined below to confirm you have the correct set of # supported CodeQL languages. # name: "CodeQL" on: push: branches: [ "main" ] paths-ignore: - "docs/**" - "**.md" pull_request: branches: [ "main" ] paths-ignore: - "docs/**" - "**.md" schedule: - cron: '28 17 * * 6' permissions: read-all jobs: analyze: name: Analyze (${{ matrix.language }}) # Runner size impacts CodeQL analysis time. To learn more, please see: # - https://gh.io/recommended-hardware-resources-for-running-codeql # - https://gh.io/supported-runners-and-hardware-resources # - https://gh.io/using-larger-runners (GitHub.com only) # Consider using larger runners or machines with greater resources for possible analysis time improvements. 
runs-on: ${{ 'ubuntu-latest' }} timeout-minutes: ${{ 360 }} permissions: # required for all workflows security-events: write strategy: fail-fast: false matrix: include: - language: go build-mode: autobuild # CodeQL supports the following values keywords for 'language': 'c-cpp', 'csharp', 'go', 'java-kotlin', 'javascript-typescript', 'python', 'ruby', 'swift' # Use `c-cpp` to analyze code written in C, C++ or both # Use 'java-kotlin' to analyze code written in Java, Kotlin or both # Use 'javascript-typescript' to analyze code written in JavaScript, TypeScript or both # To learn more about changing the languages that are analyzed or customizing the build mode for your analysis, # see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/customizing-your-advanced-setup-for-code-scanning. # If you are analyzing a compiled language, you can modify the 'build-mode' for that language to customize how # your codebase is analyzed, see https://docs.github.com/en/code-security/code-scanning/creating-an-advanced-setup-for-code-scanning/codeql-code-scanning-for-compiled-languages steps: - name: Checkout repository uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # Initializes the CodeQL tools for scanning. - name: Initialize CodeQL uses: github/codeql-action/init@9e8d0789d4a0fa9ceb6b1738f7e269594bdd67f0 with: languages: ${{ matrix.language }} build-mode: ${{ matrix.build-mode }} # If you wish to specify custom queries, you can do so here or in a config file. # By default, queries listed here will override any specified in a config file. # Prefix the list here with "+" to use these queries and those in the config file. # For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs # queries: security-extended,security-and-quality - name: Perform CodeQL Analysis uses: github/codeql-action/analyze@9e8d0789d4a0fa9ceb6b1738f7e269594bdd67f0 with: category: "/language:${{matrix.language}}" gittuf-0.9.0/.github/workflows/coverage.yml000066400000000000000000000014151475150141000207340ustar00rootroot00000000000000name: coverage on: push: branches: ['main'] paths-ignore: - "docs/**" - "**.md" pull_request: paths-ignore: - "docs/**" - "**.md" permissions: read-all jobs: test: runs-on: ubuntu-latest steps: - name: Checkout code uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 - name: Install Go uses: actions/setup-go@f111f3307d8850f501ac008e886eec1fd1932a34 with: go-version: 1.23 cache: true - name: Check Coverage run: go test -covermode=atomic -coverprofile='coverage.cov' `go list ./... 
| grep -v -f .test_ignore.txt` - name: Coveralls Parallel uses: coverallsapp/github-action@648a8eb78e6d50909eff900e4ec85cab4524a45b with: file: 'coverage.cov' gittuf-0.9.0/.github/workflows/docs.yml000066400000000000000000000006101475150141000200650ustar00rootroot00000000000000name: docs on: push: branches: ['main'] pull_request: permissions: read-all jobs: docs: runs-on: ubuntu-latest steps: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 - uses: actions/setup-go@f111f3307d8850f501ac008e886eec1fd1932a34 with: go-version: '1.23' cache: true - run: ./.github/workflows/check-docs.sh gittuf-0.9.0/.github/workflows/get-started-tests-policy-v02.yml000066400000000000000000000021451475150141000244270ustar00rootroot00000000000000name: get-started-tests with policy v02 on: push: branches: ['main'] paths-ignore: - "docs/**" - "!docs/testing/**" - "!docs/get-started.md" - "*.md" pull_request: paths-ignore: - "docs/**" - "!docs/testing/**" - "!docs/get-started.md" - "*.md" permissions: read-all jobs: test: strategy: matrix: os: [ubuntu-latest, macos-latest, windows-latest] go-version: ['1.23'] runs-on: ${{ matrix.os }} steps: - name: Checkout code uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 - name: Install Go uses: actions/setup-go@f111f3307d8850f501ac008e886eec1fd1932a34 with: go-version: ${{ matrix.go-version }} cache: true - name: Install Python uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 with: python-version: '3.10' - name: Build gittuf run: make just-install - name: Test Getting Started run: python3 docs/testing/test-get-started-md.py env: GITTUF_DEV: '1' GITTUF_ALLOW_V02_POLICY: '1' gittuf-0.9.0/.github/workflows/get-started-tests.yml000066400000000000000000000020151475150141000225210ustar00rootroot00000000000000name: get-started-tests on: push: branches: ['main'] paths-ignore: - "docs/**" - "!docs/testing/**" - "!docs/get-started.md" - "*.md" pull_request: paths-ignore: - "docs/**" - "!docs/testing/**" - "!docs/get-started.md" - "*.md" permissions: read-all jobs: test: strategy: matrix: os: [ubuntu-latest, macos-latest, windows-latest] go-version: ['1.23'] runs-on: ${{ matrix.os }} steps: - name: Checkout code uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 - name: Install Go uses: actions/setup-go@f111f3307d8850f501ac008e886eec1fd1932a34 with: go-version: ${{ matrix.go-version }} cache: true - name: Install Python uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 with: python-version: '3.10' - name: Build gittuf run: make just-install - name: Test Getting Started run: python3 docs/testing/test-get-started-md.py gittuf-0.9.0/.github/workflows/gittuf-rsl-main.yml000066400000000000000000000024421475150141000221640ustar00rootroot00000000000000name: Record change to main branch on: push: branches: - 'main' permissions: read-all jobs: create-rsl-entry: if: github.repository == 'gittuf/gittuf' runs-on: ubuntu-latest permissions: contents: write id-token: write steps: - name: Install gittuf uses: gittuf/gittuf-installer@f31e69c7c18c7473cbce18ed69a559b945d3a738 - name: Install gitsign uses: chainguard-dev/actions/setup-gitsign@main - name: Checkout repository uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 with: fetch-depth: 0 - name: Update RSL env: KEY: ${{ secrets.KEY }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: | echo "$KEY" > /tmp/key chmod 600 /tmp/key # ssh-keygen signer requires this git fetch origin refs/gittuf/reference-state-log:refs/gittuf/reference-state-log 
refs/gittuf/attestations:refs/gittuf/attestations GITTUF_DEV=1 gittuf dev attest-github --signing-key /tmp/key --repository ${{ github.repository }} --commit ${{ github.sha }} --base-branch "main" gittuf rsl record main git push origin refs/gittuf/reference-state-log:refs/gittuf/reference-state-log refs/gittuf/attestations:refs/gittuf/attestations gittuf-0.9.0/.github/workflows/gittuf-rsl-non-main.yml000066400000000000000000000015651475150141000227610ustar00rootroot00000000000000name: Record change to non-main branch on: push: branches-ignore: - 'main' permissions: read-all jobs: create-rsl-entry: if: github.repository == 'gittuf/gittuf' runs-on: ubuntu-latest permissions: contents: write id-token: write steps: - name: Install gittuf uses: gittuf/gittuf-installer@f31e69c7c18c7473cbce18ed69a559b945d3a738 - name: Install gitsign uses: chainguard-dev/actions/setup-gitsign@main - name: Checkout repository uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 with: fetch-depth: 0 - name: Update RSL run: | git fetch origin refs/gittuf/reference-state-log:refs/gittuf/reference-state-log gittuf rsl record ${{ github.ref }} git push origin refs/gittuf/reference-state-log:refs/gittuf/reference-state-log gittuf-0.9.0/.github/workflows/gittuf-verify.yml000066400000000000000000000010721475150141000217440ustar00rootroot00000000000000name: gittuf Verification on: workflow_run: workflows: ["Record change to main branch"] branches: [main] types: - completed permissions: read-all jobs: gittuf-verify: if: github.repository == 'gittuf/gittuf' runs-on: ubuntu-latest steps: - name: Install gittuf uses: gittuf/gittuf-installer@f31e69c7c18c7473cbce18ed69a559b945d3a738 - name: Checkout and verify repository run: | gittuf clone https://github.com/${{ github.repository }} cd gittuf gittuf verify-ref main --verbose gittuf-0.9.0/.github/workflows/lint.yml000066400000000000000000000011661475150141000201120ustar00rootroot00000000000000name: golangci-lint on: push: branches: ['main'] paths-ignore: - "docs/**" - "**.md" pull_request: paths-ignore: - "docs/**" - "**.md" permissions: read-all jobs: golangci: name: lint runs-on: ubuntu-latest steps: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 - uses: actions/setup-go@f111f3307d8850f501ac008e886eec1fd1932a34 with: go-version: '1.23' cache: true - name: golangci-lint uses: golangci/golangci-lint-action@e60da84bfae8c7920a47be973d75e15710aa8bd7 with: version: latest gittuf-0.9.0/.github/workflows/release.yml000066400000000000000000000014231475150141000205600ustar00rootroot00000000000000name: release on: push: tags: - 'v*' permissions: read-all jobs: release: permissions: contents: write id-token: write runs-on: ubuntu-latest steps: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 with: fetch-depth: 0 # fetch full history for previous tag information - uses: actions/setup-go@f111f3307d8850f501ac008e886eec1fd1932a34 with: go-version: '1.23' cache: true - uses: sigstore/cosign-installer@c56c2d3e59e4281cc41dea2217323ba5694b171e - uses: goreleaser/goreleaser-action@9ed2f89a662bf1735a48bc8557fd212fa902bebf with: version: latest args: release --clean env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} gittuf-0.9.0/.github/workflows/run-demo.yml000066400000000000000000000012661475150141000206730ustar00rootroot00000000000000name: Run demo on: push: branches: - main pull_request: branches: - main permissions: read-all jobs: demo: name: Run demo runs-on: ubuntu-latest steps: - name: Checkout demo repository uses: 
actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 with: repository: gittuf/demo - name: Install Python uses: actions/setup-python@42375524e23c412d93fb67b49958b491fce71c38 with: python-version: '3.12' - name: Install gittuf-installer uses: gittuf/gittuf-installer@f31e69c7c18c7473cbce18ed69a559b945d3a738 - name: Run demo script run: python run-demo.py --no-prompt gittuf-0.9.0/.github/workflows/scorecard.yml000066400000000000000000000061321475150141000211070ustar00rootroot00000000000000# This workflow uses actions that are not certified by GitHub. They are provided # by a third-party and are governed by separate terms of service, privacy # policy, and support documentation. name: OpenSSF Scorecard on: # For Branch-Protection check. Only the default branch is supported. See # https://github.com/ossf/scorecard/blob/main/docs/checks.md#branch-protection branch_protection_rule: # To guarantee Maintained check is occasionally updated. See # https://github.com/ossf/scorecard/blob/main/docs/checks.md#maintained schedule: - cron: '15 10 * * 3' push: branches: [ "main" ] paths-ignore: - "docs/**" - "**.md" # Declare default permissions as read only. permissions: read-all jobs: analysis: name: Scorecard analysis runs-on: ubuntu-latest permissions: # Needed to upload the results to code-scanning dashboard. security-events: write # Needed to publish results and get a badge (see publish_results below). id-token: write # Uncomment the permissions below if installing in a private repository. # contents: read # actions: read steps: - name: "Checkout code" uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false - name: "Run analysis" uses: ossf/scorecard-action@62b2cac7ed8198b15735ed49ab1e5cf35480ba46 # v2.4.0 with: results_file: results.sarif results_format: sarif # (Optional) "write" PAT token. Uncomment the `repo_token` line below if: # - you want to enable the Branch-Protection check on a *public* repository, or # - you are installing Scorecard on a *private* repository # To create the PAT, follow the steps in https://github.com/ossf/scorecard-action?tab=readme-ov-file#authentication-with-fine-grained-pat-optional. # repo_token: ${{ secrets.SCORECARD_TOKEN }} # Public repositories: # - Publish results to OpenSSF REST API for easy access by consumers # - Allows the repository to include the Scorecard badge. # - See https://github.com/ossf/scorecard-action#publishing-results. # For private repositories: # - `publish_results` will always be set to `false`, regardless # of the value entered here. publish_results: true # Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF # format to the repository Actions tab. - name: "Upload artifact" uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0 with: name: SARIF file path: results.sarif retention-days: 5 # Upload the results to GitHub's code scanning dashboard (optional). 
# Commenting out will disable upload of results to your repo's Code Scanning dashboard - name: "Upload to code-scanning" uses: github/codeql-action/upload-sarif@9e8d0789d4a0fa9ceb6b1738f7e269594bdd67f0 # v3.28.9 with: sarif_file: results.sarif gittuf-0.9.0/.github/workflows/ubuntu-2204.yml000066400000000000000000000014451475150141000210530ustar00rootroot00000000000000name: git 2.34.1 on ubuntu-22.04 on: push: branches: ["main"] paths-ignore: - "docs/**" - "**.md" pull_request: paths-ignore: - "docs/**" - "**.md" permissions: read-all jobs: test: strategy: matrix: go-version: ['1.23'] runs-on: ubuntu-22.04 steps: - name: Downgrade Git run: sudo apt-get update && sudo apt-get install -y --allow-downgrades git=1:2.34.1-1ubuntu1.12 git-man=1:2.34.1-1ubuntu1.12 - name: Checkout code uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 - name: Install Go uses: actions/setup-go@f111f3307d8850f501ac008e886eec1fd1932a34 with: go-version: ${{ matrix.go-version }} cache: true - name: Test run: go test ./... gittuf-0.9.0/.gitignore000066400000000000000000000000261475150141000150060ustar00rootroot00000000000000dist/* *.prof vendor* gittuf-0.9.0/.golangci.yml000066400000000000000000000005421475150141000154050ustar00rootroot00000000000000linters: enable: - asciicheck - errcheck - errorlint - gofmt - goimports - gosec - gocritic - importas - prealloc - revive - misspell - stylecheck - tparallel - unconvert - unparam - unused - whitespace output: uniq-by-line: false run: issues-exit-code: 1 timeout: 10m exclude-dirs: - internal/third_party gittuf-0.9.0/.goreleaser.yml000066400000000000000000000024201475150141000157470ustar00rootroot00000000000000version: 2 project_name: gittuf builds: - id: gittuf mod_timestamp: '{{ .CommitTimestamp }}' env: - CGO_ENABLED=0 flags: - -trimpath goos: - linux - darwin - freebsd - windows goarch: - amd64 - arm64 ldflags: - "-s -w" - "-extldflags=-zrelro" - "-extldflags=-znow" - "-buildid= -X github.com/gittuf/gittuf/internal/version.gitVersion={{ .Version }}" - id: git-remote-gittuf mod_timestamp: '{{ .CommitTimestamp }}' main: ./internal/git-remote-gittuf binary: git-remote-gittuf env: - CGO_ENABLED=0 flags: - -trimpath goos: - linux - darwin - freebsd - windows goarch: - amd64 - arm64 ldflags: - "-s -w" - "-extldflags=-zrelro" - "-extldflags=-znow" - "-buildid= -X github.com/gittuf/gittuf/internal/version.gitVersion={{ .Version }}" archives: - id: binary format: binary allow_different_binary_count: true gomod: proxy: true changelog: disable: true signs: - cmd: cosign env: - COSIGN_YES=true certificate: '${artifact}.pem' signature: '${artifact}.sig' args: - sign-blob - '--output-certificate=${certificate}' - '--output-signature=${signature}' - '${artifact}' artifacts: binary output: true release: prerelease: allow github: owner: gittuf name: gittuf draft: true gittuf-0.9.0/.test_ignore.txt000066400000000000000000000005371475150141000161660ustar00rootroot00000000000000github.com/gittuf/gittuf/docs/cli github.com/gittuf/gittuf/experimental/gittuf/options github.com/gittuf/gittuf/internal/cmd github.com/gittuf/gittuf/internal/dev github.com/gittuf/gittuf/internal/git-remote-gittuf github.com/gittuf/gittuf/internal/testartifacts github.com/gittuf/gittuf/internal/third_party github.com/gittuf/gittuf/internal/version gittuf-0.9.0/CHANGELOG.md000066400000000000000000000206761475150141000146440ustar00rootroot00000000000000# Changelog This file tracks the changes introduced by gittuf versions. 
## v0.9.0 ### Added - Added a terminal UI (TUI) to enable managing gittuf policy interactively - Added global rules to set thresholds and prohibit force pushes to help set security baselines in repositories with gittuf - Added workflows to support synchronizing/propagating policy and RSL changes across multiple repositories - Added local persistent cache functionality to reduce the time taken for verification of a repository after successful initial verification - Added functionality to set a repository's canonical location in gittuf metadata - Added a control for RSL recording to skip checking for duplicates - Added the gittuf Augmentation Process (GAP) for formalizing changes to gittuf - Added color output for various gittuf logging flows - Added functionality to discard currently staged changes to policy - Added functionality to remove principals and keys no longer used by rules in the metadata ### Updated - Updated RSL printing to now use buffered output, improving performance - Improved testing coverage of `gitinterface` - Updated the design document for clarity and to reflect recent changes to gittuf - Updated various dependencies and CI workflows ## v0.8.1 - Fixed loading of legacy ECDSA key format - Replaced `show` with `rev-parse` in some gitinterface APIs - Added gittuf/demo run to CI - Updated various dependencies and CI workflows ## v0.8.0 - Added an experimental gittuf Go API - Added an experimental version (`v0.2`) of policy metadata, which adds support for "principals" in gittuf - Added an experimental flow to determine a feature ref's mergeability - Optimized some preprocessing flows in the `policy` package - Improved gittuf's design documentation - Improved testing coverage of `gittuf` and `rsl` - Fixed an internal issue with git-remote-gittuf and Go's builtin max - Fixed issue with `git-remote-gittuf` with server responses on push - Fixed issue with `git-remote-gittuf` when pushing to a remote repository without gittuf enabled - Fixed issue with `git-remote-gittuf` freezing upon failure to authenticate with the remote repository when using HTTP - Updated various dependencies and CI workflows ## v0.7.0 - Added support for metadata signing using Sigstore (currently `GITTUF_DEV` only) - Removed use of legacy custom securesystemslib key formats in gittuf's tests - Removed vendored signerverifier library - Unified SSH signature verification for Git commits and tags - Refactored `policy` and `tuf` packages to support versioning policy metadata - Updated various dependencies and CI workflows ## v0.6.2 - Added `git-remote-gittuf` to the release workflow's pre-built artifacts - Updated CI workflow dependency ## v0.6.1 - Added a counter to RSL entries to support persistent caching - Added experimental support for signature extensions to vendored DSSE library - Refactored `GetLatestReferenceEntry` RSL API - Fixed Makefile build on Windows - Moved `update-root-threshold` and `update-policy-threshold` out of developer mode - Fixed issue with git-remote-gittuf using the wrong transport when fetching the RSL - Fixed issue with git-remote-gittuf when explicitly pushing the RSL - Fixed issue with git-remote-gittuf and `curl` fetches and pushes on Windows - Increased testing coverage of `policy` and `gitinterface` - Improved documentation for getting started with gittuf, especially on Windows platforms - Added copyright notices to code files - Updated various dependencies and CI workflows ## v0.6.0 - Added command to reorder policy rules - Added support for older Git versions - Added 
support for GitHub pull request approval attestations - Added support for using enterprise GitHub instances - Added caching for the RSL APIs `GetEntry` and `GetParentForEntry` - Added parallelization for some unit tests - Removed some deprecated flows such as `FindPublicKeysForPath` and refactored verification APIs - Added CodeQL scanning for the repository - Updated various dependencies and CI workflows ## v0.5.2 - Fixed issue with git-remote-gittuf when force pushing - Fixed issue with git-remote-gittuf not fetching RSL before adding new entries - Updated various dependencies ## v0.5.1 - Updated release workflow to support GoReleaser v2 ## v0.5.0 - Added support for `ssh-keygen` based signer and verifier - Added support for overriding reference name when local and remote reference names differ - Added initial (alpha) implementation of git-remote-gittuf - Added command to display RSL - Added support for automatically skipping RSL entries that point to rebased commits - Updated policy verification pattern matching to use `fnmatch` - Updated to use Git binary for various operations on underlying repository - Updated various dependencies and CI workflows - Updated docs to make command snippets easier to copy - Removed extraneous fields from gittuf policy metadata - Removed `verify-commit` and `verify-tag` workflows in favor of `verify-ref` (BREAKING CHANGE) - Governance: added Patrick Zielinski and Neil Naveen as gittuf maintainers ## v0.4.0 - Added support for `policy-staging` for sequential signing of metadata to meet a threshold - Added support for minimum required signatures for rules - Added support for profiling with pprof - Added `--from-entry` to `verify-ref` - Added debug statements for `--verbose` flag - Added caching of verifiers for each verified namespace (reference or file path) to avoid repeated searches of the same policy state - Added separated `add-rule` and `update-rule` workflows for policy - Added dogfooding plan - Added CI workflows for phase 1 of dogfooding - Added OpenSSF Scorecard for the repository - Updated policy to require each rule name to be unique across all rule files - Updated file rules verification to use same policy as branch protection rules verification - Update reference authorization attestations to use merge tree for the change being authorized - Updated design document with definitions and a diagram - Updated tag verification to check the tag's RSL entry points to either the tag object or the tag's target object - Updated roadmap to indicate status for each item - Updated minimum Go version to 1.22 - Updated pointer to gittuf community details - Updated various dependencies and CI workflows ## v0.3.0 - Added check to prevent duplicate RSL entries for the same ref and target - Added a formal developer mode for new early-stage gittuf features - Added early support for attestations with one type for approving reference changes (developer mode only) - Added support for gittuf-specific Git hooks with a pre-push hook to fetch / create / push RSL entries - Updated `verify-ref` to perform full verification by default (BREAKING CHANGE) - Updated identification of trusted keys in policy to support varying threshold values between delegations - Added verification tests for delegated policies - Added root key management commands to the CLI - Added command to list rules in gittuf policy - Added support for standard encoding of private and public keys - Added support for verifying SSH Git commit and tag signatures - Added check for cycles when walking policy 
graph during verification
- Added autogenerated CLI docs
- Removed file rule verification when no file rules exist in the policy for efficiency
- Added command to sign existing policy file with no other changes
- Added get started guide and gittuf logo to docs
- Removed CLI usage message for gittuf errors
- Updated various dependencies

## v0.2.0

- Added support to RSL to find unskipped entries
- Added `Get*` functions to gitinterface to compartmentalize choice of Git library
- Added support in RSL and policy functions for RSL annotation entries
- Added recovery mode for policy verification workflow
- Added `go fmt` as Makefile target
- Updated length of refspecs slice to account for doubled entries
- Added support for merge commits in gitinterface
- Updated CLI to check if Git signing is viable to abort early
- Fixed bug in CLI that required an unnecessary signing key argument
- Fixed `clone`'s ability to handle trailing slashes
- Improved testing for in policy verification for delegations
- Added plumbing for better logging
- Updated various dependencies
- Updated installation instructions to include Sigstore verification of binaries

## v0.1.0

- Implemented reference state log (RSL)
- Added support for Git reference policies using RSL entry signatures
- Added support for file policies using commit signatures
- Added support for basic gittuf sync operations

gittuf-0.9.0/CONTRIBUTING.md

# Contributing Guide

Contributions to gittuf can be of several types:

* changes to the [design document](/docs/design-document.md) or [gittuf Augmentation Proposals (GAPs)](/docs/gaps/README.md) stored in the `docs/` folder
* code changes for bug fixes, new features, documentation, and other enhancements to the implementation
* new issues or feature requests

[Join our community](https://github.com/gittuf/community/?tab=readme-ov-file#join-us) to get started!

## Contributor Workflow

When submitting changes to the gittuf docs or implementation, contributors must open a GitHub pull request to the repository. If a proposed change is a significant deviation from gittuf's [design document](/docs/design-document.md), a [GAP](/docs/gaps/README.md) may be necessary. When in doubt, contributors are advised to file an issue in the repository for the [maintainers](MAINTAINERS.txt) to determine the best way forward.

gittuf uses the NYU Secure Systems Lab [development workflow](https://github.com/secure-systems-lab/lab-guidelines/blob/master/dev-workflow.md). Pull requests must include tests for the changes in behavior they introduce. They are reviewed by one or more [maintainers](MAINTAINERS.txt) and undergo automated testing such as (but not limited to):

* Unit and build testing
* Static analysis using linters
* Developer Certificate of Origin (DCO) check

In future, as gittuf matures, this repository will also be secured using gittuf. At that point, the contributor workflow may evolve to record gittuf specific information.

## Other Guidelines

Contributors to gittuf must abide by the project's [code of conduct](https://github.com/gittuf/community/blob/main/CODE-OF-CONDUCT.md). Any questions regarding the gittuf community's governance and code of conduct may be directed to the project's [Technical Steering Committee](https://github.com/gittuf/community/blob/main/TECHNICAL-STEERING-COMMITTEE.md).
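To make the automated checks described in the Contributor Workflow section concrete, here is a minimal sketch of what a contributor might run locally before opening a pull request. The `make` targets come from this repository's Makefile, and the `-s` flag adds the `Signed-off-by` trailer that the DCO check looks for; the commit message is only a placeholder.

```bash
# Format and test the changes using the repository's Makefile targets
make fmt
make test

# Commit with a Signed-off-by trailer so the DCO check passes
git commit -s -m "Describe the change"
```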
gittuf-0.9.0/LICENSE000066400000000000000000000261401475150141000140300ustar00rootroot00000000000000 Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. 
Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. 
Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. Copyright gittuf a Series of LF Projects, LLC Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. gittuf-0.9.0/MAINTAINERS.txt000066400000000000000000000013301475150141000153300ustar00rootroot00000000000000gittuf is built and maintained by developers from New York University (NYU), Chainguard, and New Jersey Institute of Technology (NJIT). Aditya Sirish A Yelgundhalli Email: aditya.sirish@nyu.edu GitHub username: @adityasaky Affiliation: NYU Justin Cappos Email: jcappos@nyu.edu GitHub username: @JustinCappos Affiliation: NYU Billy Lynch Email: billy@chainguard.dev GitHub username: @wlynch Affiliation: Chainguard Reza Curtmola Email: reza.curtmola@njit.edu GitHub username: @reza-curtmola Affiliation: NJIT Patrick Zielinski Email: patrick.z@nyu.edu GitHub username: @patzielinski Affiliation: NYU Neil Naveen Email: naveen.neil@icloud.com GitHub username: @neilnaveen Affiliation: NYUgittuf-0.9.0/Makefile000066400000000000000000000023221475150141000144570ustar00rootroot00000000000000# SPDX-License-Identifier: Apache-2.0 GIT_VERSION ?= $(shell git describe --tags --always --dirty) LDFLAGS=-buildid= -X github.com/gittuf/gittuf/internal/version.gitVersion=$(GIT_VERSION) .PHONY : build test install fmt default : install build : test ifeq ($(OS),Windows_NT) set CGO_ENABLED=0 go build -trimpath -ldflags "$(LDFLAGS)" -o dist/gittuf . go build -trimpath -ldflags "$(LDFLAGS)" -o dist/git-remote-gittuf ./internal/git-remote-gittuf set CGO_ENABLED= else CGO_ENABLED=0 go build -trimpath -ldflags "$(LDFLAGS)" -o dist/gittuf . CGO_ENABLED=0 go build -trimpath -ldflags "$(LDFLAGS)" -o dist/git-remote-gittuf ./internal/git-remote-gittuf endif install : test just-install just-install : ifeq ($(OS),Windows_NT) set CGO_ENABLED=0 go install -trimpath -ldflags "$(LDFLAGS)" github.com/gittuf/gittuf go install -trimpath -ldflags "$(LDFLAGS)" github.com/gittuf/gittuf/internal/git-remote-gittuf set CGO_ENABLED= else CGO_ENABLED=0 go install -trimpath -ldflags "$(LDFLAGS)" github.com/gittuf/gittuf CGO_ENABLED=0 go install -trimpath -ldflags "$(LDFLAGS)" github.com/gittuf/gittuf/internal/git-remote-gittuf endif test : go test -timeout 20m -v ./... fmt : go fmt ./... generate : go generate ./... gittuf-0.9.0/README.md000066400000000000000000000042751475150141000143070ustar00rootroot00000000000000gittuf logo [![OpenSSF Best Practices](https://www.bestpractices.dev/projects/7789/badge)](https://www.bestpractices.dev/projects/7789) ![Build and Tests (CI)](https://github.com/gittuf/gittuf/actions/workflows/ci.yml/badge.svg) [![Coverage Status](https://coveralls.io/repos/github/gittuf/gittuf/badge.svg)](https://coveralls.io/github/gittuf/gittuf) gittuf is a security layer for Git repositories. With gittuf, any developer who can pull from a Git repository can independently verify that the repository's security policies were followed. gittuf's policy, inspired by [The Update Framework (TUF)], handles key management for all trusted developers in a repository, allows for setting permissions for repository branches, tags, files, etc., protects against [other attacks] Git is vulnerable to, and more — all while being backwards compatible with forges such as GitHub and GitLab. 
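As a rough sketch of what that independent verification looks like in practice, the commands below mirror what this repository's own gittuf-verify CI workflow runs; they assume the gittuf CLI is already installed (see the get started guide linked below).

```bash
# Clone the repository along with its gittuf references
gittuf clone https://github.com/gittuf/gittuf
cd gittuf

# Independently verify that changes to the main branch followed the repository's policy
gittuf verify-ref main --verbose
```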
gittuf is a sandbox project at the [Open Source Security Foundation (OpenSSF)] as part of the [Supply Chain Integrity Working Group].

## Current Status

gittuf is currently in alpha. gittuf's metadata may have breaking changes, meaning a repository's gittuf policy may have to be reinitialized from time to time. As such, gittuf is currently not intended to be the primary mechanism for enforcing a repository's security. That said, we're actively seeking feedback from users. Take a look at the [get started guide] to learn how to install and try gittuf out!

Additionally, contributions are welcome, please refer to the [contributing guide], our [roadmap], and the issue tracker for ways to get involved.

## Installation & Get Started

See the [get started guide].

[The Update Framework (TUF)]: https://theupdateframework.io/
[other attacks]: https://ssl.engineering.nyu.edu/papers/torres_toto_usenixsec-2016.pdf
[contributing guide]: /CONTRIBUTING.md
[roadmap]: /docs/roadmap.md
[Open Source Security Foundation (OpenSSF)]: https://openssf.org/
[Supply Chain Integrity Working Group]: https://github.com/ossf/wg-supply-chain-integrity
[get started guide]: /docs/get-started.md

gittuf-0.9.0/SECURITY.md

# Reporting Security Issues

Please report security issues **confidentially** using [GitHub's form](https://github.com/gittuf/gittuf/security/advisories/new). Alternatively, you can send an encrypted email to `jcappos@nyu.edu` using the following PGP key:

> E9C0 59EC 0D32 64FA B35F 94AD 465B F9F6 F8EB 475A

**Note:** Please do not report such issues publicly on the issue tracker. The issue tracker is intended for bug reports and feature requests.

## Responding to Reports

A gittuf maintainer will respond to the report as soon as possible. After the report is triaged and the vulnerability is confirmed, a fix will be prepared under embargo. Once the fix is accepted, a new release will be prepared along with a report detailing the vulnerability. This report will identify the reporter unless they request to be kept anonymous. Finally, a CVE may be requested if appropriate for the vulnerability report.

gittuf-0.9.0/debug/Dockerfile.Git_2_34_1

# This Dockerfile can be used to build a container image with Git 2.34.1 but the
# latest Go and delve release. This is useful when debugging gittuf errors with
# Git 2.34.1, a version of Git we support due to it being the latest on Ubuntu
# 22.04 LTS. After building the container image, run it with the local
# repository as a mounted volume. That way, changes can be made outside the
# container but iteratively debugged within the container.

FROM ubuntu:22.04

RUN apt-get update && apt-get install git software-properties-common -y
RUN add-apt-repository ppa:longsleep/golang-backports -y
RUN apt-get update && apt-get install golang -y
RUN go install github.com/go-delve/delve/cmd/dlv@latest

ENV PATH=/root/go/bin:$PATH

gittuf-0.9.0/debug/README.md

# Debug Helpers

This directory contains tools or artifacts used by the gittuf developers during debugging.

## Debugging gittuf with Git 2.34.1

Build and run the container from the root of the gittuf repository.

```bash
docker build -t debug-gittuf-2-34-1 -f debug/Dockerfile.Git_2_34_1 .
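# Run both commands from the repository root so the Dockerfile path above and the $PWD bind mount below resolve to the gittuf checkout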
docker run -it --rm -v $PWD:/gittuf -w /gittuf debug-gittuf-2-34-1 ``` As the gittuf repository is mounted as a volume, you can make changes on the host and iteratively debug within the container. gittuf-0.9.0/docs/000077500000000000000000000000001475150141000137505ustar00rootroot00000000000000gittuf-0.9.0/docs/cli/000077500000000000000000000000001475150141000145175ustar00rootroot00000000000000gittuf-0.9.0/docs/cli/gittuf.md000066400000000000000000000024471475150141000163520ustar00rootroot00000000000000## gittuf A security layer for Git repositories, powered by TUF ### Options ``` -h, --help help for gittuf --no-color turn off colored output --profile enable CPU and memory profiling --profile-CPU-file string file to store CPU profile (default "cpu.prof") --profile-memory-file string file to store memory profile (default "memory.prof") --verbose enable verbose logging ``` ### SEE ALSO * [gittuf add-hooks](gittuf_add-hooks.md) - Add git hooks that automatically create and sync RSL * [gittuf attest](gittuf_attest.md) - Tools for attesting to code contributions * [gittuf clone](gittuf_clone.md) - Clone repository and its gittuf references * [gittuf dev](gittuf_dev.md) - Developer mode commands * [gittuf policy](gittuf_policy.md) - Tools to manage gittuf policies * [gittuf rsl](gittuf_rsl.md) - Tools to manage the repository's reference state log * [gittuf trust](gittuf_trust.md) - Tools for gittuf's root of trust * [gittuf verify-mergeable](gittuf_verify-mergeable.md) - Tools for verifying mergeability using gittuf policies * [gittuf verify-ref](gittuf_verify-ref.md) - Tools for verifying gittuf policies * [gittuf version](gittuf_version.md) - Version of gittuf gittuf-0.9.0/docs/cli/gittuf_add-hooks.md000066400000000000000000000013341475150141000202750ustar00rootroot00000000000000## gittuf add-hooks Add git hooks that automatically create and sync RSL ``` gittuf add-hooks [flags] ``` ### Options ``` -f, --force overwrite hooks, if they already exist -h, --help help for add-hooks ``` ### Options inherited from parent commands ``` --no-color turn off colored output --profile enable CPU and memory profiling --profile-CPU-file string file to store CPU profile (default "cpu.prof") --profile-memory-file string file to store memory profile (default "memory.prof") --verbose enable verbose logging ``` ### SEE ALSO * [gittuf](gittuf.md) - A security layer for Git repositories, powered by TUF gittuf-0.9.0/docs/cli/gittuf_attest.md000066400000000000000000000013221475150141000177250ustar00rootroot00000000000000## gittuf attest Tools for attesting to code contributions ### Options ``` -h, --help help for attest ``` ### Options inherited from parent commands ``` --no-color turn off colored output --profile enable CPU and memory profiling --profile-CPU-file string file to store CPU profile (default "cpu.prof") --profile-memory-file string file to store memory profile (default "memory.prof") --verbose enable verbose logging ``` ### SEE ALSO * [gittuf](gittuf.md) - A security layer for Git repositories, powered by TUF * [gittuf attest authorize](gittuf_attest_authorize.md) - Add or revoke reference authorization gittuf-0.9.0/docs/cli/gittuf_attest_authorize.md000066400000000000000000000016131475150141000220220ustar00rootroot00000000000000## gittuf attest authorize Add or revoke reference authorization ``` gittuf attest authorize [flags] ``` ### Options ``` -f, --from-ref string ref to authorize merging changes from -h, --help help for authorize -r, --revoke revoke existing authorization -k, --signing-key string 
signing key to use for creating or revoking an authorization ``` ### Options inherited from parent commands ``` --no-color turn off colored output --profile enable CPU and memory profiling --profile-CPU-file string file to store CPU profile (default "cpu.prof") --profile-memory-file string file to store memory profile (default "memory.prof") --verbose enable verbose logging ``` ### SEE ALSO * [gittuf attest](gittuf_attest.md) - Tools for attesting to code contributions gittuf-0.9.0/docs/cli/gittuf_clone.md000066400000000000000000000016771475150141000175360ustar00rootroot00000000000000## gittuf clone Clone repository and its gittuf references ``` gittuf clone [flags] ``` ### Options ``` --bare make a bare Git repository -b, --branch string specify branch to check out -h, --help help for clone --root-key public-keys set of initial root of trust keys for the repository (supported values: paths to SSH keys, GPG key fingerprints, Sigstore/Fulcio identities) ``` ### Options inherited from parent commands ``` --no-color turn off colored output --profile enable CPU and memory profiling --profile-CPU-file string file to store CPU profile (default "cpu.prof") --profile-memory-file string file to store memory profile (default "memory.prof") --verbose enable verbose logging ``` ### SEE ALSO * [gittuf](gittuf.md) - A security layer for Git repositories, powered by TUF gittuf-0.9.0/docs/cli/gittuf_dev.md000066400000000000000000000031251475150141000172020ustar00rootroot00000000000000## gittuf dev Developer mode commands ### Synopsis These commands are meant to be used to aid gittuf development, and are not expected to be used during standard workflows. If used, they can undermine repository security. To proceed, set GITTUF_DEV=1. ### Options ``` -h, --help help for dev ``` ### Options inherited from parent commands ``` --no-color turn off colored output --profile enable CPU and memory profiling --profile-CPU-file string file to store CPU profile (default "cpu.prof") --profile-memory-file string file to store memory profile (default "memory.prof") --verbose enable verbose logging ``` ### SEE ALSO * [gittuf](gittuf.md) - A security layer for Git repositories, powered by TUF * [gittuf dev add-github-approval](gittuf_dev_add-github-approval.md) - Record GitHub pull request approval as an attestation (developer mode only, set GITTUF_DEV=1) * [gittuf dev attest-github](gittuf_dev_attest-github.md) - Record GitHub pull request information as an attestation (developer mode only, set GITTUF_DEV=1) * [gittuf dev dismiss-github-approval](gittuf_dev_dismiss-github-approval.md) - Dismiss GitHub pull request approval as an attestation (developer mode only, set GITTUF_DEV=1) * [gittuf dev populate-cache](gittuf_dev_populate-cache.md) - Populate persistent cache (developer mode only, set GITTUF_DEV=1) * [gittuf dev rsl-record](gittuf_dev_rsl-record.md) - Record explicit state of a Git reference in the RSL, signed with specified key (developer mode only, set GITTUF_DEV=1) gittuf-0.9.0/docs/cli/gittuf_dev_add-github-approval.md000066400000000000000000000023621475150141000231160ustar00rootroot00000000000000## gittuf dev add-github-approval Record GitHub pull request approval as an attestation (developer mode only, set GITTUF_DEV=1) ``` gittuf dev add-github-approval [flags] ``` ### Options ``` --approver string identity of the reviewer who approved the change --base-URL string location of GitHub instance (default "https://github.com") -h, --help help for add-github-approval --pull-request-number int pull request number 
(default -1) --repository string path to base GitHub repository the pull request is opened against, of form {owner}/{repo} --review-ID int pull request review ID (default -1) -k, --signing-key string signing key to use for signing attestation ``` ### Options inherited from parent commands ``` --no-color turn off colored output --profile enable CPU and memory profiling --profile-CPU-file string file to store CPU profile (default "cpu.prof") --profile-memory-file string file to store memory profile (default "memory.prof") --verbose enable verbose logging ``` ### SEE ALSO * [gittuf dev](gittuf_dev.md) - Developer mode commands gittuf-0.9.0/docs/cli/gittuf_dev_attest-github.md000066400000000000000000000024061475150141000220470ustar00rootroot00000000000000## gittuf dev attest-github Record GitHub pull request information as an attestation (developer mode only, set GITTUF_DEV=1) ``` gittuf dev attest-github [flags] ``` ### Options ``` --base-URL string location of GitHub instance (default "https://github.com") --base-branch string base branch for pull request, used with --commit --commit string commit to record pull request attestation for -h, --help help for attest-github --pull-request-number int pull request number to record in attestation (default -1) --repository string path to base GitHub repository the pull request is opened against, of form {owner}/{repo} -k, --signing-key string signing key to use for signing attestation ``` ### Options inherited from parent commands ``` --no-color turn off colored output --profile enable CPU and memory profiling --profile-CPU-file string file to store CPU profile (default "cpu.prof") --profile-memory-file string file to store memory profile (default "memory.prof") --verbose enable verbose logging ``` ### SEE ALSO * [gittuf dev](gittuf_dev.md) - Developer mode commands gittuf-0.9.0/docs/cli/gittuf_dev_dismiss-github-approval.md000066400000000000000000000021031475150141000240320ustar00rootroot00000000000000## gittuf dev dismiss-github-approval Dismiss GitHub pull request approval as an attestation (developer mode only, set GITTUF_DEV=1) ``` gittuf dev dismiss-github-approval [flags] ``` ### Options ``` --base-URL string location of GitHub instance (default "https://github.com") --dismiss-approver string identity of the reviewer whose review was dismissed -h, --help help for dismiss-github-approval --review-ID int pull request review ID (default -1) -k, --signing-key string signing key to use for signing attestation ``` ### Options inherited from parent commands ``` --no-color turn off colored output --profile enable CPU and memory profiling --profile-CPU-file string file to store CPU profile (default "cpu.prof") --profile-memory-file string file to store memory profile (default "memory.prof") --verbose enable verbose logging ``` ### SEE ALSO * [gittuf dev](gittuf_dev.md) - Developer mode commands gittuf-0.9.0/docs/cli/gittuf_dev_populate-cache.md000066400000000000000000000012621475150141000221540ustar00rootroot00000000000000## gittuf dev populate-cache Populate persistent cache (developer mode only, set GITTUF_DEV=1) ``` gittuf dev populate-cache [flags] ``` ### Options ``` -h, --help help for populate-cache ``` ### Options inherited from parent commands ``` --no-color turn off colored output --profile enable CPU and memory profiling --profile-CPU-file string file to store CPU profile (default "cpu.prof") --profile-memory-file string file to store memory profile (default "memory.prof") --verbose enable verbose logging ``` ### SEE ALSO * [gittuf 
dev](gittuf_dev.md) - Developer mode commands gittuf-0.9.0/docs/cli/gittuf_dev_rsl-record.md000066400000000000000000000016701475150141000213410ustar00rootroot00000000000000## gittuf dev rsl-record Record explicit state of a Git reference in the RSL, signed with specified key (developer mode only, set GITTUF_DEV=1) ``` gittuf dev rsl-record [flags] ``` ### Options ``` --dst-ref string name of destination reference, if it differs from source reference -h, --help help for rsl-record -k, --signing-key string path to PEM encoded SSH or GPG signing key -t, --target string target ID ``` ### Options inherited from parent commands ``` --no-color turn off colored output --profile enable CPU and memory profiling --profile-CPU-file string file to store CPU profile (default "cpu.prof") --profile-memory-file string file to store memory profile (default "memory.prof") --verbose enable verbose logging ``` ### SEE ALSO * [gittuf dev](gittuf_dev.md) - Developer mode commands gittuf-0.9.0/docs/cli/gittuf_policy.md000066400000000000000000000046261475150141000177320ustar00rootroot00000000000000## gittuf policy Tools to manage gittuf policies ### Options ``` -h, --help help for policy -k, --signing-key string signing key to use to sign policy file ``` ### Options inherited from parent commands ``` --no-color turn off colored output --profile enable CPU and memory profiling --profile-CPU-file string file to store CPU profile (default "cpu.prof") --profile-memory-file string file to store memory profile (default "memory.prof") --verbose enable verbose logging ``` ### SEE ALSO * [gittuf](gittuf.md) - A security layer for Git repositories, powered by TUF * [gittuf policy add-key](gittuf_policy_add-key.md) - Add a trusted key to a policy file * [gittuf policy add-person](gittuf_policy_add-person.md) - Add a trusted person to a policy file (requires developer mode and v0.2 policy metadata to be enabled, set GITTUF_DEV=1 and GITTUF_ALLOW_V02_POLICY=1) * [gittuf policy add-rule](gittuf_policy_add-rule.md) - Add a new rule to a policy file * [gittuf policy apply](gittuf_policy_apply.md) - Validate and apply changes from policy-staging to policy * [gittuf policy discard](gittuf_policy_discard.md) - Discard the currently staged changes to policy * [gittuf policy init](gittuf_policy_init.md) - Initialize policy file * [gittuf policy list-principals](gittuf_policy_list-principals.md) - List principals for the current policy in the specified rule file * [gittuf policy list-rules](gittuf_policy_list-rules.md) - List rules for the current state * [gittuf policy remote](gittuf_policy_remote.md) - Tools for managing remote policies * [gittuf policy remove-key](gittuf_policy_remove-key.md) - Remove a key from a policy file * [gittuf policy remove-person](gittuf_policy_remove-person.md) - Remove a person from a policy file (requires developer mode and v0.2 policy metadata to be enabled, set GITTUF_DEV=1 and GITTUF_ALLOW_V02_POLICY=1) * [gittuf policy remove-rule](gittuf_policy_remove-rule.md) - Remove rule from a policy file * [gittuf policy reorder-rules](gittuf_policy_reorder-rules.md) - Reorder rules in the specified policy file * [gittuf policy sign](gittuf_policy_sign.md) - Sign policy file * [gittuf policy tui](gittuf_policy_tui.md) - Start the TUI for managing policies * [gittuf policy update-rule](gittuf_policy_update-rule.md) - Update an existing rule in a policy file gittuf-0.9.0/docs/cli/gittuf_policy_add-key.md000066400000000000000000000022551475150141000213240ustar00rootroot00000000000000## gittuf policy add-key Add a 
trusted key to a policy file ### Synopsis This command allows users to add trusted keys to the specified policy file. By default, the main policy file is selected. Note that the keys can be specified from disk, from the GPG keyring using the "gpg:<fingerprint>" format, or as a Sigstore identity as "fulcio:<identity>::<issuer>". ``` gittuf policy add-key [flags] ``` ### Options ``` -h, --help help for add-key --policy-name string name of policy file to add key to (default "targets") --public-key stringArray authorized public key ``` ### Options inherited from parent commands ``` --no-color turn off colored output --profile enable CPU and memory profiling --profile-CPU-file string file to store CPU profile (default "cpu.prof") --profile-memory-file string file to store memory profile (default "memory.prof") -k, --signing-key string signing key to use to sign policy file --verbose enable verbose logging ``` ### SEE ALSO * [gittuf policy](gittuf_policy.md) - Tools to manage gittuf policies gittuf-0.9.0/docs/cli/gittuf_policy_add-person.md ## gittuf policy add-person Add a trusted person to a policy file (requires developer mode and v0.2 policy metadata to be enabled, set GITTUF_DEV=1 and GITTUF_ALLOW_V02_POLICY=1) ### Synopsis This command allows users to add a trusted person to the specified policy file. By default, the main policy file is selected. Note that the person's keys can be specified from disk, from the GPG keyring using the "gpg:<fingerprint>" format, or as a Sigstore identity as "fulcio:<identity>::<issuer>". ``` gittuf policy add-person [flags] ``` ### Options ``` --associated-identity stringArray identities on code review platforms in the form 'providerID::identity' (e.g., 'https://github.com::') --custom stringArray additional custom metadata in the form KEY=VALUE -h, --help help for add-person --person-ID string person ID --policy-name string name of policy file to add person to (default "targets") --public-key stringArray authorized public key for person ``` ### Options inherited from parent commands ``` --no-color turn off colored output --profile enable CPU and memory profiling --profile-CPU-file string file to store CPU profile (default "cpu.prof") --profile-memory-file string file to store memory profile (default "memory.prof") -k, --signing-key string signing key to use to sign policy file --verbose enable verbose logging ``` ### SEE ALSO * [gittuf policy](gittuf_policy.md) - Tools to manage gittuf policies gittuf-0.9.0/docs/cli/gittuf_policy_add-rule.md ## gittuf policy add-rule Add a new rule to a policy file ### Synopsis This command allows users to add a new rule to the specified policy file. By default, the main policy file is selected. Note that authorized keys can be specified from disk, from the GPG keyring using the "gpg:<fingerprint>" format, or as a Sigstore identity as "fulcio:<identity>::<issuer>". 
``` gittuf policy add-rule [flags] ``` ### Options ``` --authorize stringArray authorize the principal IDs for the rule -h, --help help for add-rule --policy-name string name of policy file to add rule to (default "targets") --rule-name string name of rule --rule-pattern stringArray patterns used to identify namespaces rule applies to --threshold int threshold of required valid signatures (default 1) ``` ### Options inherited from parent commands ``` --no-color turn off colored output --profile enable CPU and memory profiling --profile-CPU-file string file to store CPU profile (default "cpu.prof") --profile-memory-file string file to store memory profile (default "memory.prof") -k, --signing-key string signing key to use to sign policy file --verbose enable verbose logging ``` ### SEE ALSO * [gittuf policy](gittuf_policy.md) - Tools to manage gittuf policies gittuf-0.9.0/docs/cli/gittuf_policy_apply.md000066400000000000000000000013561475150141000211340ustar00rootroot00000000000000## gittuf policy apply Validate and apply changes from policy-staging to policy ``` gittuf policy apply [flags] ``` ### Options ``` -h, --help help for apply ``` ### Options inherited from parent commands ``` --no-color turn off colored output --profile enable CPU and memory profiling --profile-CPU-file string file to store CPU profile (default "cpu.prof") --profile-memory-file string file to store memory profile (default "memory.prof") -k, --signing-key string signing key to use to sign policy file --verbose enable verbose logging ``` ### SEE ALSO * [gittuf policy](gittuf_policy.md) - Tools to manage gittuf policies gittuf-0.9.0/docs/cli/gittuf_policy_discard.md000066400000000000000000000013521475150141000214140ustar00rootroot00000000000000## gittuf policy discard Discard the currently staged changes to policy ``` gittuf policy discard [flags] ``` ### Options ``` -h, --help help for discard ``` ### Options inherited from parent commands ``` --no-color turn off colored output --profile enable CPU and memory profiling --profile-CPU-file string file to store CPU profile (default "cpu.prof") --profile-memory-file string file to store memory profile (default "memory.prof") -k, --signing-key string signing key to use to sign policy file --verbose enable verbose logging ``` ### SEE ALSO * [gittuf policy](gittuf_policy.md) - Tools to manage gittuf policies gittuf-0.9.0/docs/cli/gittuf_policy_init.md000066400000000000000000000014461475150141000207520ustar00rootroot00000000000000## gittuf policy init Initialize policy file ``` gittuf policy init [flags] ``` ### Options ``` -h, --help help for init --policy-name string name of policy file to create (default "targets") ``` ### Options inherited from parent commands ``` --no-color turn off colored output --profile enable CPU and memory profiling --profile-CPU-file string file to store CPU profile (default "cpu.prof") --profile-memory-file string file to store memory profile (default "memory.prof") -k, --signing-key string signing key to use to sign policy file --verbose enable verbose logging ``` ### SEE ALSO * [gittuf policy](gittuf_policy.md) - Tools to manage gittuf policies gittuf-0.9.0/docs/cli/gittuf_policy_list-principals.md000066400000000000000000000017321475150141000231220ustar00rootroot00000000000000## gittuf policy list-principals List principals for the current policy in the specified rule file ``` gittuf policy list-principals [flags] ``` ### Options ``` -h, --help help for list-principals --policy-name string specify rule file to list principals for (default 
"targets") --policy-ref string specify which policy ref should be inspected (default "policy") ``` ### Options inherited from parent commands ``` --no-color turn off colored output --profile enable CPU and memory profiling --profile-CPU-file string file to store CPU profile (default "cpu.prof") --profile-memory-file string file to store memory profile (default "memory.prof") -k, --signing-key string signing key to use to sign policy file --verbose enable verbose logging ``` ### SEE ALSO * [gittuf policy](gittuf_policy.md) - Tools to manage gittuf policies gittuf-0.9.0/docs/cli/gittuf_policy_list-rules.md000066400000000000000000000015161475150141000221100ustar00rootroot00000000000000## gittuf policy list-rules List rules for the current state ``` gittuf policy list-rules [flags] ``` ### Options ``` -h, --help help for list-rules --target-ref string specify which policy ref should be inspected (default "policy") ``` ### Options inherited from parent commands ``` --no-color turn off colored output --profile enable CPU and memory profiling --profile-CPU-file string file to store CPU profile (default "cpu.prof") --profile-memory-file string file to store memory profile (default "memory.prof") -k, --signing-key string signing key to use to sign policy file --verbose enable verbose logging ``` ### SEE ALSO * [gittuf policy](gittuf_policy.md) - Tools to manage gittuf policies gittuf-0.9.0/docs/cli/gittuf_policy_remote.md000066400000000000000000000015751475150141000213050ustar00rootroot00000000000000## gittuf policy remote Tools for managing remote policies ### Options ``` -h, --help help for remote ``` ### Options inherited from parent commands ``` --no-color turn off colored output --profile enable CPU and memory profiling --profile-CPU-file string file to store CPU profile (default "cpu.prof") --profile-memory-file string file to store memory profile (default "memory.prof") -k, --signing-key string signing key to use to sign policy file --verbose enable verbose logging ``` ### SEE ALSO * [gittuf policy](gittuf_policy.md) - Tools to manage gittuf policies * [gittuf policy remote pull](gittuf_policy_remote_pull.md) - Pull policy from the specified remote * [gittuf policy remote push](gittuf_policy_remote_push.md) - Push policy to the specified remote gittuf-0.9.0/docs/cli/gittuf_policy_remote_pull.md000066400000000000000000000014001475150141000223240ustar00rootroot00000000000000## gittuf policy remote pull Pull policy from the specified remote ``` gittuf policy remote pull [flags] ``` ### Options ``` -h, --help help for pull ``` ### Options inherited from parent commands ``` --no-color turn off colored output --profile enable CPU and memory profiling --profile-CPU-file string file to store CPU profile (default "cpu.prof") --profile-memory-file string file to store memory profile (default "memory.prof") -k, --signing-key string signing key to use to sign policy file --verbose enable verbose logging ``` ### SEE ALSO * [gittuf policy remote](gittuf_policy_remote.md) - Tools for managing remote policies gittuf-0.9.0/docs/cli/gittuf_policy_remote_push.md000066400000000000000000000013761475150141000223430ustar00rootroot00000000000000## gittuf policy remote push Push policy to the specified remote ``` gittuf policy remote push [flags] ``` ### Options ``` -h, --help help for push ``` ### Options inherited from parent commands ``` --no-color turn off colored output --profile enable CPU and memory profiling --profile-CPU-file string file to store CPU profile (default "cpu.prof") --profile-memory-file string 
file to store memory profile (default "memory.prof") -k, --signing-key string signing key to use to sign policy file --verbose enable verbose logging ``` ### SEE ALSO * [gittuf policy remote](gittuf_policy_remote.md) - Tools for managing remote policies gittuf-0.9.0/docs/cli/gittuf_policy_remove-key.md000066400000000000000000000020641475150141000220670ustar00rootroot00000000000000## gittuf policy remove-key Remove a key from a policy file ### Synopsis This command allows users to remove keys from the specified policy file. The public key ID is required. By default, the main policy file is selected. ``` gittuf policy remove-key [flags] ``` ### Options ``` -h, --help help for remove-key --policy-name string name of policy file to remove key from (default "targets") --public-key string public key ID to remove from the policy ``` ### Options inherited from parent commands ``` --no-color turn off colored output --profile enable CPU and memory profiling --profile-CPU-file string file to store CPU profile (default "cpu.prof") --profile-memory-file string file to store memory profile (default "memory.prof") -k, --signing-key string signing key to use to sign policy file --verbose enable verbose logging ``` ### SEE ALSO * [gittuf policy](gittuf_policy.md) - Tools to manage gittuf policies gittuf-0.9.0/docs/cli/gittuf_policy_remove-person.md000066400000000000000000000022301475150141000226000ustar00rootroot00000000000000## gittuf policy remove-person Remove a person from a policy file (requires developer mode and v0.2 policy metadata to be enabled, set GITTUF_DEV=1 and GITTUF_ALLOW_V02_POLICY=1) ### Synopsis This command allows users to remove a person from the specified policy file. The person's ID is required. By default, the main policy file is selected. ``` gittuf policy remove-person [flags] ``` ### Options ``` -h, --help help for remove-person --person-ID string person ID --policy-name string name of policy file to remove person from (default "targets") ``` ### Options inherited from parent commands ``` --no-color turn off colored output --profile enable CPU and memory profiling --profile-CPU-file string file to store CPU profile (default "cpu.prof") --profile-memory-file string file to store memory profile (default "memory.prof") -k, --signing-key string signing key to use to sign policy file --verbose enable verbose logging ``` ### SEE ALSO * [gittuf policy](gittuf_policy.md) - Tools to manage gittuf policies gittuf-0.9.0/docs/cli/gittuf_policy_remove-rule.md000066400000000000000000000015671475150141000222550ustar00rootroot00000000000000## gittuf policy remove-rule Remove rule from a policy file ``` gittuf policy remove-rule [flags] ``` ### Options ``` -h, --help help for remove-rule --policy-name string name of policy file to remove rule from (default "targets") --rule-name string name of rule ``` ### Options inherited from parent commands ``` --no-color turn off colored output --profile enable CPU and memory profiling --profile-CPU-file string file to store CPU profile (default "cpu.prof") --profile-memory-file string file to store memory profile (default "memory.prof") -k, --signing-key string signing key to use to sign policy file --verbose enable verbose logging ``` ### SEE ALSO * [gittuf policy](gittuf_policy.md) - Tools to manage gittuf policies gittuf-0.9.0/docs/cli/gittuf_policy_reorder-rules.md000066400000000000000000000022661475150141000226020ustar00rootroot00000000000000## gittuf policy reorder-rules Reorder rules in the specified policy file ### Synopsis This command allows users 
to reorder rules in the specified policy file. By default, the main policy file is selected. The rule names need to be passed as arguments, in the new order they must appear in, starting from the first to the last rule. Rule names may contain spaces, so they should be enclosed in quotes if necessary. ``` gittuf policy reorder-rules [flags] ``` ### Options ``` -h, --help help for reorder-rules --policy-name string name of policy file to reorder rules in (default "targets") ``` ### Options inherited from parent commands ``` --no-color turn off colored output --profile enable CPU and memory profiling --profile-CPU-file string file to store CPU profile (default "cpu.prof") --profile-memory-file string file to store memory profile (default "memory.prof") -k, --signing-key string signing key to use to sign policy file --verbose enable verbose logging ``` ### SEE ALSO * [gittuf policy](gittuf_policy.md) - Tools to manage gittuf policies gittuf-0.9.0/docs/cli/gittuf_policy_sign.md ## gittuf policy sign Sign policy file ### Synopsis This command allows users to add their signature to the specified policy file. ``` gittuf policy sign [flags] ``` ### Options ``` -h, --help help for sign --policy-name string name of policy file to sign (default "targets") ``` ### Options inherited from parent commands ``` --no-color turn off colored output --profile enable CPU and memory profiling --profile-CPU-file string file to store CPU profile (default "cpu.prof") --profile-memory-file string file to store memory profile (default "memory.prof") -k, --signing-key string signing key to use to sign policy file --verbose enable verbose logging ``` ### SEE ALSO * [gittuf policy](gittuf_policy.md) - Tools to manage gittuf policies gittuf-0.9.0/docs/cli/gittuf_policy_tui.md ## gittuf policy tui Start the TUI for managing policies ### Synopsis This command allows users to start a terminal-based interface to manage policies. The signing key specified will be used to sign all operations while in the TUI. Changes to the policy files in the TUI are staged immediately without further confirmation, and users are required to run `gittuf policy apply` to commit the changes. ``` gittuf policy tui [flags] ``` ### Options ``` -h, --help help for tui --policy-name string name of policy file to make changes to (default "targets") --target-ref string specify which policy ref should be inspected (default "policy") ``` ### Options inherited from parent commands ``` --no-color turn off colored output --profile enable CPU and memory profiling --profile-CPU-file string file to store CPU profile (default "cpu.prof") --profile-memory-file string file to store memory profile (default "memory.prof") -k, --signing-key string signing key to use to sign policy file --verbose enable verbose logging ``` ### SEE ALSO * [gittuf policy](gittuf_policy.md) - Tools to manage gittuf policies gittuf-0.9.0/docs/cli/gittuf_policy_update-rule.md ## gittuf policy update-rule Update an existing rule in a policy file ### Synopsis This command allows users to update an existing rule in the specified policy file. By default, the main policy file is selected. Note that authorized keys can be specified from disk, from the GPG keyring using the "gpg:<fingerprint>" format, or as a Sigstore identity as "fulcio:<identity>::<issuer>". 
``` gittuf policy update-rule [flags] ``` ### Options ``` --authorize stringArray authorize the principal IDs for the rule -h, --help help for update-rule --policy-name string name of policy file to add rule to (default "targets") --rule-name string name of rule --rule-pattern stringArray patterns used to identify namespaces rule applies to --threshold int threshold of required valid signatures (default 1) ``` ### Options inherited from parent commands ``` --no-color turn off colored output --profile enable CPU and memory profiling --profile-CPU-file string file to store CPU profile (default "cpu.prof") --profile-memory-file string file to store memory profile (default "memory.prof") -k, --signing-key string signing key to use to sign policy file --verbose enable verbose logging ``` ### SEE ALSO * [gittuf policy](gittuf_policy.md) - Tools to manage gittuf policies gittuf-0.9.0/docs/cli/gittuf_rsl.md000066400000000000000000000024321475150141000172240ustar00rootroot00000000000000## gittuf rsl Tools to manage the repository's reference state log ### Options ``` -h, --help help for rsl ``` ### Options inherited from parent commands ``` --no-color turn off colored output --profile enable CPU and memory profiling --profile-CPU-file string file to store CPU profile (default "cpu.prof") --profile-memory-file string file to store memory profile (default "memory.prof") --verbose enable verbose logging ``` ### SEE ALSO * [gittuf](gittuf.md) - A security layer for Git repositories, powered by TUF * [gittuf rsl annotate](gittuf_rsl_annotate.md) - Annotate prior RSL entries * [gittuf rsl log](gittuf_rsl_log.md) - Display the repository's Reference State Log * [gittuf rsl propagate](gittuf_rsl_propagate.md) - Propagate contents of remote repositories into local repository (developer mode only, set GITTUF_DEV=1) * [gittuf rsl record](gittuf_rsl_record.md) - Record latest state of a Git reference in the RSL * [gittuf rsl remote](gittuf_rsl_remote.md) - Tools for managing remote RSLs * [gittuf rsl skip-rewritten](gittuf_rsl_skip-rewritten.md) - Creates an RSL annotation to skip RSL reference entries that point to commits that do not exist in the specified ref gittuf-0.9.0/docs/cli/gittuf_rsl_annotate.md000066400000000000000000000014151475150141000211150ustar00rootroot00000000000000## gittuf rsl annotate Annotate prior RSL entries ``` gittuf rsl annotate [flags] ``` ### Options ``` -h, --help help for annotate -m, --message string annotation message -s, --skip mark annotated entries as to be skipped ``` ### Options inherited from parent commands ``` --no-color turn off colored output --profile enable CPU and memory profiling --profile-CPU-file string file to store CPU profile (default "cpu.prof") --profile-memory-file string file to store memory profile (default "memory.prof") --verbose enable verbose logging ``` ### SEE ALSO * [gittuf rsl](gittuf_rsl.md) - Tools to manage the repository's reference state log gittuf-0.9.0/docs/cli/gittuf_rsl_log.md000066400000000000000000000012311475150141000200610ustar00rootroot00000000000000## gittuf rsl log Display the repository's Reference State Log ``` gittuf rsl log [flags] ``` ### Options ``` -h, --help help for log ``` ### Options inherited from parent commands ``` --no-color turn off colored output --profile enable CPU and memory profiling --profile-CPU-file string file to store CPU profile (default "cpu.prof") --profile-memory-file string file to store memory profile (default "memory.prof") --verbose enable verbose logging ``` ### SEE ALSO * [gittuf 
rsl](gittuf_rsl.md) - Tools to manage the repository's reference state log gittuf-0.9.0/docs/cli/gittuf_rsl_propagate.md000066400000000000000000000013461475150141000212710ustar00rootroot00000000000000## gittuf rsl propagate Propagate contents of remote repositories into local repository (developer mode only, set GITTUF_DEV=1) ``` gittuf rsl propagate [flags] ``` ### Options ``` -h, --help help for propagate ``` ### Options inherited from parent commands ``` --no-color turn off colored output --profile enable CPU and memory profiling --profile-CPU-file string file to store CPU profile (default "cpu.prof") --profile-memory-file string file to store memory profile (default "memory.prof") --verbose enable verbose logging ``` ### SEE ALSO * [gittuf rsl](gittuf_rsl.md) - Tools to manage the repository's reference state log gittuf-0.9.0/docs/cli/gittuf_rsl_record.md000066400000000000000000000016611475150141000205650ustar00rootroot00000000000000## gittuf rsl record Record latest state of a Git reference in the RSL ``` gittuf rsl record [flags] ``` ### Options ``` --dst-ref string name of destination reference, if it differs from source reference -h, --help help for record --skip-duplicate-check skip check to see if latest entry for reference has same target --skip-propagation skip propagation workflow ``` ### Options inherited from parent commands ``` --no-color turn off colored output --profile enable CPU and memory profiling --profile-CPU-file string file to store CPU profile (default "cpu.prof") --profile-memory-file string file to store memory profile (default "memory.prof") --verbose enable verbose logging ``` ### SEE ALSO * [gittuf rsl](gittuf_rsl.md) - Tools to manage the repository's reference state log gittuf-0.9.0/docs/cli/gittuf_rsl_remote.md000066400000000000000000000016161475150141000206020ustar00rootroot00000000000000## gittuf rsl remote Tools for managing remote RSLs ### Options ``` -h, --help help for remote ``` ### Options inherited from parent commands ``` --no-color turn off colored output --profile enable CPU and memory profiling --profile-CPU-file string file to store CPU profile (default "cpu.prof") --profile-memory-file string file to store memory profile (default "memory.prof") --verbose enable verbose logging ``` ### SEE ALSO * [gittuf rsl](gittuf_rsl.md) - Tools to manage the repository's reference state log * [gittuf rsl remote pull](gittuf_rsl_remote_pull.md) - Pull RSL from the specified remote * [gittuf rsl remote push](gittuf_rsl_remote_push.md) - Push RSL to the specified remote * [gittuf rsl remote reconcile](gittuf_rsl_remote_reconcile.md) - Reconcile local RSL with remote RSL gittuf-0.9.0/docs/cli/gittuf_rsl_remote_pull.md000066400000000000000000000012411475150141000216300ustar00rootroot00000000000000## gittuf rsl remote pull Pull RSL from the specified remote ``` gittuf rsl remote pull [flags] ``` ### Options ``` -h, --help help for pull ``` ### Options inherited from parent commands ``` --no-color turn off colored output --profile enable CPU and memory profiling --profile-CPU-file string file to store CPU profile (default "cpu.prof") --profile-memory-file string file to store memory profile (default "memory.prof") --verbose enable verbose logging ``` ### SEE ALSO * [gittuf rsl remote](gittuf_rsl_remote.md) - Tools for managing remote RSLs gittuf-0.9.0/docs/cli/gittuf_rsl_remote_push.md000066400000000000000000000012371475150141000216400ustar00rootroot00000000000000## gittuf rsl remote push Push RSL to the specified remote ``` gittuf rsl remote push [flags] ``` 
### Options ``` -h, --help help for push ``` ### Options inherited from parent commands ``` --no-color turn off colored output --profile enable CPU and memory profiling --profile-CPU-file string file to store CPU profile (default "cpu.prof") --profile-memory-file string file to store memory profile (default "memory.prof") --verbose enable verbose logging ``` ### SEE ALSO * [gittuf rsl remote](gittuf_rsl_remote.md) - Tools for managing remote RSLs gittuf-0.9.0/docs/cli/gittuf_rsl_remote_reconcile.md000066400000000000000000000023051475150141000226210ustar00rootroot00000000000000## gittuf rsl remote reconcile Reconcile local RSL with remote RSL ### Synopsis This command checks the local RSL against the specified remote and reconciles the local RSL if needed. If the local RSL doesn't exist or is strictly behind the remote RSL, then the local RSL is updated to match the remote RSL. If the local RSL is ahead of the remote RSL, nothing is updated. Finally, if the local and remote RSLs have diverged, then the local only RSL entries are reapplied over the latest entries in the remote if the local only RSL entries and remote only entries are for different Git references. ``` gittuf rsl remote reconcile [flags] ``` ### Options ``` -h, --help help for reconcile ``` ### Options inherited from parent commands ``` --no-color turn off colored output --profile enable CPU and memory profiling --profile-CPU-file string file to store CPU profile (default "cpu.prof") --profile-memory-file string file to store memory profile (default "memory.prof") --verbose enable verbose logging ``` ### SEE ALSO * [gittuf rsl remote](gittuf_rsl_remote.md) - Tools for managing remote RSLs gittuf-0.9.0/docs/cli/gittuf_rsl_skip-rewritten.md000066400000000000000000000014021475150141000222670ustar00rootroot00000000000000## gittuf rsl skip-rewritten Creates an RSL annotation to skip RSL reference entries that point to commits that do not exist in the specified ref ``` gittuf rsl skip-rewritten [flags] ``` ### Options ``` -h, --help help for skip-rewritten ``` ### Options inherited from parent commands ``` --no-color turn off colored output --profile enable CPU and memory profiling --profile-CPU-file string file to store CPU profile (default "cpu.prof") --profile-memory-file string file to store memory profile (default "memory.prof") --verbose enable verbose logging ``` ### SEE ALSO * [gittuf rsl](gittuf_rsl.md) - Tools to manage the repository's reference state log gittuf-0.9.0/docs/cli/gittuf_trust.md000066400000000000000000000057441475150141000176160ustar00rootroot00000000000000## gittuf trust Tools for gittuf's root of trust ### Options ``` -h, --help help for trust -k, --signing-key string signing key to use to sign root of trust ``` ### Options inherited from parent commands ``` --no-color turn off colored output --profile enable CPU and memory profiling --profile-CPU-file string file to store CPU profile (default "cpu.prof") --profile-memory-file string file to store memory profile (default "memory.prof") --verbose enable verbose logging ``` ### SEE ALSO * [gittuf](gittuf.md) - A security layer for Git repositories, powered by TUF * [gittuf trust add-github-app](gittuf_trust_add-github-app.md) - Add GitHub app to gittuf root of trust * [gittuf trust add-global-rule](gittuf_trust_add-global-rule.md) - Add a new global rule to root of trust (developer mode only, set GITTUF_DEV=1) * [gittuf trust add-policy-key](gittuf_trust_add-policy-key.md) - Add Policy key to gittuf root of trust * [gittuf trust 
add-propagation-directive](gittuf_trust_add-propagation-directive.md) - Add propagation directive into gittuf root of trust (developer mode only, set GITTUF_DEV=1) * [gittuf trust add-root-key](gittuf_trust_add-root-key.md) - Add Root key to gittuf root of trust * [gittuf trust apply](gittuf_trust_apply.md) - Validate and apply changes from policy-staging to policy * [gittuf trust disable-github-app-approvals](gittuf_trust_disable-github-app-approvals.md) - Mark GitHub app approvals as untrusted henceforth * [gittuf trust enable-github-app-approvals](gittuf_trust_enable-github-app-approvals.md) - Mark GitHub app approvals as trusted henceforth * [gittuf trust init](gittuf_trust_init.md) - Initialize gittuf root of trust for repository * [gittuf trust remote](gittuf_trust_remote.md) - Tools for managing remote policies * [gittuf trust remove-github-app](gittuf_trust_remove-github-app.md) - Remove GitHub app from gittuf root of trust * [gittuf trust remove-global-rule](gittuf_trust_remove-global-rule.md) - Remove a global rule from root of trust (developer mode only, set GITTUF_DEV=1) * [gittuf trust remove-policy-key](gittuf_trust_remove-policy-key.md) - Remove Policy key from gittuf root of trust * [gittuf trust remove-propagation-directive](gittuf_trust_remove-propagation-directive.md) - Remove propagation directive from gittuf root of trust (developer mode only, set GITTUF_DEV=1) * [gittuf trust remove-root-key](gittuf_trust_remove-root-key.md) - Remove Root key from gittuf root of trust * [gittuf trust set-repository-location](gittuf_trust_set-repository-location.md) - Set repository location * [gittuf trust sign](gittuf_trust_sign.md) - Sign root of trust * [gittuf trust update-policy-threshold](gittuf_trust_update-policy-threshold.md) - Update Policy threshold in the gittuf root of trust * [gittuf trust update-root-threshold](gittuf_trust_update-root-threshold.md) - Update Root threshold in the gittuf root of trust gittuf-0.9.0/docs/cli/gittuf_trust_add-github-app.md ## gittuf trust add-github-app Add GitHub app to gittuf root of trust ### Synopsis This command allows users to add a trusted key for the special GitHub app role. This key is used to verify signatures on GitHub pull request approval attestations. Note that authorized keys can be specified from disk, from the GPG keyring using the "gpg:<fingerprint>" format, or as a Sigstore identity as "fulcio:<identity>::<issuer>". 
``` gittuf trust add-github-app [flags] ``` ### Options ``` --app-key string app key to add to root of trust -h, --help help for add-github-app ``` ### Options inherited from parent commands ``` --no-color turn off colored output --profile enable CPU and memory profiling --profile-CPU-file string file to store CPU profile (default "cpu.prof") --profile-memory-file string file to store memory profile (default "memory.prof") -k, --signing-key string signing key to use to sign root of trust --verbose enable verbose logging ``` ### SEE ALSO * [gittuf trust](gittuf_trust.md) - Tools for gittuf's root of trust gittuf-0.9.0/docs/cli/gittuf_trust_add-global-rule.md ## gittuf trust add-global-rule Add a new global rule to root of trust (developer mode only, set GITTUF_DEV=1) ``` gittuf trust add-global-rule [flags] ``` ### Options ``` -h, --help help for add-global-rule --rule-name string name of rule --rule-pattern stringArray patterns used to identify namespaces rule applies to --threshold int threshold of required valid signatures (default 1) --type string type of rule (threshold|block-force-pushes) ``` ### Options inherited from parent commands ``` --no-color turn off colored output --profile enable CPU and memory profiling --profile-CPU-file string file to store CPU profile (default "cpu.prof") --profile-memory-file string file to store memory profile (default "memory.prof") -k, --signing-key string signing key to use to sign root of trust --verbose enable verbose logging ``` ### SEE ALSO * [gittuf trust](gittuf_trust.md) - Tools for gittuf's root of trust gittuf-0.9.0/docs/cli/gittuf_trust_add-policy-key.md ## gittuf trust add-policy-key Add Policy key to gittuf root of trust ### Synopsis This command allows users to add a new trusted key for the main policy file. Note that authorized keys can be specified from disk, from the GPG keyring using the "gpg:<fingerprint>" format, or as a Sigstore identity as "fulcio:<identity>::<issuer>". 
``` gittuf trust add-policy-key [flags] ``` ### Options ``` -h, --help help for add-policy-key --policy-key string policy key to add to root of trust ``` ### Options inherited from parent commands ``` --no-color turn off colored output --profile enable CPU and memory profiling --profile-CPU-file string file to store CPU profile (default "cpu.prof") --profile-memory-file string file to store memory profile (default "memory.prof") -k, --signing-key string signing key to use to sign root of trust --verbose enable verbose logging ``` ### SEE ALSO * [gittuf trust](gittuf_trust.md) - Tools for gittuf's root of trust gittuf-0.9.0/docs/cli/gittuf_trust_add-propagation-directive.md000066400000000000000000000023521475150141000247130ustar00rootroot00000000000000## gittuf trust add-propagation-directive Add propagation directive into gittuf root of trust (developer mode only, set GITTUF_DEV=1) ``` gittuf trust add-propagation-directive [flags] ``` ### Options ``` --from-reference string reference to propagate from in upstream repository --from-repository string location of upstream repository -h, --help help for add-propagation-directive --into-path string path to propagate upstream contents into in downstream reference --into-reference string reference to propagate into in downstream repository --name string name of propagation directive ``` ### Options inherited from parent commands ``` --no-color turn off colored output --profile enable CPU and memory profiling --profile-CPU-file string file to store CPU profile (default "cpu.prof") --profile-memory-file string file to store memory profile (default "memory.prof") -k, --signing-key string signing key to use to sign root of trust --verbose enable verbose logging ``` ### SEE ALSO * [gittuf trust](gittuf_trust.md) - Tools for gittuf's root of trust gittuf-0.9.0/docs/cli/gittuf_trust_add-root-key.md000066400000000000000000000014641475150141000221700ustar00rootroot00000000000000## gittuf trust add-root-key Add Root key to gittuf root of trust ``` gittuf trust add-root-key [flags] ``` ### Options ``` -h, --help help for add-root-key --root-key string root key to add to root of trust ``` ### Options inherited from parent commands ``` --no-color turn off colored output --profile enable CPU and memory profiling --profile-CPU-file string file to store CPU profile (default "cpu.prof") --profile-memory-file string file to store memory profile (default "memory.prof") -k, --signing-key string signing key to use to sign root of trust --verbose enable verbose logging ``` ### SEE ALSO * [gittuf trust](gittuf_trust.md) - Tools for gittuf's root of trust gittuf-0.9.0/docs/cli/gittuf_trust_apply.md000066400000000000000000000013551475150141000210150ustar00rootroot00000000000000## gittuf trust apply Validate and apply changes from policy-staging to policy ``` gittuf trust apply [flags] ``` ### Options ``` -h, --help help for apply ``` ### Options inherited from parent commands ``` --no-color turn off colored output --profile enable CPU and memory profiling --profile-CPU-file string file to store CPU profile (default "cpu.prof") --profile-memory-file string file to store memory profile (default "memory.prof") -k, --signing-key string signing key to use to sign root of trust --verbose enable verbose logging ``` ### SEE ALSO * [gittuf trust](gittuf_trust.md) - Tools for gittuf's root of trust gittuf-0.9.0/docs/cli/gittuf_trust_disable-github-app-approvals.md000066400000000000000000000014531475150141000253350ustar00rootroot00000000000000## gittuf trust 
disable-github-app-approvals Mark GitHub app approvals as untrusted henceforth ``` gittuf trust disable-github-app-approvals [flags] ``` ### Options ``` -h, --help help for disable-github-app-approvals ``` ### Options inherited from parent commands ``` --no-color turn off colored output --profile enable CPU and memory profiling --profile-CPU-file string file to store CPU profile (default "cpu.prof") --profile-memory-file string file to store memory profile (default "memory.prof") -k, --signing-key string signing key to use to sign root of trust --verbose enable verbose logging ``` ### SEE ALSO * [gittuf trust](gittuf_trust.md) - Tools for gittuf's root of trust gittuf-0.9.0/docs/cli/gittuf_trust_enable-github-app-approvals.md000066400000000000000000000014461475150141000251620ustar00rootroot00000000000000## gittuf trust enable-github-app-approvals Mark GitHub app approvals as trusted henceforth ``` gittuf trust enable-github-app-approvals [flags] ``` ### Options ``` -h, --help help for enable-github-app-approvals ``` ### Options inherited from parent commands ``` --no-color turn off colored output --profile enable CPU and memory profiling --profile-CPU-file string file to store CPU profile (default "cpu.prof") --profile-memory-file string file to store memory profile (default "memory.prof") -k, --signing-key string signing key to use to sign root of trust --verbose enable verbose logging ``` ### SEE ALSO * [gittuf trust](gittuf_trust.md) - Tools for gittuf's root of trust gittuf-0.9.0/docs/cli/gittuf_trust_init.md000066400000000000000000000014341475150141000206310ustar00rootroot00000000000000## gittuf trust init Initialize gittuf root of trust for repository ``` gittuf trust init [flags] ``` ### Options ``` -h, --help help for init --location string location of repository ``` ### Options inherited from parent commands ``` --no-color turn off colored output --profile enable CPU and memory profiling --profile-CPU-file string file to store CPU profile (default "cpu.prof") --profile-memory-file string file to store memory profile (default "memory.prof") -k, --signing-key string signing key to use to sign root of trust --verbose enable verbose logging ``` ### SEE ALSO * [gittuf trust](gittuf_trust.md) - Tools for gittuf's root of trust gittuf-0.9.0/docs/cli/gittuf_trust_remote.md000066400000000000000000000015711475150141000211630ustar00rootroot00000000000000## gittuf trust remote Tools for managing remote policies ### Options ``` -h, --help help for remote ``` ### Options inherited from parent commands ``` --no-color turn off colored output --profile enable CPU and memory profiling --profile-CPU-file string file to store CPU profile (default "cpu.prof") --profile-memory-file string file to store memory profile (default "memory.prof") -k, --signing-key string signing key to use to sign root of trust --verbose enable verbose logging ``` ### SEE ALSO * [gittuf trust](gittuf_trust.md) - Tools for gittuf's root of trust * [gittuf trust remote pull](gittuf_trust_remote_pull.md) - Pull policy from the specified remote * [gittuf trust remote push](gittuf_trust_remote_push.md) - Push policy to the specified remote gittuf-0.9.0/docs/cli/gittuf_trust_remote_pull.md000066400000000000000000000013761475150141000222220ustar00rootroot00000000000000## gittuf trust remote pull Pull policy from the specified remote ``` gittuf trust remote pull [flags] ``` ### Options ``` -h, --help help for pull ``` ### Options inherited from parent commands ``` --no-color turn off colored output --profile enable CPU and memory 
profiling --profile-CPU-file string file to store CPU profile (default "cpu.prof") --profile-memory-file string file to store memory profile (default "memory.prof") -k, --signing-key string signing key to use to sign root of trust --verbose enable verbose logging ``` ### SEE ALSO * [gittuf trust remote](gittuf_trust_remote.md) - Tools for managing remote policies gittuf-0.9.0/docs/cli/gittuf_trust_remote_push.md000066400000000000000000000013741475150141000222230ustar00rootroot00000000000000## gittuf trust remote push Push policy to the specified remote ``` gittuf trust remote push [flags] ``` ### Options ``` -h, --help help for push ``` ### Options inherited from parent commands ``` --no-color turn off colored output --profile enable CPU and memory profiling --profile-CPU-file string file to store CPU profile (default "cpu.prof") --profile-memory-file string file to store memory profile (default "memory.prof") -k, --signing-key string signing key to use to sign root of trust --verbose enable verbose logging ``` ### SEE ALSO * [gittuf trust remote](gittuf_trust_remote.md) - Tools for managing remote policies gittuf-0.9.0/docs/cli/gittuf_trust_remove-github-app.md000066400000000000000000000014041475150141000232160ustar00rootroot00000000000000## gittuf trust remove-github-app Remove GitHub app from gittuf root of trust ``` gittuf trust remove-github-app [flags] ``` ### Options ``` -h, --help help for remove-github-app ``` ### Options inherited from parent commands ``` --no-color turn off colored output --profile enable CPU and memory profiling --profile-CPU-file string file to store CPU profile (default "cpu.prof") --profile-memory-file string file to store memory profile (default "memory.prof") -k, --signing-key string signing key to use to sign root of trust --verbose enable verbose logging ``` ### SEE ALSO * [gittuf trust](gittuf_trust.md) - Tools for gittuf's root of trust gittuf-0.9.0/docs/cli/gittuf_trust_remove-global-rule.md000066400000000000000000000015371475150141000233720ustar00rootroot00000000000000## gittuf trust remove-global-rule Remove a global rule from root of trust (developer mode only, set GITTUF_DEV=1) ``` gittuf trust remove-global-rule [flags] ``` ### Options ``` -h, --help help for remove-global-rule --rule-name string name of rule ``` ### Options inherited from parent commands ``` --no-color turn off colored output --profile enable CPU and memory profiling --profile-CPU-file string file to store CPU profile (default "cpu.prof") --profile-memory-file string file to store memory profile (default "memory.prof") -k, --signing-key string signing key to use to sign root of trust --verbose enable verbose logging ``` ### SEE ALSO * [gittuf trust](gittuf_trust.md) - Tools for gittuf's root of trust gittuf-0.9.0/docs/cli/gittuf_trust_remove-policy-key.md000066400000000000000000000015451475150141000232510ustar00rootroot00000000000000## gittuf trust remove-policy-key Remove Policy key from gittuf root of trust ``` gittuf trust remove-policy-key [flags] ``` ### Options ``` -h, --help help for remove-policy-key --policy-key-ID string ID of Policy key to be removed from root of trust ``` ### Options inherited from parent commands ``` --no-color turn off colored output --profile enable CPU and memory profiling --profile-CPU-file string file to store CPU profile (default "cpu.prof") --profile-memory-file string file to store memory profile (default "memory.prof") -k, --signing-key string signing key to use to sign root of trust --verbose enable verbose logging ``` ### SEE ALSO * 
[gittuf trust](gittuf_trust.md) - Tools for gittuf's root of trust gittuf-0.9.0/docs/cli/gittuf_trust_remove-propagation-directive.md000066400000000000000000000016231475150141000254600ustar00rootroot00000000000000## gittuf trust remove-propagation-directive Remove propagation directive from gittuf root of trust (developer mode only, set GITTUF_DEV=1) ``` gittuf trust remove-propagation-directive [flags] ``` ### Options ``` -h, --help help for remove-propagation-directive --name string name of propagation directive ``` ### Options inherited from parent commands ``` --no-color turn off colored output --profile enable CPU and memory profiling --profile-CPU-file string file to store CPU profile (default "cpu.prof") --profile-memory-file string file to store memory profile (default "memory.prof") -k, --signing-key string signing key to use to sign root of trust --verbose enable verbose logging ``` ### SEE ALSO * [gittuf trust](gittuf_trust.md) - Tools for gittuf's root of trust gittuf-0.9.0/docs/cli/gittuf_trust_remove-root-key.md000066400000000000000000000015271475150141000227350ustar00rootroot00000000000000## gittuf trust remove-root-key Remove Root key from gittuf root of trust ``` gittuf trust remove-root-key [flags] ``` ### Options ``` -h, --help help for remove-root-key --root-key-ID string ID of Root key to be removed from root of trust ``` ### Options inherited from parent commands ``` --no-color turn off colored output --profile enable CPU and memory profiling --profile-CPU-file string file to store CPU profile (default "cpu.prof") --profile-memory-file string file to store memory profile (default "memory.prof") -k, --signing-key string signing key to use to sign root of trust --verbose enable verbose logging ``` ### SEE ALSO * [gittuf trust](gittuf_trust.md) - Tools for gittuf's root of trust gittuf-0.9.0/docs/cli/gittuf_trust_set-repository-location.md000066400000000000000000000014761475150141000245120ustar00rootroot00000000000000## gittuf trust set-repository-location Set repository location ``` gittuf trust set-repository-location [flags] ``` ### Options ``` -h, --help help for set-repository-location --location string location of repository ``` ### Options inherited from parent commands ``` --no-color turn off colored output --profile enable CPU and memory profiling --profile-CPU-file string file to store CPU profile (default "cpu.prof") --profile-memory-file string file to store memory profile (default "memory.prof") -k, --signing-key string signing key to use to sign root of trust --verbose enable verbose logging ``` ### SEE ALSO * [gittuf trust](gittuf_trust.md) - Tools for gittuf's root of trust gittuf-0.9.0/docs/cli/gittuf_trust_sign.md000066400000000000000000000014371475150141000206310ustar00rootroot00000000000000## gittuf trust sign Sign root of trust ### Synopsis This command allows users to add their signature to the root of trust file. 
``` gittuf trust sign [flags] ``` ### Options ``` -h, --help help for sign ``` ### Options inherited from parent commands ``` --no-color turn off colored output --profile enable CPU and memory profiling --profile-CPU-file string file to store CPU profile (default "cpu.prof") --profile-memory-file string file to store memory profile (default "memory.prof") -k, --signing-key string signing key to use to sign root of trust --verbose enable verbose logging ``` ### SEE ALSO * [gittuf trust](gittuf_trust.md) - Tools for gittuf's root of trust gittuf-0.9.0/docs/cli/gittuf_trust_update-policy-threshold.md000066400000000000000000000017611475150141000244420ustar00rootroot00000000000000## gittuf trust update-policy-threshold Update Policy threshold in the gittuf root of trust ### Synopsis This command allows users to update the threshold of valid signatures required for the policy. ``` gittuf trust update-policy-threshold [flags] ``` ### Options ``` -h, --help help for update-policy-threshold --threshold int threshold of valid signatures required for main policy (default -1) ``` ### Options inherited from parent commands ``` --no-color turn off colored output --profile enable CPU and memory profiling --profile-CPU-file string file to store CPU profile (default "cpu.prof") --profile-memory-file string file to store memory profile (default "memory.prof") -k, --signing-key string signing key to use to sign root of trust --verbose enable verbose logging ``` ### SEE ALSO * [gittuf trust](gittuf_trust.md) - Tools for gittuf's root of trust gittuf-0.9.0/docs/cli/gittuf_trust_update-root-threshold.md000066400000000000000000000017511475150141000241250ustar00rootroot00000000000000## gittuf trust update-root-threshold Update Root threshold in the gittuf root of trust ### Synopsis This command allows users to update the threshold of valid signatures required for the root of trust. 
``` gittuf trust update-root-threshold [flags] ``` ### Options ``` -h, --help help for update-root-threshold --threshold int threshold of valid signatures required for root (default -1) ``` ### Options inherited from parent commands ``` --no-color turn off colored output --profile enable CPU and memory profiling --profile-CPU-file string file to store CPU profile (default "cpu.prof") --profile-memory-file string file to store memory profile (default "memory.prof") -k, --signing-key string signing key to use to sign root of trust --verbose enable verbose logging ``` ### SEE ALSO * [gittuf trust](gittuf_trust.md) - Tools for gittuf's root of trust gittuf-0.9.0/docs/cli/gittuf_verify-mergeable.md000066400000000000000000000016461475150141000216570ustar00rootroot00000000000000## gittuf verify-mergeable Tools for verifying mergeability using gittuf policies ``` gittuf verify-mergeable [flags] ``` ### Options ``` --base-branch string base branch for proposed merge --bypass-RSL bypass RSL when identifying current state of feature ref --feature-branch string feature branch for proposed merge -h, --help help for verify-mergeable ``` ### Options inherited from parent commands ``` --no-color turn off colored output --profile enable CPU and memory profiling --profile-CPU-file string file to store CPU profile (default "cpu.prof") --profile-memory-file string file to store memory profile (default "memory.prof") --verbose enable verbose logging ``` ### SEE ALSO * [gittuf](gittuf.md) - A security layer for Git repositories, powered by TUF gittuf-0.9.0/docs/cli/gittuf_verify-ref.md000066400000000000000000000017221475150141000205030ustar00rootroot00000000000000## gittuf verify-ref Tools for verifying gittuf policies ``` gittuf verify-ref [flags] ``` ### Options ``` --from-entry string perform verification from specified RSL entry (developer mode only, set GITTUF_DEV=1) -h, --help help for verify-ref --latest-only perform verification against latest entry in the RSL --remote-ref-name string name of remote reference, if it differs from the local name ``` ### Options inherited from parent commands ``` --no-color turn off colored output --profile enable CPU and memory profiling --profile-CPU-file string file to store CPU profile (default "cpu.prof") --profile-memory-file string file to store memory profile (default "memory.prof") --verbose enable verbose logging ``` ### SEE ALSO * [gittuf](gittuf.md) - A security layer for Git repositories, powered by TUF gittuf-0.9.0/docs/cli/gittuf_version.md000066400000000000000000000011731475150141000201120ustar00rootroot00000000000000## gittuf version Version of gittuf ``` gittuf version [flags] ``` ### Options ``` -h, --help help for version ``` ### Options inherited from parent commands ``` --no-color turn off colored output --profile enable CPU and memory profiling --profile-CPU-file string file to store CPU profile (default "cpu.prof") --profile-memory-file string file to store memory profile (default "memory.prof") --verbose enable verbose logging ``` ### SEE ALSO * [gittuf](gittuf.md) - A security layer for Git repositories, powered by TUF gittuf-0.9.0/docs/cli/main.go000066400000000000000000000011621475150141000157720ustar00rootroot00000000000000//go:generate go run . 
// SPDX-License-Identifier: Apache-2.0

package main

import (
	"fmt"
	"os"

	"github.com/gittuf/gittuf/internal/cmd/root"
	"github.com/spf13/cobra"
	"github.com/spf13/cobra/doc"
)

var (
	dir string

	cmd = &cobra.Command{
		Use:   "gendoc",
		Short: "Generate help docs",
		Args:  cobra.NoArgs,
		RunE: func(*cobra.Command, []string) error {
			// Generate one Markdown page per gittuf command in the output directory.
			return doc.GenMarkdownTree(root.New(), dir)
		},
	}
)

func init() {
	cmd.Flags().StringVarP(&dir, "dir", "d", ".", "Path to directory in which to generate docs")
}

func main() {
	if err := cmd.Execute(); err != nil {
		fmt.Println(err)
		os.Exit(1)
	}
}

gittuf-0.9.0/docs/design-document.md

# gittuf Design Document

Last Modified: January 20, 2025

## Introduction

This document describes gittuf, a security layer for Git repositories. With gittuf, any developer who can pull from a Git repository can independently verify that the repository's security policies were followed. gittuf's policy, inspired by [The Update Framework (TUF)](https://theupdateframework.io/), handles key management for all trusted developers in a repository, allows for setting permissions for repository namespaces such as branches, tags, and files, and provides protections against [attacks targeting Git metadata](https://www.usenix.org/conference/usenixsecurity16/technical-sessions/presentation/torres-arias). At the same time, gittuf is backwards compatible with existing source control platforms ("forges") such as GitHub, GitLab, and Bitbucket.

gittuf is currently a sandbox project at the [Open Source Security Foundation (OpenSSF)](https://openssf.org/) as part of the [Supply Chain Integrity Working Group](https://github.com/ossf/wg-supply-chain-integrity). The core concepts of gittuf described in this document have been [peer reviewed](https://ssl.engineering.nyu.edu/papers/yelgundhalli_gittuf_ndss_2025.pdf).

This document is scoped to describing how gittuf's write access control policies are applied to Git repositories. Other additions to gittuf's feature set are described in standalone [gittuf Augmentation Proposals (GAPs)](/docs/gaps/).

## Definitions

This document uses several terms or phrases in specific ways. These are defined here.

### Git References (Refs) and Objects

A Git reference is a "simple name" that typically points to a particular Git commit. Generally, development in a Git repository is centered on one or more refs, which are updated as commits are added to the ref under development. By default, Git defines two types of refs: branches ("heads") and tags. Git also allows the creation of other arbitrary refs in which users can store other information, as long as it is formatted using Git's object types.

Git employs a content-addressed object store with support for four types of objects. An essential Git object is the "commit", which is a self-contained representation of the whole repository. Each commit points to a "tree" object that represents the state of the files in the root of the repository at that commit. A tree object contains one or more entries that are either other tree objects (representing subdirectories) or "blob" objects (representing files). The final type of Git object is the "tag" object, used as a static pointer to another Git object. While a tag object can point to any other Git object, it is frequently used to point to a commit.

```
Repository
|
|-- refs
|   |
|   |-- heads
|   |   |-- main (refers to commit C)
|   |   |-- feature-x (refers to commit E)
|   |
|   |-- tags
|   |   |-- v1.0 (refers to tag v1.0)
|   |
|   |-- arbitrary
|       |-- custom-ref (formatted as Git object type)
|
|-- objects
    |-- A [Initial commit]
    |-- B [Version 1.0 release]
    |-- C [More changes on main]
    |-- D [Initial commit on feature-x]
    |-- E [More changes on feature-x]
    |-- v1.0 [Tag object referring to commit B]
```
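To make these definitions concrete, the refs and objects in a repository can be inspected with standard Git plumbing commands. The following is a minimal sketch using stock Git rather than gittuf-specific tooling; the refs shown correspond to the hypothetical ones in the diagram above.

```
# List branches, tags, and any custom refs (gittuf's own metadata lives under refs/gittuf/)
git for-each-ref refs/heads refs/tags refs/gittuf

# Inspect objects: a commit points to a tree, and a tree lists blobs and subtrees
git cat-file -t HEAD          # prints "commit"
git cat-file -p HEAD          # shows the tree hash, parents, author, and committer
git cat-file -p HEAD^{tree}   # lists the blobs and trees at the repository root
```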
``` Repository | |-- refs | | | |-- heads | | |-- main (refers to commit C) | | |-- feature-x (refers to commit E) | | | |-- tags | | |-- v1.0 (refers to tag v1.0) | | | |-- arbitrary | |-- custom-ref (formatted as Git object type) | |-- objects |-- A [Initial commit] |-- B [Version 1.0 release] |-- C [More changes on main] |-- D [Initial commit on feature-x] |-- E [More changes on feature-x] |-- v1.0 [Tag object referring to commit B] ``` ### Actors and Authentication In a Git repository, an "actor" is any party, human or bot, who makes changes to the repository. These changes can involve any part of the repository, such as modifying files, branches or tags. In gittuf, each actor is identified by a unique signing key that they use to cryptographically sign their contributions. gittuf uses cryptographic signatures to authenticate actors as these signatures can be verified by anyone who has the corresponding public key, fundamental to gittuf's mission to enable independent verification of repository actions. Note that gittuf does not rely on Git commit metadata (e.g., author email, committer email) to identify the actor who created it, as that may be trivially spoofed. In practice, a gittuf policy allows an actor to make certain changes by granting trust to the actor's signing key to make those changes. To maintain security, all actions made in the repository, such as adding or modifying files, are checked for authenticity. This is done by verifying the digital signature attached to the action, which must match the trusted public key associated with the actor who is supposed to have made the change. ### State The term "state" refers to the latest values or conditions of the tracked references (like branches and tags) in a Git repository. These are determined by the most recent entries in the [reference state log](#reference-state-log-rsl). Note that when verifying changes in the repository, a workflow may only verify specific references rather than all state updates in the reference state log. ## Threat Model The following threat model is taken from the [peer reviewed publication](https://ssl.engineering.nyu.edu/papers/yelgundhalli_gittuf_ndss_2025.pdf) describing gittuf. We consider the standard scenario where a forge is used to manage a Git repository on a centralized synchronization point. This forge can be a publicly hosted solution (e.g., the github.com service), or self-hosted on premises by an enterprise. Either option exposes the forge instance to both external attackers and insider threats. External attackers may circumvent security measures and compromise the version control system, manifesting themselves as advanced persistent threats (APT) and making unnoticed changes to the system. Similarly, insider threats may be posed by rogue employees with escalated privileges who abuse their authority to make unnoticed changes. To protect the integrity of the repository’s contents, the maintainers of the repository define security controls such as which contributors can write to different parts of the repository. gittuf is meant to protect against scenarios where any party, individual developers, bots that make changes, or the forge itself, may be compromised and act in an arbitrarily malicious way as seen in prior incidents. 
This includes scenarios such as: * T1: Modifying configured repository security policies, such as to weaken them * T2: Tampering with the contents of the repository’s activity log, such as by reordering, dropping, or otherwise manipulating log entries * T3: Subverting the enforcement of security policies, such as by accepting invalid changes instead of rejecting them Note that we consider out of scope a freeze attack, where the forge serves stale data, as development workflows involve a substantial amount of out-of-band communication which prevents such attacks from going unnoticed. We similarly consider weaknesses in cryptographic algorithms as out of scope. ## gittuf Design gittuf records additional metadata describing the repository's policy and activity in the repository itself. Effectively, gittuf treats security policies, activity information, and policy decisions as a content tracking problem. To avoid collisions with regular repository contents, gittuf stores its metadata in custom references under `refs/gittuf/`. ### gittuf Policy Note: This section assumes some prior knowledge of the [TUF specification](https://theupdateframework.github.io/specification/). The repository's policy metadata handles the distribution of the repository's trusted keys (representing actors) as well as write access control rules. There are two types of metadata used by gittuf, which are stored in a custom reference `refs/gittuf/policy`. #### Root of Trust gittuf's policy metadata includes root of trust metadata, which establishes why the policy must be trusted. The root of trust metadata (similar to TUF's root metadata) declares the keys belonging to the repository owners as well as a numerical threshold that indicates the minimum number of signatures for the metadata to be considered valid. The root of trust metadata is signed by a threshold of root keys, and the initial set of root keys for a repository must be distributed using out-of-band mechanisms or rely on trust-on-first-use (TOFU). Subsequent changes to the set of root keys are handled in-band, with a new version of the root of trust metadata created. This new version must be signed by a threshold of root keys trusted in the previous version. #### Rule Files The rules protecting the repository's namespaces are declared in one or more rule files. A rule file is similar to TUF's targets metadata. It declares the public keys for the trusted actors, as well as namespaced "delegations" which specify protected namespaces within the repository and which actors are trusted to write to them. A threshold of trusted actors for any delegation (or rule) can extend this trust to other actors by signing a new rule file with the same name as the delegation. In this rule file, they can add the actors who must be trusted for the same (or a subset) of namespaces. All repositories must contain a primary rule file (typically called "targets.json" to match TUF's behavior). This rule file may contain no rules, signifying that no repository namespaces are protected. The primary rule file derives its trust directly from the root of trust metadata; it must be signed by a threshold of actors trusted to manage the repository's primary rule file. All other rule files derive their trust directly or indirectly from the primary rule file through delegations. 
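As a rough illustration of how such rules might be modeled, the sketch below looks up the delegations that apply to a protected namespace. The struct and function names are assumptions made for this example and do not reflect gittuf's actual metadata format, and Go's `path.Match` is used only as a stand-in for gittuf's pattern semantics.

```go
package main

import (
	"fmt"
	"path"
)

// Rule is a simplified stand-in for a delegation in a rule file: it protects
// a set of patterns and names the actors trusted to sign for matching changes.
type Rule struct {
	Name      string
	Patterns  []string // e.g. "git:refs/heads/main" or "file:ios/*"
	Actors    []string
	Threshold int
}

// rulesFor returns the rules whose patterns match the given namespace. If no
// rule matches, the implicit allow-rule applies and any signer is accepted
// (represented here by an empty result).
func rulesFor(rules []Rule, namespace string) []Rule {
	var matched []Rule
	for _, r := range rules {
		for _, pattern := range r.Patterns {
			if ok, _ := path.Match(pattern, namespace); ok {
				matched = append(matched, r)
				break
			}
		}
	}
	return matched
}

func main() {
	primary := []Rule{
		{Name: "protect-main", Patterns: []string{"git:refs/heads/main"}, Actors: []string{"Alice", "Bob", "Carol"}, Threshold: 2},
		{Name: "protect-ios-app", Patterns: []string{"file:ios/*"}, Actors: []string{"Alice"}, Threshold: 1},
	}
	for _, r := range rulesFor(primary, "file:ios/App.swift") {
		fmt.Printf("%s: requires %d of %v\n", r.Name, r.Threshold, r.Actors)
	}
}
```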
![Policy delegations](/docs/media/policy-delegations.png) _In this example, the repository administrator grants write permissions to Carol for the main branch, to Alice for the alice-dev branch, and to Bob for the /tests folder (under any of the existing branches)._ A significant difference between typical TUF metadata and those used by gittuf is in the expectations of the policies. Typical TUF deployments are explicit about the artifacts they are distributing. Any artifact not listed in TUF metadata is rejected. In gittuf, policies are written only to express _restrictions_. As such, when verifying changes to unprotected namespaces, gittuf must allow any key to sign for these changes. This means that after all explicit policies (expressed as delegations) are processed, and none apply to the namespace being verified, an implicit `allow-rule` is applied, allowing verification to succeed. #### Example gittuf Policy The following example is taken from the [peer reviewed publication](https://ssl.engineering.nyu.edu/papers/yelgundhalli_gittuf_ndss_2025.pdf) of gittuf's design. It shows a gittuf policy state with its root of trust and three distinct rule files connected using delegations. The root of trust declares the trusted signers for the next version of the root of trust as well as the primary rule file. Signatures are omitted. ``` rootOfTrust: keys: {R1, R2, R3, P1, P2, P3} signers: rootOfTrust: (2, {R1, R2, R3}) primary: (2, {P1, P2, P3}) ruleFile: primary keys: {Alice, Bob, Carol, Helen, Ilda} rules: protect-main-prod: {git:refs/heads/main, git:refs/heads/prod} -> (2, {Alice, Bob, Carol}) protect-ios-app: {file:ios/*} -> (1, {Alice}) protect-android-app: {file:android/*} -> (1, {Bob}) protect-core-libraries: {file:src/*} -> (2, {Carol, Helen, Ilda}) ruleFile: protect-ios-app keys: {Dana, George} rules: authorize-ios-team: {file:ios/*} -> (1, {Dana, George}) ruleFile: protect-android-app keys: {Eric, Frank} rules: authorize-android-team: {file:android/*} -> (1, {Eric, Frank}) ``` ### Tracking Repository Activity gittuf leverages a "Reference State Log (RSL)" to track changes to the repository's references. In addition, gittuf uses the [in-toto Attestation Framework](https://github.com/in-toto/attestation) to record other repository activity such as code review approvals. #### Reference State Log (RSL) Note: This document presents a summary of the RSL. For a full understanding of the attacks mitigated by the RSL, please refer to the [academic](https://www.usenix.org/system/files/conference/usenixsecurity16/sec16_paper_torres-arias.pdf) [papers](https://ssl.engineering.nyu.edu/papers/yelgundhalli_gittuf_ndss_2025.pdf) underpinning gittuf's design. The Reference State Log contains a series of entries that each describe some change to a Git ref. Such entries are known as RSL reference entries. Each entry contains the ref being updated, the new location it points to, and a hash of the parent RSL entry. The entry is signed by the actor making the change to the ref. Additionally, the RSL supports annotation entries that refer to prior reference entries. An annotation entry can be used to attach additional user-readable messages to prior RSL entries or to mark those entries as revoked. Given that each entry points to its parent entry using its hash, an RSL is a hash chain. gittuf's implementation of the RSL uses Git's underlying Merkle graph. 
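Because every entry records the ID of its parent, the log forms a hash chain that can be checked by walking back from the tip. The sketch below models this in simplified form; the types and field names are assumptions for illustration, whereas gittuf's implementation stores entries as Git commits (described next).

```go
package main

import "fmt"

// ReferenceEntry is a simplified RSL reference entry: it records the ref that
// was updated, the target it now points to, and the ID of the parent entry.
type ReferenceEntry struct {
	ID       string
	ParentID string // empty for the first entry in the log
	Ref      string
	TargetID string
	Number   int
}

// verifyChain walks the log from the tip and confirms each entry points to
// the entry directly before it, i.e., the RSL is a single linear hash chain.
func verifyChain(entries map[string]ReferenceEntry, tipID string) error {
	current, ok := entries[tipID]
	if !ok {
		return fmt.Errorf("unknown tip %s", tipID)
	}
	for current.ParentID != "" {
		parent, ok := entries[current.ParentID]
		if !ok {
			return fmt.Errorf("entry %s references missing parent %s", current.ID, current.ParentID)
		}
		if parent.Number != current.Number-1 {
			return fmt.Errorf("entry %s does not directly follow its parent", current.ID)
		}
		current = parent
	}
	return nil
}

func main() {
	entries := map[string]ReferenceEntry{
		"e1": {ID: "e1", Ref: "refs/heads/main", TargetID: "c1", Number: 1},
		"e2": {ID: "e2", ParentID: "e1", Ref: "refs/heads/main", TargetID: "c2", Number: 2},
	}
	fmt.Println(verifyChain(entries, "e2")) // <nil>
}
```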
Generally, gittuf is designed to ensure the RSL is linear but a privileged attacker may be able to cause the RSL to branch, resulting in a fork* attack where different actors are presented different versions of the RSL. The feasibility and implications of such an attack are discussed later in this document. The RSL is tracked at `refs/gittuf/reference-state-log`, and is implemented as a distinct commit graph. Each commit corresponds to one entry in the RSL, and standard Git signing mechanisms are employed for the actor's signature on the RSL entry. The latest entry is identified using the tip of the RSL Git ref. Note that the RSL and liveness of the repository in Git remove the need for some traditional TUF roles. As the RSL records changes to other Git refs in the repository, it incorporates TUF's [snapshot role](https://theupdateframework.github.io/specification/latest/#snapshot) properties. At present, gittuf does not include an equivalent to TUF's [timestamp role](https://theupdateframework.github.io/specification/latest/#timestamp) to guarantee the freshness of the RSL. This is because the timestamp role in the context of gittuf at most provides a non-repudiation guarantee for each claim of the RSL's tip. The use of an online timestamp does not guarantee that actors will receive the correct RSL tip. This may evolve in future versions of the gittuf design. ##### RSL Reference Entries These entries are the standard variety described above. They contain the name of the reference they apply to and a target ID. As such, they have the following structure. ``` RSL Entry ref: <ref name> targetID: <target ID> number: <number> ``` The `targetID` is typically the ID of a commit for references that are branches. However, for entries that record the state of a Git tag, `targetID` is the ID of the annotated tag object. ##### RSL Annotation Entries Apart from regular entries, the RSL can include annotations that apply to prior RSL entries. Annotations can be used to add more information as a message about a prior entry, or to _explicitly_ mark one or more entries as ones to be skipped. This semantic is necessary when accidental or possibly malicious RSL entries are recorded. Since the RSL history cannot be overwritten, an annotation entry must be used to communicate to gittuf clients to skip the corresponding entries. Annotations have the following schema. ``` RSL Annotation entryID: <RSL entry ID 1> entryID: <RSL entry ID 2> ... skip: <true/false> number: <number> -----BEGIN MESSAGE----- <message> ------END MESSAGE------ ``` ##### Example Entries Here's a sample RSL, with the output taken from `gittuf rsl log`: ``` entry a5ea2c6ee7e8b577f6be6fcee5b06e6cac7166fa (skipped) Ref: refs/heads/main Target: 6cb8e5c546eab3d0e1d245014de8003febb8e9b3 Number: 5 Annotation ID: cccfb6f27b2a71c33e9a55bc82f084e2445aa398 Skip: yes Number: 6 Message: Skipping RSL entry entry 40c82851f78c7018f4c360030a83923b0925c28d Ref: refs/gittuf/policy Target: b7cf91ec9b5b6b17334ab1378dc85375236524f5 Number: 4 entry 94c153bff6d684a956ed27f0abd70624e875657c Ref: refs/gittuf/policy-staging Target: b7cf91ec9b5b6b17334ab1378dc85375236524f5 Number: 3 entry fed977a5ca07e566af3a37808284dc7c5a67d6dc Ref: refs/gittuf/policy-staging Target: dcbb536bd86a69e555692aec7b65c20de8257ee2 Number: 2 entry e026a62f1c63c6db58bb357f9a85cafe05c64fb6 Ref: refs/gittuf/policy-staging Target: 603fc733218a0a1e54ccde47d1d9864f67e0bb75 Number: 1 ``` Specifically, the latest reference entry `a5ea2c6ee7e8b577f6be6fcee5b06e6cac7166fa` has been skipped by an annotation entry `cccfb6f27b2a71c33e9a55bc82f084e2445aa398`.
The commit object for the reference entry is as follows: ```bash ~/tmp/repo $ git cat-file -p a5ea2c6ee7e8b577f6be6fcee5b06e6cac7166fa tree 4b825dc642cb6eb9a060e54bf8d69288fbee4904 parent 40c82851f78c7018f4c360030a83923b0925c28d author Aditya Sirish A Yelgundhalli 1729514863 -0400 committer Aditya Sirish A Yelgundhalli 1729514863 -0400 gpgsig -----BEGIN SSH SIGNATURE----- U1NIU0lHAAAAAQAAADMAAAALc3NoLWVkMjU1MTkAAAAg8g2CmHSb7guzi6MUNgwHUQnxPN X1x8urScZyJrUB6MMAAAADZ2l0AAAAAAAAAAZzaGE1MTIAAABTAAAAC3NzaC1lZDI1NTE5 AAAAQGQMSviwqF+cE/wgEo0U73vu86YHi4f5crzzFIctjyMGOOy2isYfHgGvSzs5bv6V2Q EtMumBSVbCxvnRqJpiFAs= -----END SSH SIGNATURE----- RSL Reference Entry ref: refs/heads/main targetID: 6cb8e5c546eab3d0e1d245014de8003febb8e9b3 number: 5 ``` Similarly, the commit object for the annotation entry is as follows: ```bash ~/tmp/repo $ git cat-file -p cccfb6f27b2a71c33e9a55bc82f084e2445aa398 tree 4b825dc642cb6eb9a060e54bf8d69288fbee4904 parent a5ea2c6ee7e8b577f6be6fcee5b06e6cac7166fa author Aditya Sirish A Yelgundhalli 1729514924 -0400 committer Aditya Sirish A Yelgundhalli 1729514924 -0400 gpgsig -----BEGIN SSH SIGNATURE----- U1NIU0lHAAAAAQAAADMAAAALc3NoLWVkMjU1MTkAAAAg8g2CmHSb7guzi6MUNgwHUQnxPN X1x8urScZyJrUB6MMAAAADZ2l0AAAAAAAAAAZzaGE1MTIAAABTAAAAC3NzaC1lZDI1NTE5 AAAAQNf32yJvhGfLIIeeStHgkSB7iuRGJl6LhbRTpX/q49lUu4TrEiCeGa3H8LMJ/5D1EE in3QAhlzdowYnmCKglTAw= -----END SSH SIGNATURE----- RSL Annotation Entry entryID: a5ea2c6ee7e8b577f6be6fcee5b06e6cac7166fa skip: true number: 6 -----BEGIN MESSAGE----- U2tpcHBpbmcgUlNMIGVudHJ5 -----END MESSAGE----- ``` #### Attestations for Authorization Records gittuf makes use of the signing capability provided by Git for commits and tags significantly. However, it is sometimes necessary to attach more than a single signature to a Git object or repository action. For example, a policy may require more than one developer to sign off and approve a change such as merging something to the `main` branch. To support these workflows (while also remaining compatible with standard Git clients), gittuf uses the concept of "detached authorizations", implemented using signed [in-toto attestations](https://github.com/in-toto/attestation). Attestations are tracked in the custom Git reference `refs/gittuf/attestations`. The gittuf design currently supports the "reference authorization" type to represent code review approvals. Other types may be added to this document or via [GAPs](/docs/gaps/) in future. A reference authorization is an attestation that accompanies an RSL reference entry, allowing additional developers to issue signatures authorizing the change to the Git reference in question. Its structure is similar to that of a reference entry: ``` TargetRef string FromTargetID string ToTargetID string ``` The `TargetRef` is the Git reference the authorization is for, while `FromTargetID` and `ToTargetID` record the change in the state of the reference authorized by the attestation (as Git hashes). The information pertaining to the prior state of the Git reference is explicitly recorded in the attestation unlike a standard RSL reference entry. This is because, for a reference entry, this information can be implicitly identified using the RSL by examining the previous entry for the reference in question. If the authorization is for a brand new reference (say a new branch or any tag), `FromTargetID` must be set to zero. For a change to a branch, `ToTargetID` pre-computes the Git merge tree resulting from the change being approved. 
Thus, when verifying the change to the branch, it must be followed by an RSL reference entry that points to a commit which has the same Git tree ID. For a tag, `ToTargetID` records the Git object the tag object is expected to point to. Reference authorizations are stored in a directory called `reference-authorizations` in the attestations namespace. Each authorization must have the in-toto predicate type: `https://gittuf.dev/reference-authorization/v`. ## gittuf Workflows gittuf introduces some new workflows that are gittuf-specific, such as the creation of policies and their verification. In addition, gittuf interposes in some Git workflows so as to capture repository activity information. ### Policy Initialization and Changes When the policy is initialized or updated (this can be a change to the root of trust metadata or one or more rule files), a new policy state is created that contains the full set of gittuf policy metadata. This is recorded as a commit in the custom ref used to track the policy metadata (typically `refs/gittuf/policy`). In turn, the commit to the custom ref is recorded in the RSL, indicating the policy state to use for subsequent changes in the repository. ### Syncing gittuf References As the RSL must be linear with no branches, gittuf employs a variation of the `Secure_Fetch` and `Secure_Push` workflows described in the [RSL academic paper](https://www.usenix.org/system/files/conference/usenixsecurity16/sec16_paper_torres-arias.pdf). ![Using gittuf with legacy servers](/docs/media/gittuf-with-legacy-servers.png) _Note that gittuf can be used even if the synchronization point is not gittuf-enabled. The repository can host the gittuf namespaces which other gittuf clients can pull from for verification. In this example, a gittuf client with a changeset to commit to the dev branch (step 1), creates in its local repository a new commit object and the associated RSL entry (step 2). These changes are pushed next to a remote Git repository (step 3), from where other gittuf or legacy Git clients pull the changes (step 4)._ #### `RSLFetch`: Receiving Remote RSL Changes Before local RSL changes can be made or pushed, it is necessary to verify that they are compatible with the remote RSL state. If the remote RSL has entries that are unavailable locally, entries made locally will be rejected by the remote. For example, let the local RSL tip be entry A and the new entry be entry C. If the remote has entry B after A with B being the tip, attempting to push C which also comes right after A will fail. Instead, the local RSL must first fetch entry B and then create entry C. This is because entries in the RSL must be made serially. As each entry includes the ID of the previous entry, a local entry that does not incorporate the latest RSL entry on the remote is invalid. The workflow is as follows: 1. Fetch remote RSL to the local remote tracker `refs/remotes/origin/gittuf/reference-state-log`. 1. If the last entry in the remote RSL is the same as the local RSL, terminate successfully. 1. Perform the verification workflow for the new entries in the remote RSL, incorporating remote changes to the local policy namespace. The verification workflow is performed for each Git reference in the new entries, relative to the local state of each reference. If verification fails, abort and warn user. Note that the verification workflow must fetch each Git reference to its corresponding remote tracker, `refs/remotes/origin/`. 1. For each modified Git reference, update the local state. 
As all the refs have been successfully verified, each ref's remote state can be applied to the local repository, so `refs/heads/` matches `refs/remotes/origin/`. 1. Set local RSL to the remote RSL's tip. NOTE: Some aspects of this workflow are under discussion and are subject to change. The gittuf implementation does not implement precisely this workflow. Specifically, the implementation does not verify new entries in the remote automatically. Additionally, the RSL may contain entries for references a client does not have, making verification of those entries unfeasible. See https://github.com/gittuf/gittuf/issues/708. #### `RSLPush`: Submitting Local RSL Changes 1. Execute `RSLFetch` repeatedly until there are no new RSL entries in the remote RSL. Every time there is a remote update, the user must be prompted to fetch and re-apply their changes to the RSL. This process could be automated but user intervention may be needed to resolve conflicts in the refs they modified. Changes to the gittuf policy must be fetched and applied locally. 1. Verify the validity of the RSL entries being submitted using locally available gittuf policies to ensure the user is authorized for the changes. If verification fails, abort and warn user. 1. Perform an atomic Git push to the remote of the RSL as well as the modified Git references. If the push fails, it is likely because another actor pushed their changes first. Restart the `RSLPush` workflow. NOTE: Some aspects of this workflow are under discussion and are subject to change. The gittuf implementation does not implement precisely this workflow. This workflow is closely related to other push operations performed in the repository, and therefore, this section may be incorporated with other workflows. See https://github.com/gittuf/gittuf/issues/708. ### Regular Pushes When an actor pushes a change to a remote repository, this update to the corresponding ref (or refs) must be recorded in the RSL. For each ref being pushed, the gittuf client creates a new RSL entry. Then, `RSLPush` is used to submit these changes to the remote repository. ### Force Pushes Due to the linear nature of the RSL, it is not possible to remove a reference entry. A force push makes one or more prior reference entries for the pushed ref invalid as the targets recorded in those entries may not be reachable any longer. Thus, these entries must be marked as "skipped" in the RSL using an annotation entry. After an annotation for these reference entries is created, a reference entry is created recording the current state of the ref. Then, `RSLPush` is used to submit these changes to the remote repository. ### Verification Workflow There are several aspects to verification. First, the right policy state must be identified by walking back RSL entries to find the last change to that namespace. Next, authorized keys must be identified to verify that commit or RSL entry signatures are valid. #### Identifying Authorized Signers for Protected Namespaces When verifying a commit or RSL entry, the first step is identifying the set of keys authorized to sign a commit or RSL entry in their respective namespaces. This is achieved by performing pre-ordered depth first search over the delegations graph in a gittuf policy state. Assume the relevant policy state entry is `P` and the namespace being checked is `N`. Then: 1. Validate `P`'s root metadata using the TUF workflow starting from the initial root of trust metadata, ignore expiration date checks (see https://github.com/gittuf/gittuf/issues/280). 1. 
Create empty set `K` to record authorized verifiers for `N`. 1. Create empty set `queue` to track the rules (or delegations) that must be checked. 1. Begin traversing the delegations graph rooted at the primary rule file metadata. 1. Verify the signatures of the primary rule file using the trusted keys in the root of trust. If a threshold of signatures cannot be verified, abort. 1. Populate `queue` with the rules in the primary rule file. 1. While `queue` is not empty: 1. Set `rule` to the first item in `queue`, removing it from `queue`. 1. If `rule` is the `allow-rule`: 1. Proceed to the next iteration. 1. If the patterns of `rule` match `N` (i.e., the rule applies to the namespace being verified): 1. Create a verifier with the trusted keys in `rule` and the specified threshold. 1. Add this verifier to `K`. 1. If `P` contains a rule file with the same name as `rule` (i.e., a delegated rule file exists): 1. Verify that the delegated rule file is signed by a threshold of valid signatures using the keys declared in the delegating rule file. Abort if verification fails. 1. Add the rules in the delegated rule file to the front of `queue` (ensuring the delegated rules are prioritized to match pre-order depth first search behavior). 1. Return `K`. #### Verifying Changes Made In gittuf, verifying the validity of changes is _relative_. Verification of a new state depends on comparing it against some prior, verified state. For some ref `X` that is currently at verified entry `S` in the RSL and its latest available state entry is `D`: 1. Fetch all changes made to `X` between the commit recorded in `S` and that recorded in `D`, including the latest commit into a temporary branch. 1. Walk back from `S` until an RSL entry `P` is found that updated the gittuf policy namespace. This identifies the policy that was active for changes made immediately after `S`. If a policy entry is not found, abort. 1. Walk back from `S` until an RSL entry `A` is found that updated the gittuf attestations ref. This identifies the set of attestations applicable for the changes made immediately after `S`. 1. Validate `P`'s metadata using the TUF workflow, ignore expiration date checks (see https://github.com/gittuf/gittuf/issues/280). 1. Walk back from `D` until `S` and create an ordered list of all RSL updates that targeted either `X` or gittuf namespaces. Entries pertaining to other refs MAY be ignored. Annotation entries MUST be recorded. 1. The verification workflow has an ordered list of states `[I1, I2, ..., In, D]` that are to be verified. 1. Set trusted state for `X` to `S`. 1. For each set of consecutive states starting with `(S, I1)` to `(In, D)`: 1. Check if an annotation exists for the second state. If it does, verify if the annotation indicates the state is to be skipped. If true, proceed to the next set of consecutive states. 1. If second state changes gittuf policy: 1. Validate new policy metadata using the TUF workflow and `P`'s contents to establish authorized signers for new policy. Ignore expiration date checks (see https://github.com/gittuf/gittuf/issues/280). If verification passes, update `P` to new policy state. 1. If second state is for attestations: 1. Set `A` to the new attestations state. 1. Verify the second state entry was signed by an authorized key as defined in `P` for the ref `X`. If the gittuf policy requires more than one signature, search for a reference authorization attestation for the same change.
Verify the signatures on the attestation are issued by authorized keys to meet the threshold, ignoring any signatures from the same key as the one used to sign the entry. 1. If `P` contains rules protecting files in the repository: 1. Enumerate all commits between that recorded in trusted state and the second state with the signing key used for each commit. 1. Identify the net or combined set of files modified between the commits in the first and second states as `F`. 1. If all commits are signed by the same key, individual commits need not be validated. Instead, `F` can be used directly. For each path: 1. Find the set of keys authorized to make changes to the path in `P`. 1. Verify key used is in authorized set. If not, terminate verification workflow with an error. 1. If not, iterate over each commit. For each commit: 1. Identify the file paths modified by the commit. For each path: 1. Find the set of keys authorized to make changes to the path in `P`. 1. Verify key used is in authorized set. If not, check if path is present in `F`, as an unauthorized change may have been corrected subsequently. This merely acts as a hint as path may have been also changed subsequently by an authorized user, meaning it is in `F`. If path is not in `F`, continue with verification. Else, request user input, indicating potential policy violation. 1. Set trusted state for `X` to second state of current iteration. 1. Return indicating successful verification. NOTE: Some aspects of this workflow are under discussion and are subject to change. The gittuf implementation does not implement precisely this workflow, instead also including aspects of the recovery workflow to see if a change that fails verification has already been recovered from. See https://github.com/gittuf/gittuf/issues/708. ### Recovery If every user were using gittuf and were performing each operation by generating all of the correct metadata, following the specification, etc., then the procedure for handling each situation is fairly straightforward. However, an important property of gittuf is to ensure that a malicious or erroneous party cannot make changes that impact the state of the repository in a negative manner. To address this, this section discusses how to handle situations where something has not gone according to protocol. The goal is to recover to a "known good" situation which does match the metadata which a set of valid gittuf clients would generate. #### Recovery Mechanisms When gittuf verification fails, the following recovery workflow must be employed. This mechanism is utilized in scenarios where some change is rejected. For example, one or more commits may have been pushed to a branch that do not meet gittuf policy. The repository is updated such that these commits are neutralized and all Git refs match their latest RSL entries. This can take two forms: 1. The rejected commit is removed and the state of the repo is set to the prior commit which is known to be good. This is used when all rejected commits are together at the end of the commit graph, making it easy to remove all of them. 2. The rejected commit is _reverted_ where a new commit is introduced that reverses all the changes made in the reverted commit. This is needed when "good" commits that must be retained are interspersed with "bad" commits that must be rejected. In both cases, new RSL entries and annotations must be used to record the incident and skip the invalid RSL entries corresponding to the rejected changes. 
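As a simplified illustration of how skip annotations are taken into account when resolving the last valid state of a reference, consider the following sketch; the types and field names are assumptions made for this example, whereas gittuf operates on actual RSL commits and annotation entries.

```go
package main

import "fmt"

// Entry is a simplified RSL reference entry for a single ref.
type Entry struct {
	ID       string
	TargetID string
}

// latestValid returns the most recent entry for a ref that has not been
// marked as skipped by an annotation entry.
func latestValid(entries []Entry, skipped map[string]bool) (Entry, bool) {
	for i := len(entries) - 1; i >= 0; i-- {
		if !skipped[entries[i].ID] {
			return entries[i], true
		}
	}
	return Entry{}, false
}

func main() {
	// e3 recorded a rejected change and was skipped; e4 records the explicit
	// revert commit that restores the last good tree.
	entries := []Entry{{"e1", "c1"}, {"e2", "c2"}, {"e3", "bad"}, {"e4", "revert"}}
	skipped := map[string]bool{"e3": true}
	tip, _ := latestValid(entries, skipped)
	fmt.Println(tip.ID) // e4
}
```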
gittuf, by default, prefers the second option, with an explicit revert commit that is tree-same as the last good commit. This ensures that a client can always fast-forward to a fix rather than rewind. By resetting the affected branch to a prior good commit, Git clients that have already pulled in the invalid commit will not reset as well. Instead, they will assume they are ahead of the remote in question and will continue to use the bad commit as the latest commit. When the gittuf verification workflow encounters an RSL entry for some Git reference that does not meet policy, it looks to see if a subsequent entry for the same reference contains a fix that aligns with the last known good state. Any intermediate entries between the original invalid entry and the fix for the reference in question are also considered to be invalid. Therefore, in addition to the fix RSL entry, gittuf also expects skip annotations for the original invalid entry and intermediate entries for the reference. #### Recovery Scenarios These scenarios are some examples where recovery is necessary. This is not meant to be an exhaustive set of gittuf's recovery scenarios. ##### An Incorrect RSL Entry is Added There are several ways in which an RSL entry can be considered "incorrect". If an entry is malformed (structurally), Git may catch it if it's not a valid commit. In such instances, the push from a buggy client is rejected altogether, meaning other users are not exposed to the malformed commit. Invalid entries that are not rejected by Git must be caught by gittuf. Some examples of such invalid entries are: * RSL entry is for a non-existing Git reference * Commit recorded in RSL entry does not exist * Commit recorded in RSL entry does not match the tip of the corresponding Git reference * RSL annotation contains references to RSL entries that do not exist or are not RSL entries (i.e. the annotation points to other commits in the repository) Note that as invalid RSL entries are only created by buggy or malicious gittuf clients, these entries cannot be detected prior to them being pushed to the synchronization point. As correctly implemented gittuf clients verify the validity of RSL entries when they pull from the synchronization point, the user is warned if invalid entries are encountered. Then, the user can then use the recovery workflow to invalidate the incorrect entry. Other clients with the invalid entry only need to fetch the latest RSL entries to recover. Additionally, the client that created the invalid entries must switch to a correct implementation of gittuf before further interactions with the main repository, but this is left to out-of-band synchronization between the actors who notice the issue and the actor using a buggy client. ##### A gittuf Access Control Policy is Violated An actor, Bob, creates an RSL entry for a branch he's not authorized for by gittuf policy. He pushes a change to that branch. Another actor, Alice, notices this when her gittuf client indicates a failure in the verification workflow. Alice creates an RSL annotation marking Bob's entry as one to be skipped. Alice also reverses Bob's change, creating a new RSL entry reflecting that. ##### Attacker Modifies or Deletes Historical RSL Entry Overwriting or deleting an historical RSL entry is a complicated proposition. Git's content addressable properties mean that a SHA-1 collision is necessary to overwrite an existing RSL entry in the Git object store. 
Further, the attacker also needs more than push access to the repository as Git will not accept an object it already has in its store. Similarly, deleting an entry from the object store preserves the RSL structure cosmetically but verification workflows that require the entry will fail. This ensures that such an attack is detected, at which point the owners of the repository can restore the RSL state from their local copies. Also note that while Git uses SHA-1 for its object store, cryptographic signatures are generated and verified using stronger hash algorithms. Therefore, a successful SHA-1 collision for an RSL entry will not go undetected as all entries are signed. ##### Forge Attempts Fork* Attacks An attacker who controls the forge may attempt a fork* attack where different developers receive different RSL states. For example, the attacker may drop a push from an actor, Alice, from the RSL. Other developers such as Bob and Carol would continue adding their RSL entries, unaware of the dropped entry. However, Alice will observe the divergence in the RSL as she cannot receive Bob's and Carol's changes. The attacker cannot simply reapply Bob's and Carol's changes over Alice's RSL entry without also controlling Bob's and Carol's keys. The attacker may attempt a freeze attack targeted against Alice, where she's always told her entry is the latest in the RSL. However, any out-of-band communication between Alice and either Bob or Carol (common during development workflows) will highlight the attack. ##### An Authorized Key is Compromised When a key authorized by gittuf policy is compromised, it must be revoked and rotated so that an attacker cannot use it to sign repository objects. gittuf policies that grant permissions to the key must be updated to revoke the key, possibly adding the actor's new key in the process. Further, if a security analysis shows that the key was used to make malicious changes, those changes must be reverted and the corresponding RSL entries signed with the compromised key must be skipped. This ensures that gittuf clients do not consider attacker created RSL entries as valid states for the corresponding Git references. Clients that have an older RSL from before the attack can skip past the malicious entries altogether. ## Example of Using gittuf Consider project `foo`'s Git repository maintained by Alice and Bob. Alice and Bob are the only actors authorized to update the state of the main branch. This is accomplished by defining a TUF delegation to Alice and Bob's keys for the namespace corresponding to the main branch. All changes to the main branch's state MUST have a corresponding entry in the repository's RSL signed by either Alice or Bob. Further, `foo` has another contributor, Clara, who does not have maintainer privileges. This means that Clara is free to make changes to other Git branches but only Alice or Bob may merge Clara's changes from other unprotected branches into the main branch. Over time, `foo` grows to incorporate several subprojects with other contributors Dave and Ella. Alice and Bob take the decision to reorganize the repository into a monorepo containing two projects, `bar` and `baz`. Clara and Dave work exclusively on bar and Ella works on baz with Bob. In this situation, Alice and Bob retain their privileges to merge changes to the main branch. Further, they set up delegations for each subproject's path within the repository. Clara and Dave are only authorized to work on files within `bar/*` and Ella is restricted to `baz/*`. 
As Bob is a maintainer of foo, he is not restricted to working only on `baz/*`. gittuf-0.9.0/docs/dogfood.md000066400000000000000000000045351475150141000157220ustar00rootroot00000000000000# Dogfooding gittuf Last Modified: April 24, 2024 As noted in gittuf's [roadmap](/docs/roadmap.md), we want to use gittuf to secure the development of gittuf itself. Note that when we are dogfooding gittuf, we do not expect the policy to remain consistent over time, especially as gittuf itself may have breaking changes in the coming months. After gittuf reaches v1, we expect to reset the policy and start over with a formal root signing. We envision dogfooding to happen in several phases. ## Phase 1 At this stage, we will rely on automation to create and sign RSL entries on behalf of the gittuf maintainers. While this is quite a bit less secure than signatures issued directly by the maintainers, we believe this serves as a starting point for us to feel gittuf's pain points ourselves. In addition to signing RSL entries using sigstore online, we will be recording a GitHub attestation of each pull request merged into the main branch. This will serve as an auditable paper trail to inspect using gittuf in future. ## Phase 2 With command compatibility and improved usability of the gittuf tool, we will begin transitioning to at least some RSL entries being issued by local keys held by maintainers. This may also be accompanied by the development of helper tools such as a gittuf merge bot that can verify whose signatures / approvals are still needed in a pull request and present them with the commands to run to meet those requirements. ## Phase 3 Finally, as gittuf nears v1, we expect to transition more seamlessly to primarily offline signatures. This can, as before, only be achieved with further usability improvements. In this final phase, we hope to essentially have worked out the kinks with using gittuf actively so that we can proceed with a stable release. ## Verifying gittuf using gittuf To install gittuf, please refer to our [get started guide]. First, clone the repository and fetch the gittuf specific metadata. ```bash gittuf clone https://github.com/gittuf/gittuf ``` Alternatively, you can use Git as follows. ```bash git clone https://github.com/gittuf/gittuf cd gittuf git fetch origin refs/gittuf/*:refs/gittuf/* ``` Next, the latest release of gittuf as well as changes to the `main` branch can be verified using gittuf. ```bash gittuf verify-ref --verbose v0.4.0 gittuf verify-ref --verbose main ``` [get started guide]: /docs/get-started.md gittuf-0.9.0/docs/gaps/000077500000000000000000000000001475150141000147025ustar00rootroot00000000000000gittuf-0.9.0/docs/gaps/1/000077500000000000000000000000001475150141000150425ustar00rootroot00000000000000gittuf-0.9.0/docs/gaps/1/README.md000066400000000000000000000160541475150141000163270ustar00rootroot00000000000000# Providing SHA-256 Identifiers Alongside Existing SHA-1 Identifiers ## Metadata * **Number:** 1 * **Title:** Providing SHA-256 Identifiers Alongside Existing SHA-1 Identifiers * **Implemented:** No * **Withdrawn/Rejected:** No * **Sponsors:** Aditya Sirish A Yelgundhalli (adityasaky) * **Last Modified:** January 20, 2025 ## Abstract Git stores all its objects in a content addressed store located under `.git/objects`. The content address for each object is calculated using SHA-1. 
Due to known weaknesses with SHA-1, this GAP explores using gittuf to provide cryptoagility for the hash algorithm used in Git, whether SHA-256 or other algorithms that may be desirable to adopt in future. ## Specification Git stores all its objects in a content addressed store located under `.git/objects`. This directory contains subdirectories that act as an index to the hashes themselves. For example, the Git object for commit `4dcd174e182cedf597b8a84f24ea5a53dae7e1e7` is stored as `.git/objects/4d/cd174e182cedf597b8a84f24ea5a53dae7e1e7`. The hash is calculated across the corresponding object prior to compressing it, and it can be recalculated as follows: ``` cat .git/objects/4d/cd174e182cedf597b8a84f24ea5a53dae7e1e7 | zlib-flate -uncompress | sha1sum 4dcd174e182cedf597b8a84f24ea5a53dae7e1e7 - ``` There are several types of Git objects: commits, blobs, trees, and tags. Commits record changes made in the repository. Blobs are files in the repository while trees map to the directory structure of the repository. Trees contain a record of blobs and subtrees. Git commits store a record of their one or more parent commits (creating a Merkle DAG). Each commit also points to the specific tree object that represents the root of the repository. ``` git cat-file -p db1c7b0210513a452b0b971e1912d5eb2e3ffcd0 tree 7b968da28453b323a0d3333e3be4030b870d26e4 parent 4dcd174e182cedf597b8a84f24ea5a53dae7e1e7 ... ``` Finally, tag objects serve as static pointers to other Git objects (frequently commits). As with Git commits and trees, the tag object also identifies the target Git object using its identifier. This GAP proposes recomputing SHA-256 identifiers for every object in the repository. In this method, gittuf would maintain a SHA-1 to SHA-256 mapping for every object in Git's content addressed store. This mapping can be a simple key value dictionary. When gittuf is invoked to calculate new identifiers, say when creating a new commit, it must use Git's default semantics to create the object with SHA-1 identifiers. For each new object created, it must replace SHA-1 identifiers with their SHA-256 equivalents, calculating them recursively if necessary, and then finally calculate the SHA-256 hash. For every new object encountered, a SHA-1 to SHA-256 entry must be added to the key value record. Note that in this method, the new objects are not written to `.git/objects`. Instead, the objects continue to be stored with their SHA-1 identifiers. The only change is the addition of the file with the key value mapping. TODO: Should a parallel set of objects be maintained with SHA-256 identifiers that are symbolic links to their SHA-1 counterparts? This will probably not play well with Git's packfiles while only maintaining a separate mapping will. TODO: How much extra space does it take to store both versions of the objects? TODO: How must this support arbitrary hash algorithms, beyond SHA-256? TODO: How must the mapping of SHA-1 to SHA-256 (or other) hashes be stored and synchronized in gittuf workflows? ### Impact on Commit / Tag signing By default, Git signs commits using a SHA-256 representation of the commit objects. However, these commit objects contain SHA-1 references. A collision of the tree object referenced in the commit wouldn't be caught. As such, the verification workflow for a commit must also validate that the objects referenced by SHA-1 hashes also have the correct SHA-256 hashes. 
After they are validated, the signature can be verified using the relevant public key to check the identity of the committer. TODO: Verification of SHA-256 hashes requires that the object be present as well. How does this work when fetching new objects? Only a malicious object that has a SHA-1 collision may be presented, meaning we don't have a reference for the correct SHA-256 hash. ## Motivation By default, Git uses the SHA-1 hash algorithm to calculate unique identifiers. Due to known weaknesses with SHA-1, the Git community has proposed moving to SHA-256. There is experimental support for SHA-256 identifiers, but: 1. repositories can't currently use both SHA-1 and SHA-256 identifiers, so converting existing repositories means the loss of development history. 1. most Git servers or forges don't support SHA-256 identifiers. Since gittuf maintains a separate set of metadata about the Git objects in a repository, it can also provide a mapping to SHA-256 identifiers. This requires gittuf to maintain a SHA-256 reference to every SHA-1 identifier that exists in a repository. ## Reasoning ### Forward Compatibility with Git's SHA-256 Support One reason for recomputing SHA-256 hashes for all objects is forward compatibility. As noted before, Git includes experimental support for SHA-256. Here, a repository must be initialized with the object format set to SHA-256. From then on, all object identifiers are calculated using SHA-256 and stored in `.git/objects`. The same data structures are maintained, except all SHA-1 identifiers are replaced with SHA-256 identifiers. This is similar to the technique described in this GAP, meaning that SHA-256 identifiers calculated by gittuf are the same as Git's SHA-256 identifiers. This will play well with any transition techniques provided by Git for SHA-1 repositories to SHA-256. ### Alternate Solution A simpler solution is to calculate the SHA-256 hash of commit objects, rather than recompute hashes for all objects. This would look similar to: ``` cat .git/objects/4d/cd174e182cedf597b8a84f24ea5a53dae7e1e7 | zlib-flate -uncompress | sha256sum c9262d30f2dd6e50088acbfb67fa49bb3e80c30e57779551727bc89fcf02e21b - ``` However, if a SHA-1 collision is successfully performed within the repository, this technique has some blind spots. A collision with a commit object will be detected as two distinct commit objects may collide in SHA-1 but overwhelmingly won't in SHA-256. However, a collision in the tree object is more dangerous. In this situation, the commit object can remain the same but point to a malicious version of the tree. The SHA-256 identifier will not detect this change. ## Backwards Compatibility This GAP does not impact the backwards compatibility of gittuf as it only suggests recording additional information in gittuf metadata. ## Security TODO: A detailed security analysis is necessary before this GAP can be implemented. ## Prototype Implementation A prototype implementation was proposed in https://github.com/gittuf/gittuf/pull/105.
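For illustration, the basic bookkeeping behind the mapping described in the specification, computing both digests for an object and recording the pair, might look like the sketch below. This is not the prototype referenced above, and it handles only the blob case: trees and commits would additionally require recursively replacing the SHA-1 identifiers they embed before hashing.

```go
package main

import (
	"crypto/sha1"
	"crypto/sha256"
	"fmt"
)

// gitObject is the uncompressed form of a Git object: a header of the form
// "<type> <size>\x00" followed by the object's contents.
func gitObject(objType string, body []byte) []byte {
	return append([]byte(fmt.Sprintf("%s %d\x00", objType, len(body))), body...)
}

func main() {
	// Map from an object's SHA-1 ID to its SHA-256 digest.
	mapping := map[string]string{}

	obj := gitObject("blob", []byte("hello\n"))
	sha1ID := fmt.Sprintf("%x", sha1.Sum(obj))
	sha256ID := fmt.Sprintf("%x", sha256.Sum256(obj))
	mapping[sha1ID] = sha256ID

	fmt.Println(sha1ID, "->", mapping[sha1ID])
}
```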
## Changelog * January 20th, 2025: moved from `/docs/extensions` to `/docs/gaps` as GAP-1 ## References * [Git Hash Function Transition](https://git-scm.com/docs/hash-function-transition/2.48.0) * [SHAttered](https://shattered.io/) gittuf-0.9.0/docs/gaps/2/000077500000000000000000000000001475150141000150435ustar00rootroot00000000000000gittuf-0.9.0/docs/gaps/2/README.md000066400000000000000000000245061475150141000163310ustar00rootroot00000000000000# gittuf on the Forge ## Metadata * **Number:** 2 * **Title:** gittuf on the Forge * **Implemented:** No * **Withdrawn/Rejected:** No * **Sponsors:** Aditya Sirish A Yelgundhalli (adityasaky) * **Last Modified:** January 20, 2025 ## Abstract gittuf can be deployed at the forge that all developers push changes to. This instance of gittuf must behave differently from regular gittuf clients because multiple developers may be pushing and fetching from the forge at any given point in time. This GAP explores several configurations for how a gittuf-aware forge might behave. ## Specification TODO: The specification for this GAP depends on the configurations selected by the community. Eventually, this GAP may be split into multiple GAPs with each one handling a configuration that provides a subset of the desired properties for gittuf on the forge. ## Motivation There are several motivating factors to consider supporting gittuf on the forge: * **Ease of deployment:** in some threat models, it may be acceptable to trust the forge to record gittuf metadata which can be used to keep the forge honest. In turn, these deployments are easier as lesser client-side tooling needs to be installed and updated. * **High traffic repositories:** for repositories with a high volume of pushes, client-side RSL entry creation may be impractical. * **Reject rather than recover:** in some deployments, especially with a mix of gittuf-enabled and Git-only clients, it may be preferable to have the forge reject bad changes rather than recover post facto to avoid serving these unauthorized changes to Git-only clients in the interim period before a gittuf client can initiate the recovery workflow. * **Standardized Git security protocol:** a subset of gittuf's features can be adopted as the standardized protocol for how forge security policies are configured and enforced, thus enabling cross-forge validation of a repository's historic security decisions. ## Reasoning There are several aspects that must be considered in integrating gittuf with a forge. These are enumerated here with a description of the default configuration in gittuf: * RSL Entry Creation: in the standard gittuf model, all RSL entries are created and signed by clients when they push their changes. Thus, every push can be authenticated using the signature on the RSL entry, and the synchronization point is not responsible for ordering pushes in the repository meaning it cannot reorder or drop pushes. * Verification: in the standard gittuf model, every gittuf client performs verification when it receives changes from the synchronization point. Typically, this means that a change that fails verification must be fixed after the fact. * Git Reference Updates: in the standard gittuf model, a gittuf client pushes directly to the references on the synchronization point the user wishes to update along with corresponding RSL entries. The RSL entries are submitted to the synchronization point's RSL directly after the client fetches the latest state of the RSL to ensure the new entries are added at the very end of the RSL. 
The gittuf client makes this push atomically, meaning either all references are updated or none are updated, ### Configuration A **Summary:** Clients create RSL entries, forge performs pre-receive verification, users update references directly. In this configuration, users push directly to the Git references (e.g., the branch they update and the RSL with a corresponding entry) and the forge is integrated to perform gittuf verification at the pre-receive phase of a push. **Pros:** * The forge can reject pushes that fail verification, offering better protections to Git-only clients. **Cons:** * The forge can carry out denial of service attacks that may or may not be immediately obvious to the pushing actor. * Client-side RSL entry creation can be a bottleneck for high traffic repositories. TODO: Should the pushing gittuf client be investigated for submitting something that fails verification? ### Configuration B **Summary:** Clients create RSL entries, forge performs post-receive verification, users update references directly. In this configuration, users push directly to the Git references (e.g., the branch they update and the RSL with a corresponding entry) and the forge is integrated to perform gittuf verification at the post-receive phase of a push. **Pros:** * The forge cannot carry out denial of service attacks beyond the freeze attacks it can already perform. * This configuration can be implemented in popular forges using existing features (e.g., GitHub Actions). **Cons:** * The forge cannot prevent unauthorized changes from being pushed, requiring the recovery workflow to be executed by a gittuf client after the fact. * Client-side RSL entry creation can be a bottleneck for high traffic repositories. TODO: Should the pushing gittuf client be investigated for submitting something that fails verification? TODO: Explore making forge capable of carrying out recovery workflow. This needs to account for race conditions with verification / recovery in high traffic repositories. ### Configuration C **Summary:** Forge creates pre-receive RSL entries, forge performs pre-receive verification, users update references directly. In this configuration, users push directly to the Git references (e.g., the branch they update) **without** a corresponding RSL entry. The forge performs verification at the pre-receive phase (optionally by creating a provisional RSL entry) and rejects pushes that fail verification. If the verification passes, the forge makes the change available along with an RSL entry signed by it (if a provisional RSL entry was created, this can be adopted as the final RSL entry). **Pros:** * The forge can reject pushes that fail verification, offering better protections to Git-only clients. * Deployments are simpler as client-side tooling requires fewer changes. **Cons:** * The forge is trusted far more than in the standard gittuf model, as it can reorder or drop RSL entries (drops may be prevented by local "receipts", potentially). * With only an RSL entry for the push, there is no way to authenticate the pushing user. If this is attested to by the forge, the forge must be trusted not to lie. * A malicious forge can carry out a denial of service attack by falsely claiming verification failed. While more trust is placed in the forge (approaching cases where the forge is trusted solely to enforce security controls), this configuration still requires the forge to explicitly record its decisions in the repository in a manner that any gittuf client can verify the forge's honesty. 
TODO: Must the forge attest to how it authenticated a user? TODO: Can clients record some local-only "receipt" of a push that they validate is in the RSL next time? TODO: Can the forge still order pushes to handle high-traffic cases? Is a staging area necessary? ### Configuration D **Summary:** Forge creates post-receive RSL entries, forge performs post-receive verification, users update references directly. In this configuration, users push directly to the Git references (e.g., the branch they update) **without** a corresponding RSL entry. The forge creates an RSL entry in the post-receive phase and then performs verification. **Pros:** * The forge cannot carry out denial of service attacks beyond the freeze attacks it can already perform. * Deployments are simpler as client-side tooling requires fewer changes. * This configuration can be implemented in popular forges using existing features (e.g., GitHub Actions). **Cons:** * The forge may run into race conditions with creating RSL entries in high traffic repositories. * The forge is trusted far more than in the standard gittuf model, as it can reorder or drop RSL entries (drops may be prevented by local "receipts", potentially). * With only an RSL entry for the push, there is no way to authenticate the pushing user. If this is attested to by the forge, the forge must be trusted not to lie. TODO: Must the forge attest to how it authenticated a user? TODO: Can clients record some local-only "receipt" of a push that they validate is in the RSL next time? ### Configuration E **Summary:** Forge creates pre-receive RSL entries, forge performs pre-receive verification, users push changes to staging references. In this configuration, users push to special Git references (e.g., a staging area for the branch they want to update) **without** a corresponding RSL entry. The forge performs verification at the pre-receive phase (optionally by creating a provisional RSL entry) and rejects pushes that fail verification. If the verification passes, the forge makes the change available along with an RSL entry signed by it (if a provisional RSL entry was created, this can be adopted as the final RSL entry). **Pros:** * The forge can reject pushes that fail verification, offering better protections to Git-only clients. * The forge is responsible for ordering pushes at the pre-receive phase, simplifying RSL entry creation in high traffic repositories. * Deployments are simpler as client-side tooling requires fewer changes. **Cons:** * The forge is trusted far more than in the standard gittuf model, as it can reorder or drop RSL entries (drops may be prevented by local "receipts", potentially). * With only an RSL entry for the push, there is no way to authenticate the pushing user. If this is attested to by the forge, the forge must be trusted not to lie. * A malicious forge can carry out a denial of service attack by falsely claiming verification failed. While more trust is placed in the forge (approaching cases where the forge is trusted solely to enforce security controls), this configuration still requires the forge to explicitly record its decisions in the repository in a manner that any gittuf client can verify the forge's honesty. TODO: Must the forge attest to how it authenticated a user? TODO: Can clients record some local-only "receipt" of a push that they validate is in the RSL next time? TODO: Is this configuration necessary compared to C? 
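Common to the pre-receive configurations above is a forge-side verification step. The following is a minimal sketch of such a hook, assuming the forge can invoke the `gittuf` CLI in its hook environment; the exact integration point, and whether the pushed RSL entry is available at this stage, depend on the configuration chosen.

```go
package main

import (
	"bufio"
	"fmt"
	"os"
	"os/exec"
	"strings"
)

// A minimal pre-receive hook: for every pushed reference, run gittuf
// verification and reject the push if it fails. Git feeds the hook lines of
// the form "<old-oid> <new-oid> <ref-name>" on standard input, and a non-zero
// exit status rejects the push.
func main() {
	scanner := bufio.NewScanner(os.Stdin)
	for scanner.Scan() {
		fields := strings.Fields(scanner.Text())
		if len(fields) != 3 {
			continue
		}
		refName := fields[2]

		cmd := exec.Command("gittuf", "verify-ref", refName)
		cmd.Stdout = os.Stderr
		cmd.Stderr = os.Stderr
		if err := cmd.Run(); err != nil {
			fmt.Fprintf(os.Stderr, "rejecting push: verification of %s failed: %v\n", refName, err)
			os.Exit(1)
		}
	}
	if err := scanner.Err(); err != nil {
		fmt.Fprintln(os.Stderr, err)
		os.Exit(1)
	}
}
```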
## Backwards Compatibility

TODO: Consider backwards compatibility after one or more configurations are adopted.

## Security

TODO: Consider the security model of each configuration.

## Prototype Implementation

None yet.

## Changelog

* January 20th, 2025: moved from `/docs/extensions` to `/docs/gaps` as GAP-2

## References

* [Atomic Git Pushes](https://git-scm.com/docs/git-push#Documentation/git-push.txt---no-atomic)

gittuf-0.9.0/docs/gaps/3/README.md

# Authentication Evidence Attestations

## Metadata

* **Number:** 3
* **Title:** Authentication Evidence Attestations
* **Implemented:** No
* **Withdrawn/Rejected:** No
* **Sponsors:** Aditya Sirish A Yelgundhalli (adityasaky)
* **Related GAPs:** [GAP-2](/docs/gaps/2/README.md)
* **Last Modified:** January 20, 2025

## Abstract

In certain workflows, it is necessary to authenticate an actor outside of the context of gittuf. For example, a gittuf user might create an RSL entry on behalf of a non-gittuf user after authenticating them. gittuf requires evidence of this authentication to be recorded in the repository using an attestation.

## Specification

### Authentication Evidence Structure

Primarily, this attestation is recorded for pushes that are not accompanied by RSL reference entries. As such, this attestation workflow focuses on that scenario. It has the following format:

```
TargetRef    string
FromTargetID string
ToTargetID   string
PushActor    string
EvidenceType string
Evidence     object
```

Note that this attestation's schema is a superset of the reference authorization attestation. While that one allows for detached authorizations for a reference update, this one is focused on providing evidence for a push. As such, to identify the push in question, the schema consists of many of the same fields.

The `PushActor` field identifies the actor who performed the push but did not create an RSL entry. `EvidenceType` is a string that identifies the type of evidence gathered. It dictates how `Evidence` must be parsed, as this field is an opaque object that differs from one evidence type to another.

TODO: `PushActor` has this notion of tracking actors in the policy even if they're not gittuf users. This is somewhat reasonable as this could just be a key ID, which is used just with Git. However, we're fast approaching a separation of actor identifier from their key ID. There's also a TAP for this that we should look at, and think about how OIDC bits can also connect here.

TODO: Add some example evidence types for common scenarios. Push certificate and GitHub API result (subset) ought to do the trick to explore verifiable and unverifiable evidence.

Authentication evidence attestations are stored in a directory called `authentication-evidence` in the attestations namespace. Each attestation must have the in-toto predicate type: `https://gittuf.dev/authentication-evidence/v`.

### Using Authentication Evidence

The authentication evidence can be used to create RSL entries on behalf of other developers. This mechanism is necessary for adoptions where a subset of developers do not use gittuf. When they submit changes to the main copy of the repository, they do not include RSL entries. Therefore, when a change is pushed to a branch by a non-gittuf user A, a gittuf user B can submit an RSL entry on their behalf.

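To make the schema above concrete, the following is a purely illustrative sketch of the evidence user B might record for such a push. The JSON layout, field casing, file name, and evidence payload are assumptions for illustration and do not reflect the exact gittuf attestation format.

```bash
# Hypothetical example only: user B records evidence that a push to 'main' was made by
# non-gittuf user A, based on data B gathered (e.g., a subset of a forge API response).
# Field names mirror the schema above; the serialization shown is an assumption.
cat > authentication-evidence.json <<'EOF'
{
  "TargetRef": "refs/heads/main",
  "FromTargetID": "<commit the branch pointed to before the push>",
  "ToTargetID": "<commit the branch points to after the push>",
  "PushActor": "<identifier for user A>",
  "EvidenceType": "<evidence type identifier>",
  "Evidence": {
    "note": "opaque, type-specific object, e.g. a push certificate or forge activity record"
  }
}
EOF
```
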
Additionally, the entry must identify the original user and include some evidence about why B thinks the change came from A. The evidence that the change came from A may be of several types, depending on the context. If user B completely controls the infrastructure hosting that copy of the repository, the evidence could be the communication of A to B that submitted the change. For example, if A pushes to B's repository using an SSH key associated with A, B has reasonable guarantees the change was indeed pushed by A. Here, B may be another developer managing a "local" copy of the repository or an online bot used by a self-hosted Git server, where the bot can reason about the communication from A.

In cases where this degree of control is unavailable, for example when using a third party forge such as GitHub, B has no means to reason directly about A's communication with the remote repository. In such cases, B may rely on other data to determine the push was from A, such as the GitHub API for repository activity which logs all pushes after authenticating the user performing the push.

Note that if A is a Git user who still signs their commits, a commit signature signed with A's key is not sufficient to say A performed the push. Creating a commit is distinct from pushing it to a remote repository, and can be performed by different users. When creating an RSL entry on behalf of another user in gittuf, the push event (which is captured in the RSL) is more important than the commit event.

## Motivation

In some repositories, it may not be possible to require all developers to use gittuf (e.g., open source contexts). In such cases, it's necessary to enable gittuf-enabled clients or systems to record the activity of Git-only users.

## Reasoning

TODO: flesh this out once there are two types of evidence, verifiable and unverifiable.

## Backwards Compatibility

This GAP does not impact the backwards compatibility of gittuf.

## Security

The changes proposed in this GAP require trusting the actor creating the attestation. This may be mitigated by having the evidence be independently verifiable (e.g., a Git push certificate from the original push actor).

TODO: flesh this out with specific evidence types.

## Changelog

* January 20th, 2025: moved from `/docs/extensions` to `/docs/gaps` as GAP-3

## References

* [Git signed push certificate](https://git-scm.com/docs/git-push#Documentation/git-push.txt---signedtruefalseif-asked)

gittuf-0.9.0/docs/gaps/4/README.md

# Supporting Global Constraints in gittuf

## Metadata

* **Number:** 4
* **Title:** Supporting Global Constraints in gittuf
* **Implemented:** No
* **Withdrawn/Rejected:** No
* **Sponsors:** Aditya Sirish A Yelgundhalli (adityasaky), Patrick Zielinski (patzielinski)
* **Last Modified:** January 21, 2025

## Abstract

gittuf implements the "delegation" mechanism for specifying the actors trusted to make changes to repository namespaces. This mechanism cannot be used by repository owners to set baseline security controls or controls that are not typical delegations of trust. This GAP introduces "global constraints", declared in a repository's root of trust, to address this shortcoming.

## Specification

This GAP introduces the notion of "global constraints" (also referred to as "global rules").

### Declaring Global Constraints

Global constraints are declared by the repository's owners in the root of trust metadata. Unlike the delegations implemented in gittuf today, global constraints can be of distinct types, with each type indicating how it is verified by a gittuf client. As with any changes to root of trust metadata, adding, removing, or updating a global constraint requires approval from a threshold of repository owners.

### Verifying Global Constraints

The gittuf verification workflow is extended to include the verification of all applicable global constraints after the standard workflow protecting the namespace being verified. The workflow used to verify a specific constraint depends on its type, and is discussed below alongside the proposed constraint types.

### Global Constraint Schema

Each global rule MUST have a name. This name MUST be unique among other global rules (irrespective of type), though the implementation may determine if the name must also be unique among all delegation names as well. Depending on the type of the global rule, additional information may be necessary. The extension defers the specific schema of global rules to the implementation, as different policy metadata versions may require different schemas.

### Global Constraint Types

This extension builds on the motivating examples and proposes two types of constraints. In time, more types may be added and these MUST be recorded in this extension document.

#### Minimum Threshold

Another common security control is the ability to require a minimum threshold of approvals for changes to some namespace, without also specifying the specific actors trusted for these approvals. Thus, the `threshold` global constraint requires one or more patterns that identify the namespaces protected by the constraint as well as a numeric threshold value.

The `threshold` constraint is verified against the set of accepted actors identified by the standard verification workflow. Consider a delegation that protects the `main` branch requiring two of Alice, Bob, and Carol to approve changes. The successful termination of the standard verification workflow requires two signatures to be verified, and the workflow returns the corresponding two actors. The global constraint is then verified against this returned set to see if the number of verified actors meets the global constraint's threshold.

Depending on the existence and configuration of delegations protecting the same namespace as a global constraint, several situations are possible.

##### Delegation(s) exist and they all require a threshold equal to or higher than the global constraint

In this scenario, there may be multiple delegations (at multiple levels) all protecting the same namespace as a threshold global constraint. If all delegations have a threshold equal to or higher than that declared in the global constraint, then the global constraint will always be satisfied. This is similar to the scenario described above.

##### Delegation(s) exist and only some require a threshold equal to or higher than the global constraint

There may be multiple delegations (at multiple levels) of which only some require a threshold equal to or greater than that of the global constraint. Consider for example that the primary rule file requires two of Alice, Bob, and Carol to approve changes to the namespace. In turn, a threshold of Alice, Bob, and Carol delegate trust for this namespace to Dana and Erin, but require only a threshold of one.

In this scenario, when the standard verification workflow successfully verifies signatures against the Alice, Bob, and Carol delegation (i.e., two of the three have approved the changes), then the global constraint is met as well. However, if only one of Dana and Erin issues a signature, while this satisfies the standard verification workflow, this does not satisfy the global constraint.

TODO: what if both Dana and Erin sign despite only one needing to? Should we verify exhaustively and return both, thus meeting the global constraint?

##### Delegations do not exist but a threshold global constraint is set

When no delegations exist protecting a namespace, the standard verification workflow terminates without verifying any signatures. This extension proposes a special case for this scenario where no delegations exist but a global constraint does. Specifically, all actors' keys declared across all metadata are used to verify the signatures associated with the change (RSL entry signature, reference authorizations, other attestations as applicable). The assumption here is that any actor declared in the metadata is trusted to make write changes to a namespace that is not protected by any explicit delegations. This may be updated in a future version of gittuf policy metadata that allows declaring granular permissions on a per-actor basis.

#### Prevent Force Pushes

A force push results in the history of the branch being rewritten. Thus, the `prevent-force-pushes` global constraint prevents rewriting history for the specified Git references. As such, this constraint requires one or more patterns that identify the repository references protected by the constraint.

This constraint type is verified for every entry for a reference protected by the constraint. When verifying an RSL reference entry, the previous unskipped RSL reference entry for the same reference is identified. To meet this constraint, the verification workflow determines if the current RSL reference entry's target commit is a descendant of the previous RSL reference entry's target commit. If yes, then the constraint is met. If not, then verification terminates with an error indicating the rule that was not met.

### Example

The following snippet shows the declaration of both types of rules in a repository's root of trust metadata.

TODO: add example snippet and explanation.

## Motivation

gittuf currently supports explicitly granting write permission to a namespace to a specific set of actors as well as a threshold that identifies the number of actors that must agree to a change to that namespace. This extension of trust uses the concept of "delegation", and a set of actors granted some trust can choose to delegate this further to other actors.

This mechanism does not support setting generic controls over changes in the repository, such as to enforce a baseline security posture _irrespective of the configuration of specific delegations controlling who can make changes to a namespace_. This GAP considers some motivating scenarios that influence the design of global constraints. These are not exhaustive and may be expanded at a later date.

### Minimum Threshold

Organizations frequently look to require a minimum number of approvers for source code changes, irrespective of the specific actors who are trusted as approvers for some repository namespace, which can be declared using standard rules / delegations. For example, a large monorepo may include several levels of delegations determining which actors are trusted for which subprojects.
The repository owners wish to set a minimum threshold of two for all changes to a namespace irrespective of the specific subprojects or the actors trusted, leaving that to the more specific delegations. To achieve this in gittuf without global constraints, the owners must ensure every delegation that matches the namespace has a threshold of at least two, which is impractical.

### Prevent Force Pushes

A repository's owners may choose (perhaps to conform with the upcoming SLSA source track that requires this constraint) to block force pushes to some branches, thus preventing alterations of their histories. This ensures continuity of important repository branches meant for downstream consumption. There is currently no way to enforce such a rule using gittuf's delegations; indeed, this constraint isn't a delegation of trust at all, but rather a specific property that must be enforced for repository changes.

### Specify Merge Strategies

A repository's owners may choose to enforce specific merge strategies (e.g., always create a merge commit, merge using a squashed commit that applies a change atomically, etc.).

### Require Execution of Additional Checks

A repository's owners may require additional source code checks to be executed prior to some change being merged. For example, a repository may require its unit tests to be executed for every change, and expect all checks to pass for the change to be merged. Another example is the execution of linters and secrets scanners that enforce source code quality and hygiene.

## Reasoning

The GAP introduces the generic notion of "global constraints" that can be extended to support a variety of repository-wide security controls.

### Limiting to Root of Trust

Global constraints must be declared in a repository's root of trust as they are repository-wide constraints, and not specific to any one rule file. In the future, there may be a preference to move (or otherwise support) this in the primary rule file, for repositories where there is a strict separation between what the root of trust and primary rule file metadata are used for. This is currently not part of the GAP so as to keep rule file schemas consistent across primary and delegated rule files.

### Supporting Types of Global Constraints

This GAP introduces the notion of global constraints and does not further group specific constraint types together based on any shared characteristics. This is because, ultimately, each security control that is supported via a global constraint type is likely to have a unique verification workflow that must be added to the implementation. Over time, the implementation (and this GAP) may evolve in a way where different constraint types that share verification characteristics are grouped together.

### Alternative Solution: User Programmable Checks

Supporting types of global constraints in gittuf may not be preferable due to maintainability concerns. Additionally, changes in semantics of a named constraint type can cause inconsistencies in verification. An alternative approach may be to introduce a generic programmable layer into gittuf. In such a model, global constraints would be expressed as small check programs executed in a pre-determined environment built into gittuf.

TODO: connect this with gittuf + lua / hooks work

## Backwards Compatibility

This GAP impacts backwards compatibility in certain cases. If a repository's metadata does not declare global constraints, any version of gittuf (with or without support for global constraints) can be used for verification.

If a repository's metadata declares global constraints, then a version of gittuf released prior to the addition of this feature will ignore the global rules altogether. Additionally, even a gittuf client with knowledge of the concept of global constraints may not support a specific type of constraint. In such scenarios, the client is unable to verify the unsupported global constraint(s), and must abort verification with a message to the user to update their gittuf client.

## Security

Adding more mechanisms or types of rules to gittuf does not inherently pose a security threat. The concerns with security relate to incompatibility of clients, similar to those discussed in the backwards compatibility section. Essentially, support for global constraints must be added in a manner such that an older client can:

* abort verification with a warning when it encounters an unrecognized global constraint type
* preserve global constraints when making changes to the repository's root of trust metadata, even if the client entirely lacks support for global constraints

## Prototype Implementation

Initial support for global constraints has been implemented as a gittuf developer mode feature.

## References

* [SLSA Source Track Draft](https://slsa.dev/spec/draft/source-requirements)

gittuf-0.9.0/docs/gaps/README.md

# gittuf Augmentation Proposals (GAPs)

A gittuf Augmentation Proposal (GAP) is a design document that describes new features, extensions, or changes to the gittuf design document. A GAP is a **living document** that provides a concise technical specification of the feature, describes the motivation for the change, and discusses the rationale behind the design.

gittuf is an implementation-first project, unlike sister projects like in-toto and TUF that are primarily specification projects. However, gittuf's implementation includes a design document that describes how different aspects of gittuf work (e.g., communication with forges, verification and recovery workflows, etc.). The design document ensures gittuf's core features are developed in a reasoned manner, which is especially important as gittuf is a security project. GAPs ensure that changes to gittuf's design are similarly developed in a reasoned manner with input from gittuf maintainers and community members alike.

**Note:** A GAP cannot be used to propose changes to gittuf community processes, structure, or governance. Process-related changes must instead be proposed to the gittuf/community repository.

## GAP Format

All GAPs must have the following sections to be merged into the gittuf repository. A template of this document is available alongside this document.

1. **Metadata:** A list at the very top of a GAP that contains the GAP's number, title, implemented status, withdrawn/rejected status, sponsors, contributors, related GAPs and the date it was last modified.
1. **Abstract:** A short description of the GAP.
1. **Specification:** The technical specification of the proposed changes or additions to the gittuf design.
1. **Motivation:** A description of the motivation for the proposed changes. The motivation may precede the specification section in cases where the context it provides is important to reason about the specification.
1. **Reasoning:** A discussion of the reasoning behind specific design or architectural decisions proposed in the specification.
The sponsors should also try to include summaries of discussions from the community related to these decisions (perhaps raised on the pull request proposing the GAP or in synchronous discussions in gittuf community meetings).
1. **Backwards Compatibility:** A discussion of how the proposed changes impact the backwards compatibility of gittuf's design and implementation. If a GAP does not break backwards compatibility, that must be stated explicitly.
1. **Security:** As gittuf is fundamentally a security project, any changes to gittuf's design must be considered carefully with respect to how they change the security model of the project. Each GAP must discuss the security impact of the proposed changes, potential issues that may arise as a consequence of the proposed changes, their mitigations, and any implementation-specific footguns developers must be mindful of.
1. **Changelog:** Every time a change to a GAP is **merged** into the repository, an entry must be added to this section with a brief summary of the changes and the date they were made. This section may be omitted for the very first iteration of a GAP.
1. **References:** A list of references that include links to discussions pertaining to the GAP as well as any external links relevant to the proposed changes.

A GAP document may also include the following optional sections.

1. **Acknowledgements:** Any relevant acknowledgements to people (who are not sponsors or contributors of the GAP) or projects (that in some way inspired the GAP).

An **unimplemented** GAP may also include the following sections. These are optional.

1. **Prototype Implementation:** A description or link to a prototype of the proposed changes.

When a GAP is implemented, the document must be updated to reflect this in the metadata section. Additionally, the following sections must be added to the document.

1. **Implementation:** A description of how the GAP was implemented in gittuf. If a prototype implementation was accepted as the final implementation, this section may indicate as such and refer to the prototype implementation section.

If the features proposed in an implemented GAP are later removed, the GAP must be updated to reflect this in the document's metadata section. The GAP's reasoning must also be updated to indicate why the feature was removed.

## GAP Responsibilities

The participants in a GAP have specific responsibilities.

### Sponsor

Every GAP must have at least one sponsor. There is no limit to the number of sponsors a GAP may have. Each sponsor must be listed in the GAP's metadata section. The sponsors are the original proposers of a GAP and take on the responsibility of authoring the document and submitting it for review by the gittuf maintainers. Additionally, the sponsors should update the GAP based on feedback or changes to the corresponding implementations, thus ensuring the document reflects the latest status of the proposed changes.

### Contributor

A GAP may have one or more contributors. These are members of the community who contribute to the GAP but do not wish to sponsor it. Each contributor must be listed in the GAP's metadata section.

### gittuf Maintainers

Ultimately, the gittuf maintainers are responsible for overseeing GAPs and keeping them updated.
The responsibilities of the gittuf maintainers include (but are not limited to):

* Engaging in discussions of problems to determine if a GAP is necessary
* Reviewing and providing feedback on a GAP in a timely manner with a focus on the impact of the proposed changes on gittuf's security model
* Ensuring the GAP follows the prescribed format
* Updating merged GAPs (if the sponsors do not) to reflect the state of their implementation; for example, if a GAP is implemented, if an implemented GAP feature is removed, or a GAP feature is updated significantly, the maintainers must ensure this is reflected in the living GAP document

Changes to GAP documents are subject to approval from the same threshold of gittuf maintainers as all implementation changes. That is, if the implementation requires two maintainers to approve some change, the same threshold applies to changes to GAPs.

## GAP Workflow

A GAP may go through the following phases in its evolution from problem to implemented solution.

### Discuss the problem

A GAP solves specific problems that the gittuf design currently does not. Rather than directly approach the community with a GAP, it is a good idea to discuss with the maintainers and the broader community the problem itself. This can help confirm that the problem in fact exists (instead of being a misunderstanding of the gittuf design), has not already been explored in a previous GAP, and that a solution ought to be part of gittuf (rather than another complementary system). This discussion may happen in any forum used by the gittuf community, though the repository's issue tracker is recommended for maximum visibility.

### Propose the GAP

After the maintainers agree that a GAP is required, one or more sponsors can author a draft of the GAP. To submit a GAP, one of the sponsors must open a pull request with the document to the gittuf repository. The GAP must follow the format specified in this document (or copy the template provided alongside this document), minus the number as that is assigned when the document is merged.

### Merging a proposed GAP

All proposed GAPs must be merged into the repository in a timely manner, regardless of their implementation status. This increases the visibility of each GAP, thus making it easier for other interested parties to discover the GAP, propose further changes, and contribute to the implementation of the GAP. The sponsors of a GAP may choose to withdraw their proposal or the maintainers may choose to reject the proposed changes after assessing the GAP. Even in these cases, the document must be merged into the repository with the status indicated in the document's metadata section. Note that a GAP must only be withdrawn or rejected on the basis of technical reasons (e.g., a better solution is proposed or a security issue is discovered as a consequence of the proposal). The reasoning section of the GAP must capture these technical considerations.

### Implementing a GAP

The changes proposed in a GAP may be implemented via patches to the gittuf implementation. The changes to the implementation need not be submitted by the sponsors of the GAP. When the gittuf maintainers think a GAP has been implemented, they can propose an update to the document reflecting this. The sponsors of a GAP may also propose marking a GAP as implemented, which is subject to approval from the gittuf maintainers.

### Removing a GAP's implementation

After a GAP is implemented, the corresponding changes or feature additions may be reverted (e.g., the feature leads to repeated security issues while being used rarely or gittuf's design as a whole evolves in a way that makes the GAP redundant). In such scenarios, the GAP must be updated to indicate this change. In addition to the corresponding changes to the metadata section, other sections such as the reasoning, implementation, backwards compatibility, and changelog must also be updated.

## Acknowledgements

The GAP format and process is inspired by similar mechanisms in the in-toto (in-toto Enhancements) and TUF (TUF Augmentation Proposals) communities.

## References

* [gittuf Design Document](/docs/design-document.md)
* [gittuf Maintainers](/MAINTAINERS.txt)
* [gittuf/community](https://github.com/gittuf/community)
* [GAP Template](/docs/gaps/template.md)
* [in-toto Enhancements (ITE)](https://github.com/in-toto/ite)
* [TUF Augmentation Proposals (TAPs)](https://github.com/theupdateframework/taps)

gittuf-0.9.0/docs/gaps/template.md

# GAP Template (update with title of GAP)

## Metadata

* **Number:**
* **Title:** GAP Template (update with title of GAP)
* **Implemented:** No
* **Withdrawn/Rejected:** No
* **Sponsors:**
* **Contributors:**
* **Related GAPs:**
* **Last Modified:**

## Abstract

## Specification

## Motivation

## Reasoning

## Backwards Compatibility

## Security

## Prototype Implementation

## Implementation

## Changelog

## Acknowledgements

## References

gittuf-0.9.0/docs/get-started.md

# Get Started

This guide presents a quick primer to using gittuf. Note that gittuf is currently in alpha, and it is not intended for use in a production repository.

## Install gittuf using pre-built binaries

> [!NOTE]
> Please use release v0.1.0 or higher, as prior releases were created to
> test the release workflow.

This repository provides pre-built binaries that are signed and published using [GoReleaser]. The signatures for these binaries are generated using [Sigstore], using the release workflow's identity. Make sure you have [cosign] installed on your system, then you will be able to securely download and verify the gittuf release:

### Unix-like operating systems

```sh
# Modify these values as necessary.
# One of: amd64, arm64
ARCH=amd64
# One of: linux, darwin, freebsd
OS=linux
# See https://github.com/gittuf/gittuf/releases for the latest version
VERSION=0.8.0

cd $(mktemp -d)

curl -LO https://github.com/gittuf/gittuf/releases/download/v${VERSION}/gittuf_${VERSION}_${OS}_${ARCH}
curl -LO https://github.com/gittuf/gittuf/releases/download/v${VERSION}/gittuf_${VERSION}_${OS}_${ARCH}.sig
curl -LO https://github.com/gittuf/gittuf/releases/download/v${VERSION}/gittuf_${VERSION}_${OS}_${ARCH}.pem

cosign verify-blob \
    --certificate gittuf_${VERSION}_${OS}_${ARCH}.pem \
    --signature gittuf_${VERSION}_${OS}_${ARCH}.sig \
    --certificate-identity https://github.com/gittuf/gittuf/.github/workflows/release.yml@refs/tags/v${VERSION} \
    --certificate-oidc-issuer https://token.actions.githubusercontent.com \
    gittuf_${VERSION}_${OS}_${ARCH}

sudo install gittuf_${VERSION}_${OS}_${ARCH} /usr/local/bin/gittuf

cd -

gittuf version
```

### Windows

#### Winget

gittuf can be installed on Windows from winget, provided winget is installed on the system:

```powershell
winget install gittuf
```

#### Manual installation

Copy and paste these commands in PowerShell to install gittuf. Please remember to change the version number (0.8.0 in this example) and architecture (amd64 in this example) according to your use-case and system.

```powershell
curl "https://github.com/gittuf/gittuf/releases/download/v0.8.0/gittuf_0.8.0_windows_amd64.exe" -O "gittuf_0.8.0_windows_amd64.exe"
curl "https://github.com/gittuf/gittuf/releases/download/v0.8.0/gittuf_0.8.0_windows_amd64.exe.sig" -O "gittuf_0.8.0_windows_amd64.exe.sig"
curl "https://github.com/gittuf/gittuf/releases/download/v0.8.0/gittuf_0.8.0_windows_amd64.exe.pem" -O "gittuf_0.8.0_windows_amd64.exe.pem"

cosign verify-blob --certificate gittuf_0.8.0_windows_amd64.exe.pem --signature gittuf_0.8.0_windows_amd64.exe.sig --certificate-identity https://github.com/gittuf/gittuf/.github/workflows/release.yml@refs/tags/v0.8.0 --certificate-oidc-issuer https://token.actions.githubusercontent.com gittuf_0.8.0_windows_amd64.exe
```

The gittuf binary is now verified on your system. You can run it from the terminal as `gittuf.exe` from this directory, or add it to your PATH as desired.

## Building from source

> [!NOTE]
> `make` needs to be installed manually on Windows as it is not packaged with
> the OS. The easiest way to install `make` on Windows is to use the
> `ezwinports.make` package: simply type `winget install ezwinports.make`
> in PowerShell.
> You can also install it from the [GNU website] or the [chocolatey] package manager.

To build from source, clone the repository and run `make`. This will also run the test suite prior to installing gittuf. Note that Go 1.23 or higher is necessary to build gittuf.

```sh
git clone https://github.com/gittuf/gittuf
cd gittuf
make
```

This will automatically put `gittuf` in the `GOPATH` as configured.

## Create keys

First, create some keys that are used for the gittuf root of trust, policies, as well as for commits created while following this guide.

> [!NOTE]
> If running on Windows, do not use the `-N ""` flag in the `ssh-keygen` commands.
> Instead, enter an empty passphrase when prompted.

```bash
mkdir gittuf-get-started && cd gittuf-get-started
mkdir keys && cd keys
ssh-keygen -q -t ecdsa -N "" -f root
ssh-keygen -q -t ecdsa -N "" -f policy
ssh-keygen -q -t ecdsa -N "" -f developer
```

## Create a Git repository

gittuf can be used with either a brand new repository or with an existing repository.
Here, we assume gittuf is being deployed with a fresh repository. Initialize the repository and configure Git to sign commits using the developer key.

```bash
cd .. && mkdir repo && cd repo
git init -q -b main
git config --local gpg.format ssh
git config --local user.signingkey ../keys/developer
```

## Initialize gittuf

Initialize gittuf's root of trust metadata.

```bash
gittuf trust init -k ../keys/root
```

After that, add a key for the primary policy. gittuf allows users to specify rules in one or more policy files. The primary policy file (called `targets`, from TUF) must be signed by keys specified in the root of trust.

```bash
gittuf trust add-policy-key -k ../keys/root --policy-key ../keys/policy.pub
gittuf policy init -k ../keys/policy --policy-name targets
```

Then, use the policy key to trust the developer's key and add a rule protecting the `main` branch.

```bash
gittuf policy add-key -k ../keys/policy --public-key ../keys/developer.pub
gittuf policy add-rule -k ../keys/policy --rule-name protect-main --rule-pattern git:refs/heads/main --authorize-key ../keys/developer.pub
```

Note that `add-key` can also be used to specify a GPG key or a [Sigstore] identity for use with [gitsign]. However, we're using SSH keys throughout this guide, as gittuf policy metadata currently cannot be signed using GPG (see [#229]). Also, `--authorize-key` in `gittuf policy add-rule` may return a deprecation warning. This guide will be updated with the new `--authorize` flag in its place.

After adding the required policies, _apply_ them from the policy-staging area. This means the policy will be applicable henceforth.

```bash
gittuf policy apply
```

## Making repository changes

You can make changes in the repository using standard Git workflows. However, changes to Git references (i.e., branches and tags) must be recorded in gittuf's reference state log (RSL). Currently, this must be executed manually or using a pre-push hook (see `gittuf add-hook -h` for more information about adding the hook, [#220] for planned gittuf and Git command compatibility, and the illustrative hook sketch at the end of this guide).

```bash
echo "Hello, world!" > README.md
git add . && git commit -q -S -m "Initial commit"
gittuf rsl record main
```

## Verifying policy

gittuf allows for verifying rules for Git references and files.

```sh
gittuf verify-ref --verbose main
```

## Communicating with a remote

gittuf includes helpers to push and fetch the policy and RSL references. However, there are some known issues (see [#328]) with these commands. In the meantime, Git can be used to keep gittuf's references updated.

```sh
git push origin refs/gittuf/*
git fetch origin refs/gittuf/*:refs/gittuf/*
```

## Verify gittuf itself

You can also verify the state of the gittuf source code repository with gittuf itself. For more information on verifying gittuf with gittuf, visit the [dogfooding] document.

## Conclusion

This is a very quick primer to gittuf! Please take a look at gittuf's [CLI docs] to learn more about using gittuf. If you find a bug, please [open an issue] on the gittuf repository.

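For reference, below is a minimal sketch of the kind of pre-push hook mentioned in the section on making repository changes. It is illustrative only and is not the hook installed by `gittuf add-hook`; it simply records an RSL entry for each branch being pushed before the push proceeds.

```bash
#!/bin/sh
# Illustrative pre-push hook (saved as .git/hooks/pre-push); not the official gittuf hook.
# Git supplies one line per ref being pushed: <local ref> <local oid> <remote ref> <remote oid>.
set -e

while read -r local_ref local_oid remote_ref remote_oid; do
    case "$local_ref" in
        refs/heads/*)
            # Record an RSL entry for the branch before it is pushed.
            gittuf rsl record "${local_ref#refs/heads/}"
            ;;
    esac
done
```
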
[Sigstore]: https://www.sigstore.dev/
[cosign]: https://github.com/sigstore/cosign
[gitsign]: https://github.com/sigstore/gitsign
[GoReleaser]: https://goreleaser.com/
[#276]: https://github.com/gittuf/gittuf/issues/276
[#229]: https://github.com/gittuf/gittuf/issues/229
[#220]: https://github.com/gittuf/gittuf/issues/220
[#328]: https://github.com/gittuf/gittuf/issues/328
[CLI docs]: /docs/cli/gittuf.md
[open an issue]: https://github.com/gittuf/gittuf/issues/new/choose
[dogfooding]: /docs/dogfood.md
[GNU website]: https://gnuwin32.sourceforge.net/packages/make.htm
[chocolatey]: https://community.chocolatey.org/packages/make
[official Go guide for Windows]: https://go.dev/wiki/SettingGOPATH#

gittuf-0.9.0/docs/media/gittuf-with-legacy-servers.png (binary image data omitted)
ÆŒCrr2@íï=*ª4‰åMú÷÷tê$šÀRJ)¥”RJ)/@h˜÷,aòõµÒ aåZ—ÍÈzøá‡‰¬Úhã]û_*c :t`ìØ± 4ˆÐÐÐZÝ{ÆáøúzÏB©Ð°`‚ƒ+}¯lFÖý÷ßO«V.Ø·kàÀÚ÷áJW^ééÔI4¥”RJ)¥”R^ªeRSO‡àÔ¼EügÚœ˜ÈêÖ­>>>5«iÓÆØlÕÜG©ŽØl~49mY×]wõëׯÑX‹…æ-šÔ¨m]HªÂ÷ïÄDVÛÚÌ¢ºï>¨îÞYu¥uk¸ì2OG¡N¢ ,¥”RJ)¥”òR‰‰±^±–ÕêCr«„*× 媫®¢aÚͤªîxu)¹UÖ*Ί²X,tèо}ûÖb¼D¬Öš%þ\)((€„ÄØ*×oР7Þxcõ÷¿*Ó¤ ÜqGÍںڳς –„*×ÒOD)¥”RJ)¥¼”Åb¡gÏöµ^’W[]º¶qûŒ¨¤äD4¨|É¢»Ô#)Ù½‰4›ÍÎ]Z»uÌ“cèÖ£Köõª–—_†'.¯¾nºÉ³1¨JiK)¥”RJ)¥¼XdTmÛµðØø ‰±Õš‰ã*‹¡WïŽ[Jh³ùÑ»w'÷'q€Äfq$6sÿ3/Ó¶] Ï"Ó§C€‡f6oS§‚‡ƪršÀRJ)¥”RJ)/צmsÌÊIlK÷íÜ>n™  .íß“ 7/£tŽ{Š ÌÝ¡[÷vÙ«m»´iÛÜíã:õèóæA7'ÐÚ¶…Ÿ†zõÜ;®ª2M`)¥”RJ)¥ÔY eRSúôíL` åççK×nméÞÃóËCCƒ¹ìò ˆ‹‹vËxqqÑ\vù„†zöHc ]»µ¥k·¶øùùÖùxþôéÛÙ£³ýœzö„… ᢋê~,‹F†Å‹!Ös³ÞÔ™yÏùœJ)¥”RJ)¥N+6.šèF Ù¾m7)»Ó8r$Ç¥ýÒ$¾IÉ ^s @@€Þ};“™™ÅŽm)¤¥Àn·»¬bbÒ¼e?¡ÐSBCƒ½>ÑX'êׇ>}<…ò ÝK)¥”RJ)¥”RJy5M`)¥”RJ)¥”—Ú¾};&L`Ó¦Mž¥VÊîcëÖ­žÅí¦OŸÎ]wÝÅÝwßÍìÙ³«Ý¾¸¸˜[o½•Ÿ~ú‰ƒ2a–.]Z‘Ö^ZZ&L`õêÕ5¨ˆ!C†ðóÏ?Ÿ¶ÞÂ… ™0aGŽ©ñXêì¢ ,¥”RJ)¥”òRË—/gôèÑ,X°ÀÓ¡ÔʲeË=z4‹-òt(n5~üxn»í6>þøc>þøã%"—.]ÊçŸNLL {÷îeôèÑ|ñÅumímÛ¶Ñ£GóÍ7ßԸŋ3cÆ 7n|ÚzÓ¦McôèÑdffÖx,uvÑ=°”RJ)¥”RJ©:0kÖ,¬V++V¬ S§N5:9qΜ9ÄÅÅѺukÖ®][Qz—9sæ@Ë–-=Šò2šÀRJ)¥”RJ)¥ê@jj*ÑÑÑtêÔ ŸêŸtøý÷ßsÅW¸:4¯u®Ýozz:sçÎeÑ¢ElÙ²…””²³³°ÙlDGGÓ¢E ºvíJ¿~ýèÚµ+‹.–«Œ&°”RJ)¥”Rê,°hÑ"¦L™BNNmÚ´á®»î"::ÚùþÞ½{ùì³Ï¸öÚkÙ¸q#ÿþ÷¿iÙ²%>ø 4Àn·3{ölæÏŸOzz:>>>$''sûí·“˜˜èìgùòåÌŸ?Ÿûî»7òÙgŸ±ÿ~¹ãŽ;HNN®ÛîÝ»™2e Û¶m#00=z0|øp|}}+Ô]¿~=“&M"55•˜˜† F×®]+ÔÛ¿?'Ndýúõcèܹ3£F"<¼üéƒEEE|öÙg,[¶ŒC‡ÃÀéß¿¹z³fÍbË–-\{íµ•ÞC™‚‚Þzë-.¸à, ï¿ÿ> 4àþûï§yóæìܹ“É“'³mÛ6üüüèÓ§ÇÇß߀5kÖðã?rèÐ!Œ1¼òÊ+ñÀ8ߟ1c{öìÁÏÏîÝ»3lØ0ÂÂÂÊÅ’™™É¯¿þÊO]*3uêTä‚ .cŒÄÅÅIrr²c¤^½z²téRgÝùóç #GŽcŒÈ–-[äðáÃÒ½{w$66Vzöì)qqqH@@€,Y²ÄÙÏË/¿,€üùÏcŒDDDHtt´âçç'óæÍ+ã”)SÄßß_|||¤mÛ¶’˜˜(€têÔIŽ=ê¬Èe—]&V«U¢¢¢$!!AŒ1âãã#Ÿþy¹>çÍ›'aaab±X$))I’’’Ä#QQQ²jÕ*g½cÇŽI§N-ZHÏž=¥^½zÈ=÷ÜS®Ï[n¹E™9sf¥ÏºLVV–2dÈ t>Ç/¾øBDD&Ož,~~~bµZ¥}ûö/€$%%ÉîÝ»÷›˜˜(V«U¬V«$&&J=DDdüøñHxx¸ôìÙSZ´h!€4nÜXRRR*|þV«U>,""kÖ¬@z÷î-!!!R¯^=iÞ¼¹X­VäÙgŸ-×~óæÍÎøâãã¥]»vbµZ% @¾úê«ru¿ýö[ i×®tíÚU‚ƒƒ^½zIqq±³îŽ;$!!AIHH–-[ —\rI…8^{íµJï766VöîÝ[.†O>ùD|}}%;;ÛymòäÉbµZÅ××W:vì(,ݺus~¿ËlÚ´Iš4i"€4mÚÔy¿2kÖ,g½AƒUh[fàÀbµZeß¾}•AÎ`ÆŒ’ AAArß}÷É’%KÄápT©mzzº¼ùæ›Ò¦M±Ùlòàƒ:?ûº´bÅ $÷ÉŠÿ?ùùP\)Þð©§ТE‹-Z´hÑ¢E‹–s¹¸"ÈÛo¿í¼>{ölñóó“¦M›Jaa¡ˆü‘À2ÆÈ /¼ ›7o–Ù³g‹ˆÈ_ÿúWäùçŸwþ˜v8ÎdÊM7Ýäì»,ìüÑïp8ä½÷Þ@úõë笻iÓ&ñóó“„„Ù°aƒ³î¸qã±cÇŠÈ ,™4i’Øív™5k–c$99ÙÙgFF†„‡‡KýúõeÑ¢EÎë .”ˆˆ‰•cÇŽ‰ˆ8cz÷Ýwõrrr¤W¯^Èo¿ýæ¼>mÚ4yì±Çdýúõ•>ë2e ,cŒŒ9R6mÚ$_ýµÈÚµkÅ××WZ´h!›6mr¶™9s¦Øl6éÑ£G¹dEÓ¦M%!!Áù://O¥GÎ{y÷Ýw|°\,C‡• .¸Àùº,ÈSO=åüì×­['!!!b³Ù$77WDDŠ‹‹¥U«VbµZË}¿¶mÛ&-[¶”ÀÀ@Ù±c‡ˆˆJTT”„‡‡ËÆË=‹Þ½{ ?üðƒóúÅ_,‹E>ùäçµo¾ùFÊ%°rssÅßß_.¸àÉËËsÖ}ûí·‡~¸Üý2D.¼ðBçë]»v‰¿¿¿$$$8“MÅÅÅrï½÷–KЊˆIRR’øúú–KRnÙ²Eš7o.AAA²k×.ùú믧Ÿ~ºÜø™™™bµZ媫®’êJOO—ˆÍf“'žxBÊ8ùÏþ#-[¶”ÈÈÈrÉ·º  ,-Z´hÑ¢E‹-Z´hÑâ’Ve?¨Ë~Äýõ×"òG«sçÎê¾öÚkrÛm·IQQQ¹ëEEEbµZ¥W¯^Îke ¬G}´B?!!!ë|ýðà ß~ûm¹z‡C àLÈ”%°†Z¡Ïnݺ‰1ÆÛ?þñd„ ê–%Ü&Nœ(""O=õ”2cÆŒrõ6lØ _~ù¥ìß¿¿BgR–À ‘üüürï >\ùùçŸ+´3fŒå’n''°ÒÒÒœ3ÑNœÑTRR"“&M’Õ«W;¯Ùív‰ŒŒ”çŸÞy­,Õ±cÇ ã9RY»v­ˆˆ|õÕWå’ˆ'úá‡Ê}Æ{÷î•{î¹§\"°Ì„ Ê=ó-[¶ 7ß|s…ºO<ñD¹Öž={+®¸BJJJœõŠ‹‹eÒ¤I²fÍšrÏ ""BÆç¼öÜsÏ•ûŽŸX·yóæåX3gÎtÎ<Ù·ß~+€<öØc"Rú½ŒŒ”fÍš•K8¾ùæ›åfÛUÕâÅ‹%**Jºwï.[·n­VÛÓ)((¿ýíoâãã#ùË_ª<“«ºÎ––RJ)¥”R^nРA®]uÕU¼ÿþû,]º”ë®»Îy½W¯^ê>úè£Î¿±cǶnÝÊâÅ‹Ž;V¡Meûÿ4mÚ”””çëÅ‹зoßrõŒ1|÷ÝwÚwï޽µ¸¸8V®\INNõë×gáÂ…øúúòÓO?•«[¶§ÖâÅ‹9r$ `ܸq :”iÓ¦1pà@®¹æÚ¶mKÛ¶m+ŒU;vtîiUfáÂ…øøøPPPP!¶²ý«/^LïÞ½+í³lC÷Ÿ~ú‰öíÛsýõ×sÍ5×н{wFŒQ®îÚµkÙ¿¥šwë֭µ²=žŽ=êŒ <<¼B¬………cœŸ_ll,|ðóýC‡±mÛ6Ö¯_ÏçŸàüŽ,Y²€ .¸ B —^z)/½ô’óull,íÛ·gΜ9tèÐë®»î”÷»jÕ*²²²ÊÝoY|'åããÃE]ÄŽ;œ×Êî·^½zî·¸¸¸\¾¾¾ 6Œ7ÞxƒåË—Ó³gO>ýôS"""¸æšk*ÜÛ©|÷Ýw <˜áÇóÖ[or°š°Ùl¼ð ôë×Áƒ“––ÆäÉ“ktÀ¹@XJ)¥”RJ)ååbcc+\kܸ1YYYå®GDDT¨[RRÂûï¿Ï„ ؼy3%%%4oÞ¼l–X‘‘‘®Ùl6‡óõþýû %88¸J÷TáZÙ‰keqdff0räÈSö³ÿ~z÷îÍÔ©Syþùç™={6³gÏæ¾ûî£{÷îÜÿý 6¬JqU¦²ç˜™™‰ÝnçÊ+¯Ñ–RJ)¥”RJy¹üüü ײ³³*œäVÙÛ{ï½—?þ˜Ö­[óØcѽ{w:uêD\\¾¾¾•&±ªòÙ××—¼¼<ìv»Ëf…øùùaŒaݺu•&j¿í¶Û¸í¶Ûزe ³fÍâË/¿dÙ²e,[¶ »Ý^a¦OU•%ÖNŽ-88˜¥K—ž²ÝÉ' 
ž,>>žY³fqôèQ¾ÿþ{¾úê+þ÷¿ÿñöÛo³råJ–,Y‚1†9sæÐ¿ÿJã¨ÊgS6諯¾¢C‡•Ö)›Ñ–™™I÷îÝ9pà7Üp^x!]ºt¡C‡|[Î(Õ IDATñÅŒ5ÊÙ¦,YYöý;QAAA…kM›6å›o¾!;;›ï¾ûޝ¾úŠo¿ý–·Þz‹U«V±hÑ" 4awùå——»ßàà`Š‹‹ÉËË«ð]8y¬²û5kÖ)gßx*fÛ¶méÖ­3fÌ`üøñ|úé§Àé§'Ú³g×]wwÝuW“W»vírþÛ «4iZ¦C‡üïÿã’K.!99™Gy¤FcžÍ*þKPJ)¥”RJ©ó˜1¦âtÛ¶m[…këׯ8ãR¹¬¬,&MšDll,+W®ä…^`РAÄÅÅ‘žžNII ………5Š«eË–”””°sçÎ ïÝqÇ\rÉ%U«Ï¤¤$D„¬¬,Ë›ÍÆòåËÉ“åË—óñÇœœÌc=ÆÊ•+ËË–¿¹JRRû÷ïÇßß¿Bl¬Y³†¼¼¼S¶ÏÈÈ`ÆŒlܸ‘ÐÐPn¾ùf>ÿüs222hÛ¶-Ë–-#%%…ììl–.]ZéòÁªjÙ²%ééébŠŠbÑ¢EÎIS¦Laÿþýüõ¯åË/¿ä¡‡¢wïÞ“šš àüŽ´jÕ €­[·VsóæÍå^§§§3cÆ 6mÚDXXC† aæÌ™dddкuk/^ÌÞ½{9|ø0+V¬¨p¿­[·®òX§»ßÈÈH-Zäœ=VfäÈ‘8p€ùóç3cÆ Ú·oOçÎÏølC‡¥]»vŒ?þŒõ+óé§Ÿ’˜˜H³fÍhÖ¬/¿üòÛtïÞ÷ߟ'žx‚U«VÕhܳ™&°”RJ)¥”Rª¼0cÌdcL€§)3iÒ$rss¯³³³?~<\{íµ§m[XXˆÝn'::ºÜÌ%€7Þx¨|æLUÜpà ¼öÚkå®ïܹ“™3gâp8ª½'ÐàÁƒxúé§{Aé¶?ÿùÏÜvÛm¬Y³€÷ߟ»îº‹ùóç—ë#::(Ýÿ©Ì±cÇ8|øpµj•Åöøã—›µVTTÄÈ‘#¹é¦›œKÙ*³gφ RaÆN`` ¡¡¡øøøÂܹs±Ûí\~ùå5ŽõÆoÄb±ðòË/søðárï½öÚkŒ1‚¯¿þøc†_BBB¹z‡fâĉåêôíÛ—ØØX&OžÌ¾}ûœu‹ŠŠøç?ÿY®ýîÝ»2d/¼ðB¹ëAAAãããChh(?þø#"BÿþýËÕ»õÖ[x饗°ÛíÎë6là¿ÿýo…û5ÆðÒK/U˜öꫯ2bÄfÏž]îú!Cð÷÷çÙgŸeçÎUž}õᇲnÝ:>ýôÓÍ}ú0fÌ xï½÷HIIáwÞ9í²#(Ý«¨sçÎÎý}ȱcǘ>}ºsŸ¥œœD¤Úûê >œO>ù„>úˆƒrÍ5×pøðaÞ|óMD¤BB£* Äu×]ÇþózôèÁwÜAXX_~ù%ÿýï¹øâ‹‰¤‡~˜3fpã7òÀЪU+RSSyûí·ñóóãÁtö{çw2cÆ fΜÉM7ÝTí¸xà>ÿüs¦Nʾ}û¸å–[0ÆðÉ'Ÿ°bÅ FŽYéžUeºuëÆ¥—^ÊÌ™3)))áꫯvîµdÉî¼óN"""˜3gíÚµ«tŸ©ªjÙ²%?þ8ãÆ£cÇŽŒ3†ØØX~þùg&NœH‹-xøá‡¸òÊ+yþùçyæ™g0ÆÀ–-[xë­·œI’œœ¬V+ï¾û.×_=ݺuãOúaaaLœ8±ÂL¼ž={Ò¯_?¦OŸNqq1à믿fÅŠŒ=š°°0æÌ™CÇŽ‰ŠŠ*×¾_¿~Ü~ûíL:•þýû3tèP222xýõ× -—˜kݺ5ýë_yå•WèСcÆŒ!&&†Ÿþ™O>ù„¤¤$zè¡rý‡‡‡sýõ×3}út|}}:tèŸknn.ûÛßxöÙg+Ý›îLD„»îº‹C‡U»m™×_ääd¦L™Rå¤Û9ÁÓÇ jÑ¢E‹-Z´hÑ¢E‹·àJ@€t w-ûº§I=òN>ž¾¬D…ùÉôéÓ¥23fÌððp™9s¦\yå•bŒ@bcceòäÉåê.Z´HÂÃÃåå—_®ÐÏÎ;¥ÿþÎö~~~ríµ×ʦM›ä‘G‘ððpY½zµˆˆ¼ùæ›.?þøc…~.¾øb‰-w-''Gî¿ÿ~ @Œ1ÒµkWY¸pa…û˜:uj…>GŒ!ááá’••å¼VTT$Ï>û¬DFFÊñÏABBBäOú“dgg—k?oÞ<éÑ£‡óÞŒ1Ò­[7™;wn¹z£F’ððpùÏþSé³.sèÐ! —Ûo¿½Ò÷³³³e̘1æŒ-22R^xá)...W·C‡Ò±cÇrײ²²dôèÑèlß°aCyê©§¤°°PDDâããå/ùK…±×­['áááòä“OVxïå—_–ððpY´hQ¹ë|ð$&&:DzÙl2lØ0Ù³gO¹zŸ~ú©ÄÄÄ8ëEEEÉsÏ='G•ÆKŸ>}ÊÕÿî»ï¤uëÖÎúݺu“~ø¡ÂwðàÁƒr÷ÝwK@@@¹çõôÓOKQQ‘8iܸ±<ñÄ•>ï’’yúé§%44Ôùݽçž{ä•W^‘ððpÙ¾}{¹úï½÷ž$$$”»ßáÇKjjj¥ýÿøã.·Þzk¥ïŸìŸÿü§DEEI^^^•êŸì½÷ÞsÆvbyôÑG«ÕÏã?.IIIb·ÛkljV¬X!€ä>Yñÿ'?J€+Å þ_6"•Ÿ8¡”RJ)¥”Rç+cŒØDþÀXy¿†}ÝÓ¤o¦<\ù’Äèñ¾¼ùþd† rƾ>L^^5ªtsï3)(( ##ƒèèhüýý«ÝþtŠ‹‹ÉÈÈ ~ýú§Ü|½ºD„ŒŒ Œ1Îe§RXXHFFááá6¶¯ %%%dddpÆYp§’‘‘ˆé\Š&"ìÚµ‹† â²x8@aa!111§ýî8p€ââb5jT¥yÄápTzjåÉÊî7**ʃÃá`÷îÝDFFžö4K»ÝNZZ–žê>ŠŠŠjüo¥2"B‹-¸óÎ;yâ‰'ªÝ~×®]tèÐÁ9›íD>úh…¥¸§“™™I\\ß~û-—]vYµc9ÑÊ•+éÞ½;¹O‚NZñkû‡Ø‹ì\#"ßÕjÐ%„J)¥”RJ)uqcÞþüŸ1¦7pˆT<ÐMÂÃÃËíëT]þþþ4mÚÔuÀ××—¸¸8—öiŒ¡Q£FUªk³Ùˆwéø§cµZk´„ìD•%åŒ1$&&ÖªßÊ4lXµ³ ªZ¯Lƒ ª\·²ûµX,Uº_Ÿj}¿ª{U±|ùrvíÚŰaêÝÖáp0räÈJ“W5ÅW\Á´iÓjÀ:[è&îJ)¥”RJ)U¹‰@ô.cšx4*¥”G|ÿý÷tíÚµF‰Ú7Þxƒ 8__z饵Žçúë¯gΜ9œ/+ë4¥”RJ)¥”R•‘`𠈬ºGUƘ~žŒM)å~ .䢋.ªv»Í›7óôÓO;_1‚«¯¾ºÖñôë×ôôt¶oß^ë¾ÎšÀRJ)¥”RJ©S{èeŒé*"€þ”ÎÌúÑó˜gCSJ¹ÓÆéÔ©SµÚ”””0bÄ hÔ¨ãÇwI<‰‰‰„……ñÛo¿¹¤?o§ ,¥”RJ)¥”:Ù Ìî?þºDDFÏc¦SùæìJ©sGNNû÷ï§E‹Õj÷üóϳråJçëwß}·VûجY³füþûï.ëÏ›iK)¥”RJ)¥NïmàVcŒó˜5™ ô.ê¾XJÛŽ9P­äÓš5kxùå—¯GŽÉu×]çÒ¸ÂÃÃÉÎÎviŸÞJXJ)¥”RJ)uzßiÀ'^‘Քƒî‹¥Ô9­°°(=í²ªõ‡Nqq1111¼þúë.+ €ü|ŒêVšÀRJ)¥”RJ©Óð>p¯1ÆzÒ{º/–RçÀÀ@€*'‹üñr{S}ôÑG.]:XæØ±c¹¼_od=s¥”RJ)¥”:ï} < ¾:ñ )7Æü¼mŒIî;žør‹¢¢bŠŠŠkÔÖjµâïïçâˆÜÃápP_ˆC¤Úm-Æà`Ãb9?æuØí ‘š<+‹‹©ƒÈÎáááX,–*탵uëVþõ¯9_7jÔˆ °`ÁçµU«V•kóË/¿ðøãpÿý÷W¥¸8@ýúõ«zg5M`)¥”RJ)¥Ôic|€>À!àANJ`•‘ Ƙ|à`L]Æ”KÊî4Òöe’““‡Ýn¯U‹…  b7¤I| õ\©k•—ššÉž”4:JAAa­ûô÷·Q¿~(MâcˆÂê{nüL.**fïž öìIçÈá£պπ ÂiÒ¤c#ñññqA¤g‡€€âââØºu+½{÷>mÝC‡ápü‘¿NOOç•W^9m›+V°bÅ n¸á†*%°;wî¬öÆòg«sã_¦RJ)¥”RJ¹˜1& ¸ ijñghfö‹Hí2J§PPPȆõÛù}çÞͤ9‡ÃANÎ1¶n9ÆÖ-»‰Šn@—®­ vÙµµ{×>~]»Å%I«’–v€´´Øl~´iÛœ-ã1æìœm$"¤ìNcíšÍ.IZ(?¿Ô½¤îÍ ÐŸ’hšÐØ¥cx³:°jÕ*FåéPظq#ùùù´oßÞÓ¡¸…&°”RJ)¥”RêƘdà>J7m? 
LÞ9¾ßÕ™Duס¬lÌ_éò¤De23òý·‹èuAGâšD×ùx§ãp8X¶d{ö¤×ùX……E¬Y½‰ý™YôêÝñ¬›aTRbgñÂ5¤§Wå«Z;ùy,[ºŽÌÌ,ºuow^,/¼è¢‹øðÃ=†Ó¼yóhÙ²%ŸIÄóc±¯RJ)¥”RJ†1ÆbŒ¹Æó#° 茚ˆÈsUL^DQ ¬ý™YÌ›»Ì-É«2‡ƒ%‹×’²;ÍmcVÃüŸWº%yu¢ÔÔLÌ_…Ýî¶mÌj­¸¸„y?-sKòêD»~Oeñ¢5.è­ ÄÖ­[Ù°aÃiëuëÖC‡¶Œ7®\›xÀù^çΫÏÌ™3¹æškj|?gM`)¥”RJ)¥Î[ƘÈã'îfé@Gé#"_ß ½:\>+/¯€Å‹ÖRRR'«OKDX±|=‡e»}l€5«JgCyÂþÌ,Ö®Ù䑱kbÅòõòÐç´/5“¶{dlwjÞ¼9ݺucâĉ§­gµZ ?m (ׯßßßùžÕzæÅr›7ofÙ²e :´V÷t6Ñ–RJ)¥”Rê¼cŒénŒ™ ìîÞ‰ÈpY_Åö•ýžŠö»2ÖåK×¹uæÕÉìvK–üŠÃáÞ6ûR3Ù±c[Ç<ÙŽí{HÛçÒ³Nü¾3•½{êdåj•múm'öh î0vìX>úè#²²<“X-óÚk¯Ñ·o_:uêäÑ8ÜIXJ)¥”RJ©ó‚1ÆfŒfŒY,7I"2^DŽœ¡½¿1æcÌ*`90 ’j.•™™E¦‡f (çè1vïÚçÖ17xÉŒžuë¶zõò8‡Cøm£çŸ•ˆ°aý6O‡Qçn¾ùfbbbøûßÿî±Ö¯_ÏÔ©Syê©§<ƒ'è&îJ)¥”RJ©sš1¦1p/p7àL†‰H•~õcâ(ÝÔý.À˜HéRÀoOªîÒ=°6oÚ骮jmÓ¦$6‹uËXiiû9rø¨[Æ:“ì#9¤§ &&ÒÓ¡T*e÷>ŽË÷tdfd‘•u„ˆˆzž¥ÎX­VÞzë-®¾újn¿ývºuëV£~.½ôR>øàç몞$h·Û¹ï¾û8p ýû÷¯ÑØg+M`)¥”RJ)¥ÎIƘ.ÀX`°|p¦™V•´¿Ø ¼|$"ÇŒ1=€¥Æ˜$Ùz¼¾ÐÒäV­—xlÿ§Êäæä‘KXXpµoofQûR3½6µ/ÕûžÕ¹œÀ0`C‡åÖ[oeõêÕ„……U»víÚÑ®]»j·{î¹çزe ŸþyµÛžít ¡RJ)¥”Rêœq|™àpcÌZ`и‘Òe‚¯Tq™àpc̺ãíÃ+V"ò–ˆ‘åÀ*Jgv•‰|—d23º}ß©3IOsÏ~Pî>IïLÒÓ¼+ž2‡áEINðÞgåjï¼ó~~~ <˜¢"÷ìQ7mÚ4^zé%&OžL\\œ[Æô&šÀRJ)¥”RJõŒ11Ƙç€Tà_À/@sé/"ßÈ612Æ$c^>Þþ-`>("׈ÈO§hÿÀƘÐ㯣ÿé’%„ÙÙ¹®èÆ¥ÜSqq yyu>NuäåP\\z åE]D¯^½èÕ«Ó¦M«Rûƒ:ÛôêÕ‹yóæU©ÝÆ˵۴©ü©ˆyyù”W÷ ÌºåßÛºÌwß}ÇæÍ›¹é¦›ÈϯÛeœ3gÎdÔ¨Q¼ùæ›\}õÕu:–·Ò%„J)¥”RJ©³–1¦ðp=°øð©ˆäU¡­.î¡t3÷uÀ“UmÌ^†ïQšÀ*¨êÅ3)((tE7.U_÷1yã}Ci\¾¾V–-[æœqsýõ×W©mQQË–-s¾>xð`•Úåää”k—““S!&oãp8(**ÆÏÏ×Ó¡Ô¹øøxæÎËW\Aÿþý™9s&111.CDxõÕWùÛßþÆk¯½Æ<àÒþÏ&:K)¥”RJ)uV1Æcî1Æl t¦Tp¥ˆt‘ gJ>cBŒ1÷€ÿ¿<@DºT¥})>8ž séîÅ^6³Ü“7Þ7@q‘÷Åå½ÏªØÓ!¸MRRK–,ÁápбcG¾üòK—õ’’ÂUW]Å‹/¾ÈgŸ}ÆŸþô'—õ}6ÒXJ)¥”RJ©³‚1¦9¥'Þ ”ŸEdOÛ·îF…”žFøŽˆì­EXï.¦t–ËX6›Ÿ«ºr›ÝÇä÷ Üûm·ÝFqqi‚¦M›6UjÈСC¯ããã«Ô®aÆåÚ5hР|L^û¬lžÁ­bbbX°`ÿûß:t(|ðÏ=÷½{÷®Qà­·ÞbüøñtêÔ‰U«VѲeKG}öÑ–RJ)¥”RÊkc,À%ü±ÌïWà `ªˆœqÓ™Úk€Ç€)"Rë–D$Í3 xHÁ… ,7$‹ªËß ‰ o¼oø#®O>ù¤ÚmëիǧŸ~ZívÍ›7?m;w|Õeµú`µúx: ·óõõå…^àöÛoçÉ'Ÿä /¤K—.Ü~ûí\}õÕ$$$œ¶}nn.óçÏçóÏ?ç믿&&&†÷ߟaÆQ:ÁSiK)¥”RJ)åuŒ1õ€QÀ ˜ ô‘•Ul_ŸÒÙV÷Qzát «ˆ¬©ƒpÿ˜ ,§t.—¨QÏU]¹LDDXáããCX½²äœ¹²›Ô«‚÷%eý ð'?ß{6½÷Æï­;%%%ñïÿ›­[·òñÇóÚk¯ñÐCѤIÚ´iCóæÍ Æf³‘››Kjj*Û·ogݺuX­V ÀÔ©S¹öÚk½ò;çIšÀRJ)¥”RJy cL¥I«;`2ð¶ˆì«bûdJ“VwG ”. 
@@@QQQÜ|óͼúê«ôêÕ‹€€Gî½4¥”RJ)¥”ò¨ãËüRzšà¥”.óL‘3î]Iû%ÀHàkqWöçà ÓUZ,bc£Ø½«J¹»:×0²>þn«I|#¶lþÝ-cUE“&<Â)ÅÇÇxMËCl\´§Ãð:ÉÉÉ$''{:Œ³žžB¨”RJ)¥”òcLcÌãÀ.às`7ÐIDºŠÈ”3%¯Œ1‘Ƙ§€=”.Ü ´‘>"ò…“WӀøp,€6m›{Íþ7mÛ6wÛXõë‡yÍLžÆ# ¯_÷K'k*ºQ4 ÷t4MhLpp §ÃPç(M`)¥”RJ)u3Æ4öt '3Æt1Æ|ì¥tsö·8¹[DÖU¡}wcÌJW#€×X¹WD\¶UuˆÈ1JOEtÙ ,€ š5se—5Ò¨QC¢¢œ¹¢ µï„ÅâÙŸ¬‹…v’<CU´ïäñD§¯¯Õ­INuþÑ–RJ)¥”Rç¶¿y:cŒŸ1æVcÌb`%иh."ÿ‘Cgho3Æ 3Æ,– $/"Gêúªà] ÍÕvêÜšððPWw[eþþ6zôjïöqë…‡Ò©s+·{¢N]ZQ¯^ˆGc¨ŠÈÈú´ñpò¨k·¶éì+U‡4¥”RJ)¥Ô9Ê”NÉeŒ9ýùíuC#c̳”.üX ´‘ËEd¶ˆ8Îо±1æ@ ¥§ý-’EäJùߙڻ“ˆì‘Ý®î×ÇÇBŸ¾=²4ËfóãÂ~]ñ÷·¹}l€-ãIJöÌ×7)9-â=2vM´iÛœ„ÄXŒÝ¾CKâ›ÆxdluþЖRJ)¥”Rç®ÀFé©|neŒéeŒùŒÒÄÕ­ÀK”.ó{@D¶T¡}_cÌ J÷ǺxžÒe†cEd[†î•‚‚¹ìò^D4¨ç¶1CB‚¸ìò^Ô÷ðþO:·¢c§VX,îY"g±:vjåñÙ_ÕeŒ¡GÏö´nã¾}Ó||,tïQ:¦RuMO!TJ)¥”RêÜUvtÚ]Ƙ¿ß§©ÎclÀ àa '0 üWD¤Šíoþ´?Þþƪ¶?×ùûÛ¸¬/vîØË†õÛ(,,ª“q¬VZ·iFr«DïAU&¹U11 Y½z™ëlœ¨ètéҚаà:£®µïВظ(Ö¬ÚÄÁƒ‡ëlœÆ±QtêÜJ7mWn£ ,¥”RJ)¥Î]‘@ à tÔGu1ˆ1¦ p/p7¥¿1>†‹ÈÎ*¶o Œî<~écàúºXŽw¶3ÆÐ¼E›Å’‘q}©ûÉÍ=F~^!GÍVScð°H£˜†4މÄÇêãâÈk/4,˜‹/éNNÎ1öîÉ +ëùyö°ÊÓòóó% ПˆˆzÄ5‰&$$È…{Nýúa\vy/²ä°gOGg“—W@qqÍæô³ù@ƒõˆkÒˆ  F¬Ô™iK)¥”RJ©sW#JOÅ›ŒÅÅ ,cL—ãýÞ ì^>ªêL/cLà!àz`¥Ë«Üþ|f±Xˆ‰‰$&&ÒÓ¡¸]HH­Û4ótg…°z!´; 6¡Wª*4¥”RJ¹1&‚zÇÿî™o•RîT޹À1/IÂD”žŽ÷ˆ1¦¯ˆ,¬M‡Æàfà -ð-p%0·ŠËƒÛ€VÀwÀ•"òSmâRJ)unÓ–RJ)UƘ@ Ð,IVüÚ9°7§4i¥”Rø¿#|v”P´[UÀRÉscQ@¦ˆ¤cþKiÒ¨F ,cL"ppàL‰HJÛ7?ÞönJ—5~ ‘=5‰G¿²³³Ù·oDEEU»}VV»wï¦S§N^³Ç—RêÌ4¥”RJU‘1¦'p•ÿK'ÀNba8Ílá4µ„Oð#+ø€À=')¥íÝBé¾V-€YÀ€ª.ó3Æ„C(=°Ùñö‰ÈâÞÇyËápо}{š4irÞ'°ÒÓÓyæ™g¨_¿>÷ß?6›øøøj÷3gή¸â Ö®]K¿~ýxê©§4¥”—Ó–RJ)U cÌå>Øžz‡_˜Ä [sL”îg¥”ª+ÄÒ“XzÒûm¹d²ƒïã¶2ûñlRžµÿÅv Ÿ‘\8l4¥›¸—yxÎ󢈜\ùxÒþNJ“\…À$àÿª1c+‰?NÌ&o‹È¾ÚÜÄùÌáp°mÛ6š4iâéPÒÒÒX¿~½³}ZZGu]J©:£ ,¥”Rê¸ãK]®·à÷¬ÁÒ&‘‹íN-5i¥”r¹`¢èÈ:2–Å6Ö1µçN~øÎ×øo*¡ð9àkqÔr˜(Ê'°¦ã€›€©Pn™ßX` °xìÿÙ;ï°(εßïš)ÒQ)bA¬#5Õh¬1ÑÄ45šjŠIމI¾“^4õ¨'ÕÄ$DZý÷.Šˆ(‚ MÊË| UšÀ®òÞ×µ—ËÌ[ž™Avö7Ïó{y‰\×SØÿ^Ôl­; ûO~W%¯ŽñKªAQΞ=KZZ666¢×ë+l›ŸŸOLL YYY´mÛkëÊ?Þ222ˆ‰‰Àßß[%§¦¦K`` 666eöEGG“’’‚··7^^^åú M...åöåççsá®^½J‹-*õÆúçŸÐétôïßÿ†âÍÉÉáÌ™3 œiÓ¦M¥m³²²ˆŽŽÆÊÊŠÀÀÀ*ÇMLLäâÅ‹ØÛÛãçç'½¸$’@þ/‘H$‰BtÓ`µ_ƒþA éð júño+U߀J$‰)p%þüŸöA–j¹¿½ýÿtXíBt­í˜BàF‰Š¢\CÍŠzAá$„x8¬²ÞŠ¢tUenuâ•¢iaÿh`!„öŸ'Å«º³téÒb1jݺu!ø÷¿ÿ ¨™Y|ðžžžеkW:v숫«+ï¾ûn¹±–,Y‚¿¿?tîÜoooæÍ›G»víèÙ³gq»ÔÔTƇ‹‹ :u¢S§N¸¸¸ðôÓOc0ŠÛ%$$ „ÀÛÛ»ÚãxçwBð×_áëëKçÎiÓ¦MñxkÖ¬!((???ºv튷·7œ:uªxŒ   î¿ÿ~^zé%„¼÷Þ{üøãøúúÒ¦Mºv튧§'ݺucß¾}åbY³f ááá8::òôÓOsß}÷ðꫯ"„`Ïž=\½z•Gy‚ƒƒéÚµ+~~~´nÝš¿ÿþ»Ì˜LŸ>‚ƒƒiÛ¶-ÁÁÁüùçŸ!Êd‹|8ï¾û.o¼ñ:ubéÒ¥lܸ‘¯¾ú {{{f̘ÁŠ+ŠÇYµj#GŽ$??Ÿ¹sç²råJ† Æøñã9wî\™9Ÿ}öY~ûí7^}õUÖ¯_ÏßÿÍ€˜={6Ó§O¯Óñ<ýôÓ1nÜ8 „µµ5«W¯æÞ{ï%--¯¿þšµk×òÁ°wï^n»í6bcÕE*g͚ŴiÓxä‘GX°`Æ cÛ¶m<ñÄ´nÝš%K–ÉÇÌÑ£G¹÷Þ{ÉÌÌ,žßh4²víÚbÿ«§žzŠ×^{ €±cDz`ÁüüüP…ÁƒóË/¿0aÂV¯^ÍêÕ«™6m/^dôèѤ¤¤ûÒK/ñÞ{ïÆâÅ‹Y¸p!Œ3¦Ìñ Ltt4sæÌ!22’ß~û '''&NœÈºu7d1'‘4Zd ¡D"‘H-Bˆû4è~µÇÛövÞÞt•Ÿ‹‰Ä"h‚;ùP{‰=læýq׈!„«(ÊŠê{STCUFÀRå´âC`/°LQcu !¬€áÀ³@O`=ªIû_&(s”TBóæÍ1bcÆŒÁÃÑ#Gj¹ÜW_}E‹-X±bEq)`DDÞÞÞ >œ­[·–É.ÒëõlÙ²…Ö­[p÷ÝwãææÆ‡~Xfοþú‹®]»g7 8Ûn»„„’_%E©Ññøûû³aÆâr9£ÑÈ”)S°µµeëÖ­øùù0`ÀzöìI¿~ý˜>}:óæÍã®»î"/OMêëܹsñ¹xã7P…/¾ø‚°°0úöíKAA‹/æÌ™3tîÜ€½{÷’œœ\,`…††@§NŠÇܾ};Û·ogìØ±Ìš5«8þ;3ÔÔTæÌ™ÃÁƒéׯ111|ýõׄ……±nÝ:t:õVâþûï§OŸ>ìÞ½»¸ÿ¡C‡ˆ‹‹ãÍ7ßd„ ÅÛÛ¶m˸qã8}ú4 ¨Ñ9•H2K"‘H$!„^íg±¼ãG±ÀÊ›ZWéH$I½áM7F±Ðª3ãœb¹Vè?BTlpT/ GQ””ëw(Šò†¢(Kª¯„Bˆi¨e†s#@'EQ*вLŠWæA§ÓËÎ;ËùX9:ª+a¦¦¦ªñùÑ£G6lX±xUÄ /¼Pnlooo8ÀG}ÄÅ‹ª÷¾^¯g×®]Ì›7¯Nq5ªŒ×ÓˆŽŽfðàÁÅâU±|ùr *ÿ5+òÊzíµ×عs'F£ú+=mÚ4vïÞ],^¬^½wwwºtéReœááá$&&òå—_–Ùn4iÚ´)Pr~—/_N~~>Ï=÷\±x`eeÅ”)SÊô÷ôôDÁ?üÀÂ… ¹ví  iÇŽcòäÉUÆ%‘4vä“f‰D"‘4*„-5X-ÓcÓþ>-èaî$‰¤J4èèÆ¼è*Öóú3F ý…÷+Š[MWJù_Õ!D0xˆC-œ[‘&1Mš4!33“9sæpôèQN:ʼn'ˆ‹SŒ,rΜ9P¡©¸‡‡G± SÄÇÌÈ‘#yíµ×xýõ× aèС<ðÀtìØ±N1_/ ùäF…µµ5ýúõcèС :´Rãy‰D¢"3°$‰DÒhBtÔ ßÛŒvF²ÐJŠW‰äf¢=ÉB½+Aí5è÷ !ªS<)ïU)Bk!ÄH!Äv`7ÐUÀ Rå#)^YÿùÏhÕª“&Mbþüù(ŠÂC=Ä7ß|S¦]NŽêÅ/„¨pœëW,¼ï¾û8uêŸ}ö}úôáСC¼õÖ[óâ‹/Ö)æëW,2qoݺ5aaaå^>ø &L@«ÕV:¦ƒƒ;vì`ñâÅ<òÈ#(ŠÂ?þÈ!CèØ±c±‡VJJ »wï..¬Šüü|ÆŽKçÎyõÕWÙ°ažžž¼öÚkLš4©L۪ί••U¹mü1»wïfÚ´i´nÝšÕ«W3iÒ$Ú´iÃ’%KªM"iÌÈ ,‰D"‘4 
+b$št›œ£Bð´B.sQIL›.²k5Åüß$I’$.²ÛhÁ¸ÙÑ=‘¤;I³fÍØ»w/¯¾ú*...´mÛ–ääd~øáÛÔÄÀÀ@GwS’î 7&q@aÁÄ9ŒÂþÛÝ%©¢lÝjûùͶoÚVê òâÅV/–ºžü#œn=UÔ¾tc:k§ªkÞ*„ Ýk—þ©MQ.¥_âìõ³éÀ/Éæ “-9Œ"^£8í8ÅÚN„ú@F’$I²:ÅZ‹Ýv“ȾõK{JÒ]æÉ'ŸdÈ!lݺ•sçÎáææF«V­vt×$éžRÒþOÍüŒãíèŽT‘‚Gÿx”Sɧðsö£]µv\L»h0±`)¶ž‹iùçÚ?ø»øóŸÿáþ€ûØpna=¾cŽ<ÚèQj{XçÖOŠœdÛ7ª¹uôRÏ:=ù0òà 9¿o|ËœCslç6¦åžhú•†4Cƒ—&Ó˜Yæz¿Úÿ?ù€–þ-™Ûg.wþ½ZObV"CV )0]°,"ÎFÖŸjñ}ëõ-áÉa¶'0cb ¢à¼QÉáÌäüxš5f‹ü#uKxVÓrßPoê´)Ûʲ•™F¯àâ­ÆÅ[Z똩§Njšõñ d µZ¹ RWþ)°NÖkæìYº`Zg•í:Wôù)*luë]­eýêé¹o¨7Õ›;—pô½É‚‰ÓD˜Ìäüèè¾HÒÊÍ͇zˆ_|‘Q£FÉà•$9@±ù­„BQ”ǹÊ!~ÆŸQhJ yI•Ê©äS/¦_ý~zrþúyVYÅÀmeò爚±wÞNÞ¤d§ØÕúk(ãîGcŸÆ\ͼÊÊÓ+ë €½~íÅãM§ŠKN$°å‘ú%úbÓbyªùS¸ÍœCsH3¤ÙrHL8hkcÓùM¶éu1×cJ<7àùõϳôøRÂ…Q׳.‰Y‰K<ÆOÿüD\zœ­ìæ ›1n³~ÁÍzðaä‡8iœˆO·m³ ÿ‰ø‹/fHÃ!Ôö¨Mª!•W0ÿÈ|’³“me£“¢¿Íº„ðÎK;íú·üÔrÛy\˺fÛ>vóXΧž§]µvä˜sØt~S‰ç*9ÀQàO,þ+„«5U^ÿ3‘3ç<Û©GG÷å®óÄœ:ðäûÇb8¿¯ìÊK묢÷UY?ã Æìâ²”Uçgª0ì+ë*Lß <ÍáU×mûô®*úŽà÷â Wµ‘A]ÝØ>÷Z}eÜß“§—ÖGï–÷jÞð³ì]–tSõÞjoïmLÕ 'R.y«fÉÓvz·¾ÀÇmŽs>ªâî¯:¦]hÀ®…‰,}a<½´É|Ðô ™{ßÜéγ ÙËÝI’$I*¯´ !®)ŠÒ•“üͯxðj™+÷ÎЩf'Þmÿ.±i±¾z˜ϰíËŸ3ê|êyÛÏsÍ-´®Ë—ù¿-ÿWè¾Í6³ùBáù@#/Ey)²Àöé{¦ضóÒÎA ÒØ»Õ.ßWYúñEÔE³éü¦ƒK1)1…ž XGZ厶Ê/ÇœSä1R%,ƌ»Â"–8º;RÑ„©jE³ðóŸ¬G™X®÷÷$x€'×ãXžRò¤ùƒž ÿ¦~õôløôÊmk·å`/†}U ¯:»–Z«0øãô|ÕŸ¨ðä›`=öE-[ðêÊÉlÔZ… ûo_pðn•xÞÀÑ5©ð¤ïøV¾_0y/;Àƒ ,BÈ%’%I’¤;V©VBœV¥Ù†;*’!¬;Á¹ëç­ŠZQ#4òiıÄcz2¦å³Ù¿ÇÁ=•¤Jä"ð&ß ³˜æèîH%³`ž–ÈÉÿƱê´qtwî¾] °Žp1‹_Uµ"u{± ~õn],òLd:ÿoMtùx¶m{ÏWýñ®©ãÆd]}5ô~£*A£Sð«o=·s{3˜Ú6ºBꕬvÌ¿FðOz¾êÏúWÈN“‹Æı—DŽkbFÉ¥%I’$©ò*U @±WQ”lgÙhxµœNX¹]L»È›[Þdf÷™((VÌ2eñ̺gÊ•'J’îJ§1aa Æ:º;Ré!bÔŠî·ýÌ\6º’JR¯+õp°}¥uVáYM @êe#†L Õ›;ÐØ‰¬ëfNïHǘUøÔ-µV¡îý®xV×b̲pîï Ò®šìÊøÕÓ£sQÙýž“n&ñ|Éy]}4vtE¥V8»'ƒëñF<´è\T˜‚äXk §sÈLI¶€_}=Zç|íÖ×c6 2“MøÖÉ»¥ôn*üêëÉJ1“‘d½Š½»5'TÒS^ô˯ž0åXH¹dÄÕWƒwM­mVŠ¿úzŒY®Ç—œÃ­f°3U9aÊœý;ÃÖ÷\Š |ëZƒc™I&2SÌøÔÖQç>Ì&Á™d$™ «šjM¨òï5Ⱥnæ|T&‰…—ÍåYMKýö®XÌÖ `úµâËßȯ¾žZ-AÀ¥£Y$œÊ)²l•@=µZº`̶prkZ‘厭O%'“‡šFùò×× eêÓÝj?óŒ šåa8íè¾H’$IÒÍ(u @±AQ”Îìc=©¸ñ´%'9Îgû>cKìúÕïGM÷šø8ùcÎáðÕÃ,;¾Ì–¯J’îyG±NÌÆÂ«B™@åbÁ8%ލ¡ñì§­Ý;^ëG½¸oäüÞ ÛöÀ®ŒÝÀüÇÏÒ~¤/ÍúzØö§^6òÝ3Äìʰ«¯ó3~ œ\ªy,fÁÎùõµX[¾¢Ž5Eë”Hšx´)©WŒü_@ñ9—º¿äÏé5lÁ/‹IðÇ{q4ìâFó=¹“Ã;GxàI_»XGפ2åLs[]ŠSÎ4çZL+ßã??׳í èEÈ@/ÖL‰çwãxdFMå À„:GHºh›x´):g÷d0­}4½[ž¯å-éܤ—SÎ4'zs³zž,òÜ|ëèøï’zvp³mØùÃ5–½kËæì¡¶ÇÊ÷ã𨪥ë˜*(ÿ^Μ KÆ\`÷¢D[=Þ5u<^Ÿúí]íÚ´˜g%ð¿7/‘ðà„|XÝ–ߘmáwãJ5åÓÉ]ÍÈyuhæm·ýØúT~|ò©Wòs½Âã³ëðÀ(_”Çþg$š ÍQ`̲½1•A^´{ÂG°€xöÇ~ ˆ)Žî‹$I’$ݬ2°À6«'ÙÌwø0Þ%'9Îþ+ûÙe¿£»!I•“؈`Á›BˆÏÝ%©ì„‡ÕŠfÑ6>6”_uª²ÿy“òiþ 5(unoF¡ €a_ÕBï¦æôŽtüèñÐâ eø×µ™rßq[¹>o0dz ÛïI&\¼4¨Ô Ÿñ£jžÏ{*÷4Åû‡ûØRÂögâî¯áá©5ÈI¿³ãÐΞj^ßd›vxñp:gþ õtzÚ½›ŠyÃÏ8®çkþ¸xk¸°?g5þ õè]U ÿºÿH!;Õ:µnÌò@ê´qAëu3µïsA£SèýFU¢7¦òÏ:û”IžÕµ þ¸‰ç d$š¨ÕÒ­“ŠGgÖ$ýš‰]  ô'—¢‚çÿWŸ&¡ÖûëZL™)fj·v¡io^‰hÀ´vј Ö{aØ—µé0Ú 4dZ¸x8‹ªAz†S»È6ÎíÍ$duîsÁÙSMÖõ{w¡3Û™jP¡Zj¦’3ïKw¤$Ø´ Ž„ÈÈ(ù˜¢89A@A¯^ÖŸ++‹öíƒmÛ .­ÛÊC¥__¨Q:w†6m¬Û$IrŒr}ÂBW¥5—ù•OiÇ0´4/ù8I’¤J%ø#±d#xB±ÒÑ]’ÊÏ‚ùõ4â#\לáŽîÎËÅ[MõfÎ\:œUd9E¥ðQ«cÄËÆÉCÍÄ#Mñ©­£vë¼ÀOmƒ>ªXGÎ|÷ðNmOÇÝ_ÃÓKëÓ¸‡;A]Ýé:¦ ›¿LàÍj‡yæ—ú4ím p¼Uã09Åë8ÉZ¿ÖUGþ¼ŽJ­ðÄœÚtü¯_‰çk1 Æúä¥U ìè†ðºïA„Å:¢çܾL&E7`ÿoÉ,zö<Æì²Û~˪âø<¥%W¤ðÝÃgŠ=¦ûËþ¶àÕoo\dçWPþMmºŽ©ÂýÃ|X÷ÉbاpñÖðí ë ‹Š ^hH“^8y¨©sŸ 'þJ뺖Œ$©—üõõUÖL±®Æ2È‹VP§k–¢ÀŽy×øù¹ó ´zØ‹çÿg-ßb5vÿ”XdгÅCž¶àÕ®…‰,üÏ9„º<_…Ç¿«M­–.Ü?܇] ñ®¥£ã­Á«´«&>éMÂé\¼Ô¼¶1ˆ:÷¹ÚÆ¥#Ö{V¥Q¨ß¾`ÿï%ÿð ©Ä,˜ßtt_¤Û+!¾ø6l(à¦(k×—_ÂÀ¸qP§NÅÖ3,øã˜=Û´ªhK—‚ŸŒÈ@–$9B¹ÿÙ !â1Ñ SX„…ß±Pôç\I’¤Êå 0#9„™æ2xuçB$Z0½ý7ß3¹¹•âîe¹9®RâŠÎÍtäÏëij&AÏN5ÛQÜý­ÏÇÚŽð±M3ûó£xNmO -ÁÄ‚Ñç°˜­‘Žö#­ŠÌ³ÝH¬Ìs±#h|ëèðo` ðœÞžÎ‘?¯Ö ÔoÿwÑVI2“oh7ÙÚ®É ìÚ7™Éæ"ó|U´žÖvs›>·N‡ÖM¿l+Üß³Àq—£³9¼Êz-„NïL·íËÆ™gä‹>§ø¿j‡Y3%•Z¡vk»À³§º@ÝBÀÿÞºDî$ëËSlSFýêém·ÂÏÇËöóúWlulŸs•Ìd³ÝùuuC¥¶Þ;Ûf_%á´5GVfŠ™µÿÛ “?Ÿ˜WÍ{7%^&×ØË·F ¦ñB9—ò²v-<ò¬[WñÁ«\B@d$ ‹ßš6Ê*1þû_˜2åÖ¯r]»“'ÃÓO[G¸I’t{ÝÔ !„øPQ”-ìc‡©Nt´¹N¡$I•Ò`9FΡ`a0IQ¶ìÃRe6ۂ噿x¯i?¾Ñ*rµ‘2s÷Ï `åN5+LÚûàVþ„ì½õºWkêdÛv&Ò~îJr¬¤óüêë©Þ̉ò𪑠¸r2Ûn_f²™ëqF¼kÝž †rÃç¥n½*ÿƒT˜|²Y¾Êó~ 
h\ðÚxoò¿7ykUÜ÷¨7-ú{Ò´—.Þö«ÂÎ!ýª©@2øøcY¶¤ÿ^5´\=SxBöüÁ­—þl€È`Ô»Y«ÚÈéßzòÞ·øãöïí¿ç—?àèáoN%Xø‹÷Œ,Ç€ÙŽîtû,^ ŸN‘£ +šÑ³fÁ•+0vlÁÿo—‹áÅáÒ¥Û׿áÃÖ€Ù×_[§J’t{TÈ'{!ÄVÌ4"‹‰üN_a ¸}«nK’$/X`.° M„ïËàÕÝEa¶`x$ž†C,”…Ê!ÿ”³©èK˜~Ã*u¹‰ØóË¿‚(d4”mt@9¿ôò„ÊŸü=Wn ívй浥(Óvîõ)°¤„°æº“Shž¯W,lĘ“»šñ»óÔOuiæMN†™ís¯þúżf yûU…Ä„,ùâœÅ-!,¢ÐŸÁм“c‰e׿ ÁŠY—¿^Ë=šþê D<ûÍ aÿ>l–îÛ¶ÝÞàU~K–X§×9Bf&¼þúí ^劵î²ä,$Iºm*ìÑ”ÂLSe1q|Ä<Ž?&BÑŒ‘%I’c$aá & „YN¼› !Î(ŠòÔ^¾[@k%€Gw鎒?øáä^p Y®Ò®Í‰S§+ç£òM3¬¢Á·Žu”Íå|#jòù*iSÒù¼Uÿê´qAQòޝÚÈÉ6•±4rSìêÉÿ0.wJ›í˜|Ášü×ÊÍOS!#Ïæà^Eƒ1Ë» ŽÚú¤R+ÅqJóÞ´Ò‡šÁÖ\g¿ÿŸ5¿–ضAÁ Xóky×Ò‘›wí«6ÊY•{ó9—·oVè)®ÅäÝ7žSÊż²Õšäõ °åh+Œ³WÞûpcõ^ÀQö1[,ÿB½¼¥tW‰‹ƒ÷ßwLð*×W_A˖дéímwÚ4ˆ‰¹½mæ3fX¯¿$I·^…?šBÄ ³… >W™ÇR LÂÀj è”’$I'Ø |™À!öb¡?F™ëê^!„WPæ¯ç C:—K>@²I¾h´} ºÙéwûO±ýÜïÝÛô0­³ŠáßÔ¶åÇÚ³8/‘HþÑBÎêbƒXI&Nü•X§Ò ú¨Njª5uâ??Õ-S_ó·ë”¯]ƒÝvëÆÜ}ù§Ò媔&y|iäæ®ròPÓ᩼:»¾P…/ÓZ1áïÆ4ëãQ®º}ò½·—ŽfÙÞó¦ùêÓ¹¼øŠ}^ •ÆúÞ5ííAÃ.î€ugâù¢X§wäåâêùª¿íç ®î|•ÑŠ÷5¥ë˜*œø+ Ë¿#;=ã‡CkÌÉCM¿wªÙFnŽ/€«§ ŸÊx·Jç2Œ5((ó…KÝéö™;ÒÓK.w+Öét·Ó‰Öœ_޶jœ:åè^HÒ½á–%B\^Rå2Æ.þÃ6ZáG6Mp"¨ýM’$©t,À% 8…‘Ó¨QHÁÌÏÀ"aûÛAÉ,˜_2’Þ`%Ït|˜…:g|Ý¥;BF’‰øcYToæLµ&åËM•+îh[¾½J·ªà]SÇÄ#M‰?žOm.ÿŽ”9·7ƒ¿¾ÎË1ØÛûš Ò(ü_ÕCEŽ,XõA ;¡Ò(<øv¾m]Û='ÝBNºÅ–[©$ùÛýàX3„Y0¾ö ŒY´Î*šõõdÊ™æùó:Ë^‰åÔötz« ÀðojÑâ!OÜý54èäFN†½ëÍ='Üðé:<凋—š‘ßסÕ/r2,´ì…F§àS[GÌ+*DþÑpOίˮ…‰4r¢åà¼Dë.^?&2-´âÅ´ -¸v6‡º÷»ÚF›ý9©ø'•{—&Ñçͪ5u¢Ç+þÔiãB©By¡Ñ+4vâÌ¿A»”8#Ûæ^£Û Up¯¢á½ƒM‰=‰C'\}Ô˜ ®à0·-¬,….ìÏ,°ÿn•ÍuV3Æ`$s¯óËŽîtû\ºT9‚8ÿ ‡AH> !PÊ9duÞ<ÇŽ:Ë%ÌŸo &IÒ­u˳[ !®__+ŠÒ€k e}ØI{:üÈ!-þ¨ñ<'@èþ}I’to³9@6`2€kÀUWÈ! FÔh‰ÁÈ:`°Aæþ¸· ! Š¢ ÊäÚÎ5¼Ôh ótZ\J>PâäÖtª7s¦a7ûétåðË«±¤_3Ñçÿª¢uVÙMQ;°<…Ÿþ{SN^»JäQ¾è\T¶]|4d1ìÔ¶tæ†Å0âÛÚxXË_;›Ã‚QçxjQ=ônºbsyåŠ\È}aÞhTxU×"„u¤˜1ËÂŽy×èþ²?ŠbMDž;2íðÊö.Mâþá>hT´â…É øul,­ö¦a·r_7€”KF¾zðÿ]R¿zzZ<”·â`©¾r¦ØU‹³ÿ·d[ß½ªkypB³`ÓçWêæNíÖ.uuCQÙO•<¾1•øcÙôy+ÀöþXL‚•ãìFÒÆd|ùà)ž ¯Oݶ®vp#°ƒõ¥]5±ð©s\<œ—L&üõXÜ|5´yÌ‹ŠÀŽn2-ü8êC?«i7Ú*W­VÖãç÷evoü 0‘M¯Ò¹cÁÐ_qo =»ÇmØæJt«GD”.€•ÍîÝ»¹~ý:ƒ *s;°}{9:x‹lÝjÍÇå"?fHÒ-¥…­EÑmv@Z‚4ÀD‡tH’¤;‡Š,4œÃÄ,œÛåRáRaE©ªB·7€}˜¥•A¬’ÕÀ•·"ðaócÄýc *8¹«©dÊ•|ÉHêå¼Õî¼kél«¾ÅË.8ÜÙSM“^øÔÖ‘“fæôÎt⾚œ_}={¸£uVqép§¶§™\£W0åT…€§(ÆÏBXà³k!¸új8•ÉÇmŽàMŸK8còo¨§Q7w4N*bdr&2ÝÖnãîÔ q!3Å:m1>§ÀnԽ߅ì4 ÿD\'%ÎHÕFN8¹©Èɰp9Úzž*µB­–Ö^fйÈÕú œ£N!¨›;U9!,‚+'rˆÞœjwMòבl¶Ë/åê«Á¯®õœ¯3Ø;¸Q«•33ßÊÕ39øÖÑáæg}//ÉÂdToæŒÖI±õ»j#'š„º#,pl}jsñª¡Åó߀büñl»$ÿŠ êµu¥vkÔ:…k1ŽmH-4Ù<@½v®ÔoïJvº…#^'õ²‘šÁΨµ é‰&Û{¡R+̸Œ›Ÿ†¥/]`Ë7WKu}ïdF2YÇXãeŽ\¶`¸_qÅÑ}’ §(ʳxó9Ê9ÿä ð)\¹rÿ¼)¸Ï> û÷WP'+@õê°²˜d ¹«ÈÈH 7fذaengófxóÍ›èè-ðÙgÐ¥‹£{!I•ÓG}Ä”…SÈ~¶è•„‹u˜sF`Eavþû²£(Š;àúïËëÆý’$Ýs²°Ž»J® sqë\I’=!ÄEQ:_æÈ–?xºúC|-§– fWgQ3Ø™&½Üm¬ì4³ÝÔ³ü’c Å&ðκnfÿoÉ¥jÿZL;bJÜy'ª îU´$]0ð¿ñ‰Þd͉U3ÄW_ëÇœüIâÓL¤%>š+áT § o7zsÑ›Ó Ýw&23‘ö h®œ(øÍbE^¿â˜ ‚cëS9¶>µÈ2ÅÕ‘h*r[a}OÖÕäGw«ÒÚ·,‰î/V!°£AÝÜ9¹¥r>WúãÝ8üê è…“‡šy· ™V¾WìÈ%éîà꣡íÒ¯™øsòݽìõEv±‰· &²wZ0B8xý9ÉQRRJ.ã))ySwíÚ…ÅbÁd*<¨œžžÎ?åN•‘¡‚n²§+ùîø)I'X’$IÒ=C‘®(Jo3æ÷ÖòÊ{Í£=c•üsX€°è™óôBus‹éÝRÆl ß:ƒ_}=Õ›:áYM‹¢RH¹dàôŽt2S*Qvcé–ñ¨ªaõ¤x.DeÞµï¹3˜'ö3Õ\ æqBˆJ8þFº]\]݃¹¸"##Ù¹s'‹5ëCQ« &$$°fÍš2·áêêNe `¹Ýܺ’$•‚üÄ.I’$ÝS„&`¢¢(»ñû’Žºtg²Î“ÚŽîZ¥<›øã—ÝR¹“c—¸\º·ÜI÷jy\ç›yϘÈÉLn¦µŽî“äx^^ R¥’e­RE¡yó´k׎ÈÈHöìÙ€Ùl.Ȫ_¿~¹’¸úi…u·Âøú:º’t÷«ÄÏT%I’$éÖB¬µ`jžÈÉá 5ÿÍ7ÂD9WF‘$IºLdó7_‹p†š“8¹Ã‚±™B¯$À¼ª]Éž½¸º‚Ÿ_îÏ®ôêÕ‹±cÇÒ®];T*uü„â¦Ú©[÷&;z TÆ>IÒÝF°$I’¤{–â’IzX0?ÌÏ KœÃ•ìq¶$I÷…6°ŒA†#,N°`f†BˆKŽî›T¹zè!^{í5Z¶l‰¢(…æÅ*êÕ¡aÃrvòhÜÝ IºûÉ–$I’$Bˆ !Ìã–Ú©\üb“3—Ð?ç0‹È䪣»'IÒ],“«fKèoØÆGY©Ä}!°ÔÂ<^‘áèþI•×}÷AHˆ£{a½{—\. 
«Y³fånoÔ¨rZáFvt$éÞ X’$I’”â²â- æ™\›´—Ùñ‹é'Vóœá$«1 W¬—$éæHç$«YÍs†Åô{™ŸÉµ-˜ª a~KqÅÑ}”î Ï<ãèX=û,”ef ‡‡Í›7/w{½{C``¹¯0AŽî…$Ýä*„’$I’T!D ð±¢(SŽ—90ò2‡†måCwäÔ ½Suî£*Áèñptw%IªärHå ‡‰#ŠKìÎNâ´^A–e±ÈLÎNq³™­¥{Rûö0l,[æäfZý IDAT¸>téÜÞ6U*øøcëH¬l­Áâì “'[û"IÒ­'XÒ-£(Š ÐhÔ\5 Ä'}BˆdGõQ’$©$ÿ~¡ÜìPåe ]"§º_ç|Ÿ#,i#0ët¸¼¨cñ&PïN E‡ZœÑâ‚79àY’îz Ò1’‰‘, ¤“Æ%‘Ì™œΫ ¤ëÔ5ê}& 뀿¦=Bƒ£û.Ýù^}NŸ†}ûnÛ À‡B9ÓYÝ”À@xÿ}xï=0›ooÛj5LœõëßÞv%é^&XR…RÅxÌÙÙùI•JÕPW«V-§^½zŠJ¥R‰äädqáÂKll¬Þl6+®®®Ç333~BÄ:ø$I’ŠôïÍíÿ¾&)Š¢šHo˜À?A׈n¢B¤ r³ Ü&7ÅI`¹¹å–$IªôTFU¶‚&]…’&°¤[0´`>Žõ¡Ý)ù¨Q˜rÝWéî£Õ—_ÂÀúõ·¯ÝÖ­áÓOÁÝýöµy£Þ½ÁÉ Þ~ûöÄrv†©S¡S§ÛÓž$IV2€%UEQªªTª æyOOOeäÈ‘Ú~ýú):tÀÕÕÕ©°cŒF#{÷îeýúõM.\8ñÂ… étº_ŒFã‡Bˆ·û$I’ÊJ‘Dýû’$I’$‡Ñé`ÊèØ¾ø’’n][ÎÎ0r¤5y¹NwëÚ)­.]`Ék0mçÎ[ÛVÛ¶ðÆrä•$9‚ `I7E±®}û‚V«^¿~}íĉuaaa¨ÕêÕjµtèÐ:ðÁè7lØÀ‡~øÈ®]»†ªÕꙋ僿J’$I’$I’TE‡‚®]­#±6l€Ã‡!§>QëtÖ„å½zAß¾àãsóuV¤Úµ­»ýûaÝ:ؾ*¦îªU¡sgèÓZµª˜:%I*;À’ÊMQ_½^¿DQ”Ó§O×¼øâ‹¥ \¥W¯^ôêÕK÷ûï¿óüóÏ¿ž––6@Q”ÁBˆÓØmI’$I’$Iº«¹¹Á!ÖÀõë~‹è:;W¾€UQZ·¶¾&LƒÁb)_]*øúVŽQf’$É–TNŠ¢ÔÖëõÕ¯_¿æòåË55ª°ºyäºté¢1bDÐÖ­[÷)ŠÒ[ñw…5 I’$I’$I÷OOëë^£ÓAµjŽî…$IE.‹$•™¢(5ôzý®víÚÕÚ½{·®"ƒW¹ªT©BDD„väÈ‘nf‹¢(÷Ux#’$I’$I’$jîܹ<÷Üs¤¥¥ËsÏ=ÇÒ¥KܳзbÅ zôèA‹-0`@¹êxíµ×1bD™ûöÛoyî¹çÈÈȰ۞œ\p¡v!)))åê_®Ù³gÓ©S'Z´hÁ‹/¾h·ïá‡f„ 7UIf̘Á˜1cl¿ÿóÏ?<÷Üsüù矷´]éî XR™(Šâ¦×ë7µjÕÊ/""BëááQ¦ãSSSINN&99™¬¬¬b˪ÕjæÍ›§1b„N«ÕnP¥ÎÍô]’$I’$I’¤ÒÙ¸q#sçÎ%ûߥý™;w.;ou–ôÛìÈ‘#<òÈ#lÛ¶ µZFSöIJB–-[F“&MÊ|ìºuë˜;w.9ù•M:•~ýúÙ•‹‰‰¡[·n¬\¹²ÌmäZ±bcÆŒ!** ­Vkw®ÉÉɬ^½š-Z”»þÒXµjßÿ½í÷ØØXæÎËþýûoi»ÒÝA°¤2Ñjµßùûû×]»v­ÎÙÙ¹LÇÆÅÅáïï>>>Lš4©ÄcEaþüùêöíÛ»999ý®(Š\Š^’$I’$I’n³Zµj1gΆîè®T¨]»va±XøüóÏ9xð Ë—//sàÊ•+ôéÓ§ÌǾð ̙3WWWÛ¶3fxØ¿e˶mÛVæúóË >®X±‚ýû÷óÅ_Øömܸ‹ÅB¯^½nª²jÖ¬sæÌᡇº­íJw&™K*5EQú«Tª¿ÿþ»ÊËË«ÌÇ;ÖîÉBii4–-[¦mÚ´ipvvö8`Z™+‘$I’$I’$©Ü|}}yöÙgÝ wùòe€ržÊµnÝ:|}}iÓ¦M™-OЫ¼®\¹@Ó¦M ì[·n­[·¦J•*·­?` ŒÞ÷•tkÈXR©(Š¢Óëõß½òÊ+Üÿýe>~„ üúë¯ån¿zõê|öÙgZFó¢(5Ê]‘$I’$I’$ÝE^{í5&NœÈ‰'xì±ÇhÔ¨íÛ·gêÔ©dff(Ÿ””Ä{ï½G»ví ¤S§NLŸ>½@¦;wް°0æÏŸo·=--iӦѹsg‚‚‚èÙ³'ßÿ=&“ €o¾ù†°°0Ž=Z ÎË—/Æwß}À¡C‡ ãã?.ñ¼g̘ÁsÏ=ÇÉ“'éÑ£ÁÁÁLž<Ù¶ÿìÙ³¼øâ‹„„„ÄC=ÄŠ+lûsrr ³}G™KHHÍš5cìØ±$%%1bľüòK[¹O>ù„°°0ÒÓÓ‰ŽŽ&,,ŒŒŒ Ο?OXX ,`æÌ™¶k”{=9{ö,aaaüôÓOú´hÑ"ˆ‰‰±½w¹#¸^xáÂÂÂlÁ;°°òÓFÅçŸNTTƒ ¢aÆtîÜ™¯¾ú £ÑX ½¸¸8Þxã Z·nM`` Ý»wç믿Æ`0û>}ú°lÙ2„|ôÑG„……qéÒ¥u;vŒ°°°»2_Û½N°¤Ò©Ñhª~ðÁeºg222xúé§™6íæM=õÔS)æÍ›®L’$I’$I’î,Z´ˆ:pêÔ)Œ‡‡o¿ý6¡¡¡vSÑΟ?OË–-ùè£Ðh4tïÞ¬¬,ÆOÇŽ Mž+%%…ððp[€¬#zÚ¶mË„ 0™LtëÖÄÄDž}öYF‚5jÎ?üP Î%K–NÖçÓ—/_&<<œ;v”xÞ‘‘‘¬X±‚Gy„íÛ·sâÄ N:ÀîÝ»iÕªsçÎ¥N:tìØ‘èèh~øaÞxãëÞ¾};íÛ·gýúõ´lÙ’®]»ròäIžzê)Þ|Óþ«Hjj*»ví² þDEEѦM,X@:uhÕª‹-¢S§N„‡‡³gÏ[Ù;w^b §0ÉÉÉ„‡‡søðáû>Lxxx±ïi®£GrñâE»sX±bßÿ=;w&11‘!C† „à•W^aÈ!v¼#GŽÌgŸ}†——Ý»w'!!—_~™Þ½{›û8÷=?vì˜mÛéÓ§¹ï¾û˜/pà]îW%x_äK¾äK¾äK¾äK¾ä«ò¿€gñ&“Oåz³~~¾r劸Q£F , ƒmû„  ¦L™bÛ*1{ölÛ6‹Å"&Nœ(ñä“OÚ¶:T"!!A!Ä ^|ñE[™‘#G @Ìš5˶Íl6‹Áƒ @üõ×_Â`0ˆ*UªˆjÕª “Éd×÷–-[ [¿ãããů¿þ*¶mÛVàMþŬ"""hÑ¢Õ«W¬SâöïßÏã?N—.]låjÖ¬ÉÔ©SK}^·SDD=zô°»<==™9s¦íkŽJ¥bêÔ©èt:.\ÀÞ½{9räÇç‘G±«V«™5kþþþÌ;³Ù\ª¾ÄÆÆ²iÓ&úôéc·`€N§cúôé¼ùæ›F¼½½pà@Ö®]KBBË–-cèС$''3fÌÖ¯_XG_¹ººÒ©S'Ûq¹«øøøuÆÅÅ•û< “{î999ö¶ea"""hÔ¨uëÖ-°¯¸ûÊÓÓ­Vk»¯Š, s_Y,»d÷jµš'Ÿ|’ØØXvìØÁÊ•+ÉÈÈÓïb2€%•¤–ÙlÖ5nܸLEFF2yòäRÿGU¦Õª…^¯7 ‹,¤(^Àï€_…w d ð)ŠÒËmK’$I’$I÷˜sçΑ––f·-55•þù‡æÍ›ãââbéUhQQQèt:‚‚‚JÕf³fÍP©Tv«æŠŽŽÆÙÙÙnÅ¿#F ×ëY±b+V¬@«Õ2bĈҞb©[3zlݺµÀ¾´´4^{í5[^°ÂX,>ùä¦L™XGx=öØcüúë¯|ûí·vu¯[·Žnݺ¡×çMV騱#`ý>t£ü9ÀJrãªÂÒíæ{ È8q¢TíÜ82¿cÇŽÈ]uíÚ5bcciÛ¶-@±÷Un1ŸRˆjÑ¢@¡Ó<·lÙ‚³³33f̰m5jŠ¢Øî+/// Tª¶¤; `I%© E-JEޏ*¬n___#PܸÒ/±æ§rðŠâîÀ>H’$I’$I÷€ììlÞzë-, ` ~Lœ8‘ëׯÛòOÕ¯_Ÿ®]»²nÝ:–-[fwüçŸÎÑ£G4h^^^¥jÓËË‹rèÐ!-ZdÛ.„`úôé€uŠW.___ ÀÚµkùã?xðÁ LKLLdãÆ…ÅJkРAx{{óí·ß²{÷n»}ï¼ó_|ñE|½ù©T*V­ZŤI“8xð Ý¾‹/Ök™Í–-[ š4iBŸ>}øí·ß˜3gŽmûÁƒ™8qb©ÎA¯×“˜˜h{?s·ýH§ªU«¢V«Ùºu«m*ÀÚµkÙ²eK‰íÄÄÄpòäÉ"XW®\±òÀ7n&“Év_µoßÞ¶ÀÆme…¼÷Þ{\¾|™#F ÑhJuî7¦mÛ¶¬_¿ž 6ض›Íff̘‚^½òÆ Ñ¡C–/_NDDÆ ÃÉÉ©TmIwžÒÝEÒ½ÌUQñïÊ•†«««Ü Ý©(M€Çok‡ ¼Œ5q¼$I’$I’$ÝNNN,X°€¨¨(xàþþûovíÚEÏž=yùå—måæÍ›G—.]1b , I“&8p€­[·ÄW_}U¦v¿øâ öîÝ˨Q£øå—_ 
"22’={ö0jÔ(úöíkW~ôèÑüöÛo\¿~Ï>û¬@}ûöí£oß¾<øàƒ¬Y³¦\×ÂÝÝ… 2tèPºtéÂÀ©S§{öìaçδmÛ–qãÆ[ÇÔ©SéÙ³';vdÈ!T«VcÇŽ±víZZ¶lɈ#ضmYYY…¾ÿþ{ºvíÊóÏ?ÏG}„§§'ÑÑÑÜwß}ÄÅÅ¡VŸÊ700;wÒ A{ì1¦NJ` õÙü{ï½Ç¢E‹X¶lMš4!,,Œ¥K—B=¸té7n¤OŸ>¬]»¶ØvÖ­[‡““ݺu+òZNš4‰¿þú‹àà`¶nÝÊ¡C‡6l˜m…@•JÅ¢E‹ ¥oß¾ôïߟºuëÉÞ½{iÓ¦M™W_üþûïéÞ½;ýúõcРAÔ®]›7räÈÆ_ oÚèÑ£yæ™g€›_@ªÜä,©$&!„RÚeOo—ç>‹Øý•çÞ~E)W|I’$I’$I* ooo6mÚ„³³3?ÿü3L›65kÖ Õjmå4h@TT/½ô111üôÓO¤¤¤ðÁ°oß>ªV­j+Û¢E BCCÑé¬ë ¹»»J£FlerW£{饗8qâ„m5¸9sæðÃ?ègŸ>}ðõõÅÏÏþýûØïëëKhhh©»ZèÈž°{÷n}ôQöîÝË¢E‹ÈÊÊâ£>bãÆ¶ž|òI¢¢¢X²d žžž,[¶¬Ð`XXX†ÆÛV¾”îNJiW'îMŠ¢‡ ]I£,^~ùeÛÚãÇ/s4>—ŸŸ_NbbâSBˆ¥v;¬+^*>ùVùõBˆ%“$I’$I’îFŠ¢<‹7Ÿ3çrUpøÔ:¥ëÆiw7&55µÂ„ß çΣAƒ¼þúë|òÉ'ŽîÎ-‘––FDDmÚ´¡^½zvûÖ®]K¿~ý˜6mo½õÿíÝ{pU彯ñïÚÉN„[Ab‘)GAMQ(MA œê©"©ö3j=ju´*V«ÅªˆÖ äR£!r‘‹"7!‰I·;ɾ¬ýž?""„ì\×>ŸfÖ^{­w=+£ üxßßû C CÓºukؼy³ÓQªµqãF’’’xþùç™6mšÓqä,ž|òIžšûå“Ë«¿ølö³Ãg–Š„¯L¨X.JKK9~üx4q–¯¯&¼ŠW7:@DDDDÄIÆüqî½÷^‡Ó4¬ &0jÔ(|¾S F¼^/3gΨ²ç”Ôœßïç‰'ž 666Ø—KÎ_ê%çdŒ)Œ‰‰ÉÛ±cG»þýû;¨Ø ÃTL<ÛÖ½;O """"âc ½{÷¦°°ƒr×]w13é|Ò²eK¦L™ÂK/½Dbb"ÉÉÉV®\Izz:Ó§Oi‰¤œ[^^ ??Ÿ£GòðǼ4]š%Õòûý«V¯^¨þÊÆñÃö©û1ÇÏòu§FT½pÌ$""""çaÆqË-·8£J–eѳgO"##™|Ôá/€""MUÅ ¬RËãti",Ëêèv»÷Θ1£åSO=Õ¨køßÿ}n¿ýv;$cÎ=ËʲžlœdÕÚô˜°ÙÅQDDDD—eYÝ€ Ú9FD¤‰ùXÏ*5q—cû|¾ Ï<ó K–,i´ç~ýõ×Lœ8Ñ<\mñªÂL ¤c…êI¯DDDD~ò²páå¨Ó1DDš #øñ±C,©cÌGÀŸÆŽk§¥¥5øóöíÛÇСC}¶m/ φt“1Ǩ(b9m+PÕn‰""""òaŒ IÇœN""ÒÆìSKj̶í§ü~ÿ Æ ³,XÐ`ÏùòË/¹úê«}ÅÅÅ©^¯÷S³õ®V4T¶”ã1Æï` 6ßpõo© ( °¤¶lÛžaÛöÇŽ˜6mZÀãñÔÛØÆ^zé%®½öZ»´´ôŸg„1¦f++–íÝ|UoÁBWŒÆ˜=<[DDDD‘Íçì§þþÐ,"òS ظ€M*`I­Ù¶ý¬1æ†W^y¥ W¯^Þ”””:¹yóf®¹æïôéÓ=~¿²×ë½ÛÔv“1G€ÁÀ§uºàWãäì/ ?«8F³°éÔ*"ÒÜì1Æä©€%ubŒIõz½—dee½>bĈ@ß¾}½o¿ý6ÅÅÅ!áõzYºt)C‡õ]uÕUfÛ¶mŸøýþK1oÖCÀ"àF`<µÛ†â¡¢ïÖe„Öh^DDDD~Z¶ââéNÇiBöáÅÇ2«fm…DªfYÖÅ3\.×–e5»îºëÉÉÉQ‰‰‰tíÚ•¸¸8Š‹‹ÉÉÉa×®]¬Y³ÆŸ––fÊÊÊLDDį×û´1¦a–ýYV3²n€.@t-G3À! ‹Š^‹1¦ RŠˆˆˆÈyÊr[˸‚¡ŒA ~Â:ÇvææKn`cîF¶ÙvÆ5.ËEà,›™Wu¾¦’:'1:a4­£[“[šËâ}‹ùúÈ×uwüe㉊¥ ¼€ù{æнMw†tÀš¬5ì>¾»ÎÏ\X×;IDAT©‰úú™‰¼ÀcØømŒYét9c2€ß[–5š––6ä‹/¾Höù|—ø|¾æ§_ámÖ¬YfyyùZÛ¶S¿ß_ÔÀm`å¿DDDDD—ŸwÙF2£p¡¿‰ýdõŠëÅìëgðК‡*°Ú&ðBò ÌX5ƒ]y»*ÝwÛ¥·q{âíŒZ<ªNÏŸ8wnz +x®Ø[\/¬g~ù ][veWÞ®`«_Ç~Á÷ôé¤F+`µ‰nÃc×>Æö£ÛysGÝ÷ˆ¾xøaƒ6ý¶)õÎã>úá–eµZ@¡mÛ%%%šþ'""""?%âç5vãær§£H¸¹»Ïݼ:ôU¢"¢˜±jF¥ïþ=úßÜ|ÉÍõRdšúSƒÅ«9;æPä-"-3­Î㆓~ûñño>æ‚æ0eù§ãHmmÁ|`Œ)°¤‘c B§sˆˆˆˆˆ8ÅSjEZKØÂh.ÇítqƦC›èÿvrJr‚çûuìGTDÔYïÐe@½=¿slg²‹³¹{ÙÝõ6nUR3Sƒï›Y”ÙàÏèѶ4¿ Qž% ¤øŽ ÿü¼ÃÏxcûØÆ¦eTKúuê€?×Ï¡ÒCŒí9–Þí{ÇÛs,¹¥¹,ùn ·ö¸•舊ֽíbÚ1¹ïd2 3Xž±œ¾ôåš ¯à“ŸUœcTÂ(:4ï€/àcÎŽ9\Öî2ƤM³6¸]n&÷Ì ß þùm°FpV‘®HF&ŒdPü ZEµ"§$‡UY«X‘Qýæëm›µ ¾o©¯”ceÇ*;¦çÅ"&2†½ù{™·{û öW㆟Ý@·VÝ(ô2o÷<Ædô¥£iÝ–y;ycû/?ÀÀøüºÛ¯ƒ÷ŒˆÁðñÉ.nÈ}½¤^}Dp?«OžRw‘Fd¹­m\ÅåŒR3÷óÅ‹¿~‘?\ù‡JçößÖÃ[Ùxb^ˆ¡Ü_NòEɬWÑ–÷¡5ñ—a÷ÄÝôŒëyƸÞÀç>?ãü¢}‹½x4^ó Ï z€ÜȲôeÁk6ݱ‰þúSì-¦Õ‹­˜Üwr°Õé•âÂW.¬òÝâ[ÆóÑèè{Aß3¾K9ÂèÅ£ñÚ^¾Ÿò}°V⛉Œé9†ù#*úaMút¯o€Ž-:’r[ ý:ö«4¦×örÿÊûymÛkÁs‹G-æÖ·²/)Rx ß•îÉ)ÉáʹWräÄf_?›É}'Ÿ‘õ†ÝÀòŒåU¾§„/ðg¼x¸ßüA¿aŠˆˆˆˆˆ4&?Oð%††ÝÂHɽ﯊¼EÌÜ4“ç7?OçØÎÁâUu6ÜPivІƒHÍL¥ÐSHjf*¾€¨h¶žš™Êö£Ûkœ3§8‡ÔÌT<¶€¾¤f¦²&{M•÷XXÌ1?X¼Z—½Žç6=ÇÖÃ[¸©ûM<:àÑgxïæ÷‚Å«{ðØúÇØ›¿—¨ˆ(^ú*¿èü‹3îéѶ÷_y?o}ó®{4¸$±Kl—`ÑjWÞ.vݼgWÞ.R3SÉ+Ë«UNqÀÀO ðöé§µ„PDDDDD¤q-ÂÅ÷¬âbFœ¶œ4I÷]q_ðøæ…7³6{-îýµã×VÚí¯*w}rò7¦^1€‰Ë&w!:(y÷ç×,Žýû:h­r¦H!å@ Ù÷fÓ%¶ Ù%ÙÕŽ5¨ë `!)å@ ·,¼ƒ!&2†ÝwÓÜÝœKÛ^Š……!ôÕ]I“H¾(€7w¼ÉÄexù«—Ù{Ï^âšÅ1½ÿtÆ,Sé> ‹‡V?ÄÌM3ø,ë3ÖþWÅÏûòö;#ÌÚ2‹ÜÒ\>¸å^Üò"³·9óL”ø /6OcÊOÿJ,‘FdŒ X–õëYÄUXT½zKœÛåæÊŽW°/_°x°>g=;ŽîöÀjŠÅ ¿³ó`‘ªÌ_Fï9½)ñ–ÔjÜÁ]·ÞJ÷6݃Ÿ·ÝÎஃùe×_žõÞ™§úníÏ?Õ+«]L»Ze‘0³ƒ‡ãÀ+?þJ,‘FfŒYjEZËYH2¿'J󰚦VÑ­ˆtUüµ:·4÷Œï³Š³­€aETú|2W]tlÑ1x|¨ôP¥ïj[¼¸0öTÕöå!/ŸõšÍ;C™¿¬Òù#¥G‚ǧ7„ÿñûKtX!Àdc̉­–ˆˆˆˆˆˆlî#›]lúU{µ„¡‚ò|n—›ö1íÏø¾CL‡FË[ésLdLÇ<Ù/  Mt›:wRAyAðø½]ï‘U”uŽ«++ôOö“ó€âÃÅgÆ6ÿ>Û%*`‰ˆˆˆˆˆ8À³ßаþć<ÉÅD¢PMŽmlöì§W\/zÆõ$¡mûò÷Ð6!¸¼0ÿxö”°ÏzÞã?U`jÕªÒýñ-ãC‘*ìÎÛá;A˨–<ºîÑJMÔ#\ Ë•cÔ¢Q¤¦Ð,²YEYŒ\4’”)ä—çSà9Õo  ÐSH~y>Ež¢Þï¯[ÿÊí)·W* |uä+†/ΦC›ÎÛ{jl¯í ¾ïé=µžÛô÷,»‡ŒÂ ,,:µè„…Åê¬Õ$ÏKOþŒOŽa~ô¿ÈÉó%¾S³Àößà ›_.A´M”+*¤÷•FT¼†ß`Zu—[¡¬‘†eYÖ,ñ\\åt Õµ]®%`de’[’[©À²éŽMôïÔŸüò|â^Š 
i¼ö1íiÓžôÂôJ—å¢{›î”ûËÉ)Ωô—åââÖWä(Ì<£ÈS_â[ÆÓ*ªE•–éÕU—Ø.´ŽnMvqv¥âW]µp·à¢Vq°ä …žÂêoÆcoà#ƒØ\mŒ9RÝ-*`‰ˆˆˆˆˆ„ ˲îÅÅËLÀE§ÓH(^ö:ûLàï0qÙDü?7]rŽü·ËÍŠŒ\ÿ¯ëN*&üÀ»øÙM>6ý1߇r› X""""""aÄŠ°þÃÓŒÂE’Ói¤:×Å_GÚ¸4Ü.7P±D~˜Ã cEÆ Ç2Š„r`.>2(Àf°1&äFqê%"""""FŒmžÅ0‘EX¡žXán]ö:n[|[oÅðãv¹q»ÜL€Í‡6së¢[U¼Іí¯â%ƒ¬f^…¾Ëš%"""""–,˺  I$‚1Dãt"©ŽËrѱyGbÜ1*=T¯}¢Dš´t*v,g'~†còj:„ X""""""aʲ¬Ëp³˜f\ÌDÑÍéD""5`€uR0Àû˜lŒ©UeW,‘0fYV "xÃx~EÉ€ÛéT""Õ8 ,ÄÇ÷x ð[cÌ‚º §–ˆˆˆˆˆH`YÖX"x™´bÑôv:‘ˆÈYx€Öap±?“1uV,‘&²¬–¸ø? ð3l†E§S‰ˆ^à /Žb3Õ³´¾†WKDDDDD¤‰±,ë2\<Ša âcÍHDûÌ‹Hã+6kðcSŒŸ§—1eõù°DDDDDDš(˲ºãâAà.¢1\›+±¸Èéd"r^ó;-øØGÄÏ“À[ÆOCÞVc A,‘óˆeY7ÁàWØ´¡^Ú QtÀEs ˆB;ŠÈ)åTÌ®òùÀQüÆÇq¢ `áf/>–iÀrcLycESKDDDDDäÜŸ.×ÁX’$I’´ˆøp!mõ[a…Ø{ï½Ùn»íØ`ƒ X}õÕéÛ×{Ý% `öìÙLœ8‘‡zˆÿû¿ÿãÚk¯eÆŒívÊÌÛm0À’$I’¤¢ˆØøл\ŸQ£Fqüñdz÷Þ{³ÄKt\q’Ô…Íœ9“‹/¾˜Ÿüä'¼ôÒKmul™ÿZø£–$I’$1xRª½ÿþüèG?âˆ#Ž0¸’¤͘1ƒã?žóÏ?Ÿ62©'€eæû[¶ °$I’$õxÑ ¸ؼTûJ+­Äu×]ÇÆoܱ…IR75fÌ8àfÍšU®Ë/3ók þÄK’$IR‡¿*Õ¶Ê*«0vìX†ÞÁUIR÷6vìXvÜqÇr!Ö|`‹Ì¼ °$I’$õpÑxXiѶrß}÷±Þzëu|a’ÔŒ3†½÷Þ»ÜqÂÛ2ó“½:¶,I’$Iêt¢DxpÞyç^IRí¹çž|ë[ß*×¼]DlîÀ’$I’ÔÃEăÀb—[m³Í6Ü~ûíDD ª’¤žcúô鬽öÚ¼üòË¥š›™‡¸K’$IR#)^œrÊ)†W’ÔÌqÇW®y¯ˆà,I’$I=VD œµè÷ 7܇z¨IRÏ4sæLV\qEf̘QªùÓîÀ’$I’Ô“.õñ _øB—!I=ÛÀÙ}÷ÝË5og€%I’$©'Û ÔÇO}êS]‡$õxmü³w”–$I’¤)"–VYô{Ÿ>}5jT *’¤žm“M6)×ô,I’$I=Õ2Àb·´¯¶Új,±Ä-(G’z¶áÇ—kZÎK’$IRO5¨ÔÇ!C†tt’$`РAôîÝ»TÓ`,I’$I=UŸ’û”ü,IêeþÜÇK’$I’$Iš–$I’$I’:5,I’$I’$ujX’$I’$IêÔ °$I’$I’Ô©`I’$I’$©S3À’$I’$IR§f€%I’$I’¤NÍK’$I’$Iš–$I’$I’:µ%Z]€$I’$IÍðóŸÿœ±cÇ6tÎAƒ1pà@V]uUV_}uÖ_}Ö^{m"¢¡ëtv¿ùÍo¸ñÆû>räHN:餎/HÝž–$I’$©[7núÓŸš¾Î°aÃØa‡8à€Øi§èÝ»wÓ×lµ‡~¸äÏíèÑ£;¾õ!”$I’$©o½õW\q»í¶£Fâšk®iuIR·c€%I’$IRƒL˜0=ö؃Ã;ŒY³fµº©Û0À’$I’$©Á~ýë_óÙÏ~–Ù³g·º©[0À’$I’$© þþ÷¿óÕ¯~µÕeHÝ‚—¸K’$I’zœ³Ï>›ÕV[­¢¾S§NeΜ9Ìœ9“éÓ§3eÊž~úiÆÇÔ©SÛ{饗²Ë.»°ß~û5¢l©Ç2À’$I’$õ8ŸùÌg5jT]sÌ›7;3Ÿüä'ÜtÓMeû}ï{ßc·ÝvcÉ%—¬k=©'ó¡$I’$I5èÝ»7£GæÆoäœsÎ!"Jö{饗øÅ/~ÑÁÕIÝ‹–$I’$Iu:òÈ#9î¸ãʶ_tÑEXÔý`I’$I’Ô'žx"«¬²Jɶ &0qâÄ®Hê> °$I’$Ij€pÈ!‡”m¿÷Þ{;°©{1À’$I’$©A¶ß~û²m“&MêÀJ¤îÅW%I’$Ij#F”m›>}zÓÖ}ñÅ™u]X’$I’$Õiüøñì¿ÿþdæbmk­µ|pÕsΛ7}öÙ‡»ï¾»lŸˆ`ß}÷å/ù 3fÌà•W^áá‡æùçŸgÖ¬YÜsÏ=uÔQm^ ÿÔSO±ÓN;5dwפI“˜9sæbßwØa®ºê*fΜɋ/¾È#<«¯¾Êk¯½Æ¹çžÛæî´·Þz‹}öÙ‡Y³fÕ]Ÿº.,I’$I’jôæ›orÚi§±Å[ðúë¯/ÖÞ§O~ÿûßÓ§OŸªç>í´Óøûßÿ^¶}Ô¨QÜ}÷Ý\yå•ì¹çž 0àíÁ[lÁOúS&L˜À¾ûî[v®Ç{ŒC=´êÛ³ì²Ërå•WrÓM7±÷Þ{/¶+l¹å–ãÛßþ6ãÇçˆ#Ž(;Ï„ 8ùä“^ŸºŽ%Z]€$I’$Iíí·ßæ­·Þª¨ï;ï¼ÃìÙ³™:u*³fÍ⥗^âÁdܸqÜ}÷ݼûî»%ÇõîÝ›K.¹„Í6Û¬êú&NœÈüã²í£GæÚk¯eðàÁÍ·òÊ+så•Wr 'pÚi§•ì3fÌ.¿ür>ÿùÏW]o)+¬°·Þz+#GŽl·ï’K.ÉyçÇòË/Ï 'œP²Ï¹çžË7¿ùMV]uÕ†Ô§®ÅK’$I’ÔãlµÕVMèСüîw¿c÷Ýw¯iüñÇ_ö©Í7ßœn¸a±W•8õÔSÉLN?ýô’íÿõ_ÿÅ^{íUÑýYm0`7ß|sEáÕÂ~ðƒðòË/sá….Ö6{öl.ºè"wbõP!”$I’$©vØa|ðÁšÃ«çž{Ž1cÆ”l4h—^ziMáÕ'Ÿ|2ŸøÄ'Ê®}ÅWÔ<÷§Ÿ~:ë­·^McÏ<óLÖXc’m_|1óæÍ«£2uUX’$I’$5ÈZk­Å÷¾÷½²L%.½ôÒ²!͉'žÈˆ#jž GûÛßÒ»wï’í¿üå/ëšýõ×çÈ#¬yü AƒøÑ~T²íÅ_䦛nªynu]X’$I’$5È“O>Ég>óFŒÁþðæÏŸ_õåv_õïߟ/ùËõ–Àˆ#Øu×]K¶Ý{ï½¼òÊ+5Ïýµ¯}^½ê‹öÜsO†Z²íª«®ªknuMX’$I’$5سÏ>ËÈÖ[oͤI“*÷æ›oòð×lÛk¯½Xzé¥U"‡~xÉï™Éßþö·šæ0`p@=e…°®Ü«‰µÖ¦®ÍK’$I’Ôã 2„aÆUü£Ö;§î¹ç6Ùdî¿ÿþŠú?ðÀdfɶO}êS5ÕPζÛnKß¾}K¶7®¦97Ùd–Zj©zÊz_¹{º^yåž{¬¡®ÃW%I’$I=Î]wÝŨQ£ª3þ|¦M›ÆôéÓyóÍ7yâ‰'øÇ?þÁ 7ÜÀO "6Vmu`õV îcë­·æÎ;ï䨣ŽâüóÏ/ÙçñÇç´ÓNã´ÓN+;ÏK/½Tòû2Ë,S6Ì©ÇòË/_òû«¯¾ZÓ|Ë.»l=å|@ß¾}2do¾ùæbm/¿ürÃÖQ×`€%I’$iQãÒO”I*©wïÞœ{î¹¼òÊ+e_Ê;ï¼ó8öØc2dHÉö™3g–ü^®½Ê½ô7wî\Þ}÷]úôéSÕ|ºÿjrÖŒ3ºŽ:?J’$Iú€Ì|Øø0·ÅåH]JDpÁ0lذ’íÓ§Oç²Ë.+;~Ö¬Y%¿7:Z \€0gΜªçktÐVn¾ZjS×f€%I’$i1Yp.°PÛe8RµÜrËqØa‡•m¿å–[ʶ•{°YÞ}÷ݲmýû÷¯z¾ùóç×SNÅóÕR›º6,I’$Ieeæ=ÀúÀŸ[]KÔvÉT½÷Þ»lÛ½÷Þ[¶­\03mÚ´ºkªfÞ¾}ûÖtçÖ;ï¼SoIðöÛo—ün€Õó`I’$IjSfNö¥{)|8®ÕE¨ûÚpà ‰ˆ’m“'O.»ÓjðàÁ%¿— rêUnÞÖ4_¹#µ*°-·Ür ]GŸ–$I’¤v-r¤pR«ëi€ï·ºu_ýúõ+F½÷Þ{eƒ™•W^¹ä÷)S¦ðÞ{ï5¬¾ʽz¸ÒJ+Õ4_#_œ5kÓ§O/Ù¶Ê*«4lu X’$I’*V|xMó=ûì³õ”óãÇ/{ÖZk­Õ°uÔ5`I’$IªJfNv£p¤°ü ÐÓ$ààì蛲ÕãLš4©lø2pà@ P²mÔ¨Qeç|ðÁRÛ³gÏæÑG-ÙVk€5qâÄzJú€þóŸ%¿÷êÕ«ÍŸ'uOX’$I’ª¶Ð‘ÂOϵ¸œJ½ ì_ दºãŽ;ʶ­½öÚeÛ6Þxã²wgÝzë­u×µ°±cÇ–}…pÓM7­iÎÛo¿½ŽŠ>è¶Ûn+ù}½õÖcРA [G]ƒ–$I’¤šeæ8àcÀ 
­®¥Ç@JM÷ë_ÿºlÛÇ?þñ²mË-·l°Aɶ«®ºŠ©S§Ö]ÛmÕ¸ÝvÛÕ4çË/¿Ì¿þõ¯ZKzß”)S¸öÚkK¶}úÓŸ®{~u=X’$I’êRÜÑ´+ûHáuÀ9­.B=Ãõ×_Ï=÷”ÏJwÝu×6Çï¶Ûn%¿Ïš5‹ßýîwuÕ¶À«¯¾ÊÕW_]²mÝu×­ë’ôsΩÿj—\r sæÌ)Ù¶Ï>ûÔ=¿º,I’$Iu[èHáVt¾#…/à½Wê /¾ø"_þò—˶èCjwÑATöáI'Ä‹/¾XWßúÖ·˜;wnɶC=´®¹/½ôRÆ_óøW^y…SO=µdÛÈ‘#Ù|óÍkž[]—–$I’¤†ÉÌûMé÷9vØa ð~{fòÀ0fÌ.ºè"^ýõ²k-·ÜrŒ3†>}úTUã¢"âÙm·ÝÆÚk¯Íá‡ÎÈzë­÷û½ÆÏþð.¼ðB¦OŸ^vÞþð‡|⟨«6umX’$I’š*3߈ˆï§Ðü™ ì—™¥/Ñ‘:PDp 'püñÇW=vÀ€\sÍ5l»í¶<õÔS%ûÌŸ?ŸË/¿œË/¿€VXå—_ž·Þz‹×^{­¢#|Æ ãšk®aøðáU׸¨£Ž:ŠsÎ9‡ùóç¿ÿmÆŒœy晜yæ™,µÔR¬¼òÊd&Ï?ÿ<³fÍjwÎ8€N8¡îÚÔµ`I’$IjºâN¨GÄ-4ÿHá±ÅË䥖Zi¥•¸à‚ Øc=jžcå•Wæ®»îb×]w-»ka“'OnóùE­¾úêÜpà Œ9²æ¶ÓN;±âŠ+rì±Ç–lŸ6mÓ¦M«x¾ƒ>˜‹.º¨ì«Œê9¼Ä]’$©‰ˆ5"â ˆ8/"ÆFÄÅwFÄ/"â°ˆÖê:Õsƒ¥fÞÂ<8¯‰óKí>|8?þñ™8qb]áÕ+¬°wÝußùÎwèÝ»w*,8äCxøá‡^-pÌ1Çpæ™g–½„¾}ûö嬳Îâ7¿ùMCÿšÕu`I’$uq±LDwÏ¿޶6)þØ øð+à券$"VoUÍêž"bhD¬_A×ÊŸ%«Î³À¡Þ{¥ŽÖ¿6ÞxcŽ:ê(î»ï>ž~úi¾ÿýï3hР†­Ñ¯_?~ö³Ÿ1nÜ8vÞyçšw$E;ì°·ß~;_|1C‡mX ;æ˜c¸ñÆYsÍ5«ûÉO~’x ¡Ê«ëó¡$IR«G‡KV1´?ðE`¯ˆ8ø™ÿ¯zDÄ`àpà8àˆ™™ïupïdæÔ^WØèÑ£+~õ¯C‡eàÀ 8Áƒ³ì²Ë2räHÖZk­Û%´ÑFqýõ×3aÂ.»ì2®»î:þõ¯1oÞ¼²czõêŨQ£Øc=Øwß}YwÝuë®cË-·,ùú⯠~æ3ŸáÑGå’K.ᢋ.âÁ,ûârË-ÇÎ;ïÌW¾ò/kWIá¯U$I’º–ˆX8™BÕ·S^’™ÍÚ£nj‘àjᣩfæÚ·)Ðè;ª¾“™çV3 ¸[ì‘E¿o¶ÙfüãÿhXaR³MŸ> &ðÜsÏ1uêTfΜÉÀ6l«­¶ë­·lu™Lž<™Gy„^x3f°ä’K²ì²Ë²Î:ëðÑ~´®#‡ê> P25À’$Iê""¢ðmà‡ÀàOff×à9ÕMµ\-ð$PvV¬¿Ÿ­v'¡–$u>å,ãMI’¤*E݈hÜ™”ÊÖÜø'p6¯~œØà9Õ EÄàˆ8x8ƒÒáÀZÀ~TÖ ÀÁƒ•¤îÍK’$©zgP¸$½CDÄWÛõ<õ{À÷3óÐÌœÓà¹ÕDÄRñC aQ[ÁÕÂ~Í÷þ]`ÿÌ|³ÉëH’ZÌKÜ%I’ª[R¸4}jDüª™FGD_à|à«M˜þ5às™yGæV7QÁQÁR¸8©vE™÷6y IR'à,I’¤ ï ú…_C- |¿Éký‘Ú«iªrORýØÔðJåTqTpa \Gá¿[»eæƒÍ¬‘½Wç7y IR'a€%I’T¹£€Q ýù·‹/6ÃyÀUôŸüð‘Ìš™+KQ¸‡è±…úýØ*3_hX¥êV"boª;*˜À_€:(¸‚B°æ½W’Ôƒx„P’$©±:‹_t>8™Â‘ÂF®µ/ðõ »O+Öõ‹Ìœ»pCfήŒˆk€ÿÆgæOY«º¥Ç!ô[ø¨`G„V xï•$õ@îÀ’$IªÌÏ%¾#µHD Ω°ûÍÀÚ™yÞ¢áÕÂ2sNflx¥Jdæ£ÀŸÛêBÇ\Ô÷2ó¾^S’ÔbX’$I툈½€]Ë4÷~ÔÀ徬\A¿ó€2óÕ®--p20‘o­® pïÕÏ[°®$©Å °$I’ÚP|…íÜvº}¶ø:a#ì]AŸK2óÈÌ\4`b‘]X!¸x8È{¯$©gò,I’¤¶ |¨‚~?¶.×+s3ó­6ú, lÕÎ:ïß© žrk¬ l|Xø…£‘ƒ)ì&{x¸ ã¯í ÿ8%3iq-ïdæÔ×!Ij,I’¤2"bcà[vß*"vÏÌk™c)àh /^ÓÆýÛYç±¶B°EEÄ`[`›â5Ú²zñ?÷Έó€Š«ÉÌSÙŽÀŽð]ï½’¤žÍK’$©¼SØR©EÄõ™9¯xû7ïËÛ¿?ËÌ—ËŒVÁkGÄ*™ùR©Æb`5šBhµ°Jõ/ª7…àmçˆØ.3_©c.©V×ÿÓê"$I­e€%I’TÞUÀ8`Ó û¯ |9"8‘Åާl•2¸‚5ú·FÄÙÀÀRÀ:Àú‚«z«r> ÜÛdæ”&̯)îø[93'´º–yï½’$a€%I’TVffDÜZŰÿm§ý°ˆ8+3Ÿ+Ñ6©Â5>ü²Šša$ð3à ^W(>6p8pð\DlÒ BŸ9À~™9­Õ…H’ZÏW%I’Ú™·76pʾÀË´uö]3FÄö­.Bÿƒ‹!ëóÀŽ¡nìÖÒÂãè¾x(Iêd °$I’Ú÷}`~çûbD¬½èÇâåìÿlà:Àé­.Beƒ«…Ññ•5ÌU™ùóV!Iê< °$I’Ú‘™ãK8eoàä2m¿jà:ÍðñâëŒj ‚«ºò.¬§ÃZ]„$©s1À’$IªÌ‰ÀìÍ5 ¸©LÛeÀ Z§Y>ÛêzšˆX*"N^ íàja'6·ª¦ðÞ+IRIX’$Ií(¾ìvPï¥Øoß>’™¿)Õ!3ߎ­sf«ôUFÕi¡WÏRص7´‚a \|­™µ5É÷:øÞ«’GƒçÏoä‰aIR5Êü3x¾¯J’$•KGP”–®cª^ð;»Pµ)3Û_ªcÍöÌžÞ¥J­QáØU›T“ŠyU°’ÝVP®®NꢗŸ_•™ÿÓÁk¾SêãÌ™3;¸ IÀܹs™;wn©¦™X’$IåÝlPç;fækUŽû ÐØ¿ÎõHàßÀ-ÅcÓ"bp+•í®r{JEÄþÀ…T¶Û o¯NÎÌGšVXsµêÞ«’G_~ù厮C’D›ÿüêBI’¤òJó«Ò:‚¨ªdæ»À(ì«å> ùÀ£ÀEÅyVÊÌõ3ó¨Ì¼nÑ`™9ƒÂŽŸJtö;ººº§€¥*è·à¨à¦™¹W¯ZvïUfN¡Äÿ¾¦M›Æ‹/¾ØÑåHR÷裖kzÖK’$©¼ÿ¥páz=ú'Õ20 ~ §p”l¥w?Íþüøði`Xf®—™_ÉÌË3srK>Há¸c{^©è/@5ÉÌ€¿µÕ…ÿW»uÑã‚ ûn‹ÿ/õqìØ±]‡$õxwÜqG¹¦Ç °$I’ÊÈÌÙÀ© ˜êK±Nu¼™™gfæfÀ `}àcÅÊÌ%‹»«öÎÌÓ3óæJîÚ*¡OñG{n­anUç4 »WÊÌ Z\Ã]¥>þå/éè:$©Ç»æškÊ5Ý™õ>¦#I’Ô}ED ¨sª1™¹WJjšˆø"pI;ÝX%3Ý…Õdq#°#]ÿröÅDĦÀåÀ&­8:¸H-;7.ú½_¿~<û쳬´ÒJ-¨J’zžÛn»O~ò“¥šæ«ºK’$© Å»¨NÐé  IDAT*Ñô"ðjSí›7¤¨&ˆˆÀWÐõNës0بí¸Z`6ð¹V‡WE·Pâ^·9sæpöÙg· Iê™N=µì¦÷»2óe,I’¤ö]Ná?€×ïkÇW1G?np]‹/Qõ+Ó±>…‰_®‚îgT]”j’™ÿèâ—³—•™ÿÊ̶ºx?¤¾¬TÛÏþs&L˜ÐÁIRÏsÕUWqÛm·•kþ€G%IR·½€Q¸?*(lEx43gV8Ï.ÀÆÀÏŠ/ö½)\ž^ÍýV;eæMUô¯XDìœ |63® ÿ^<PÁ×gæ®õU)u>±ð$°X¼á†rï½÷Ò¿ÿŽ.K’z„—^z‰ 7Ü7Þ(ùÈñKÀ‡3sŽ–$Iêv"bI`wà Àh`p™®sk‹2³­WßÚZk/àÏU y˜Â½?¥^¬IDô~ |£øipXf^U¦o`àD ÂWb2…º_ª³Ün-"––Ȩ̈Z]‹ª¿+Õ¶ï¾ûrÅWЫ—X$©‘¦OŸÎèÑ£ùç?ËnÊýVfþ¸K’$u#±p,°åC«rn¾QmðÜlVŰ2³ä‘¥j﮺’³EÝAaÛýCÀ Ño ì¬^Å2³2sl}Õv_1˜Ân¶ã€Ç2s«—¤*EÄrÀÀ°RíŸÿüçùíoKß¾};¶0Iê¦Þxã vÛm7î»ï¾r]¡ð›góÀK’$u±ðÿ(3½ë˜j6ppfþ±Êõ?Iá©J= ¬S¼{§f±ðk`H=ó´c6°of^×Ä5º¬E‚«…ƒí3óÖÖT¥ZEÄÀï˵o±Å\~ù嬾z5ù¯$iQ÷ß?ûï¿?Ï>ûl¹.s€-¾/Ñ=°’$©ËŠˆe#â×víG}á@ಈ8¢šAÅ âïU ù0ðÕjÖXXDô‹ˆó?ÑÜðêe`ëÅEÄàˆ8xžÂb‹îÚ9¥ã«R½2óÀEåÚï½÷^Ö[o=Î8ã f̘ѕIR÷0yòd¾õ­o±å–[¶^|oÑÇ>Ü%I’ºœâ±½/ÿ 
,Ûàéß¾˜™×VYÓ&À8 —ÄWâu —’N¯rÕ€?›W3®{dæ M^§KicÇU)îÂê‚"¢/…»ñvh«ß2Ë,ÃAÄç>÷96ÝtSz÷®7?—¤îiÖ¬YÜyç\~ùåüñdÖ¬Yí 9'3Zô£–$IêR"bY Î'›0ý£B›š.àŽˆ+}«rBfžVÅü½ ª­­ ³€geæœ&®Ó¥/g?8 Zá°±™¹móªR³DÄ àj`ûJú2„‘#G²êª«2lX{¹¦$uóçÏgÊ”)<ÿüó<úè£Ì™Sñ/)~|³Ôc7X’$©Ë(Þuu5°F¦|©ÚQ ‹ˆÿ–¨pÈÛva•|7ºÌ«R¸pþ#ÕWØ®›(¼öótæî’ªÜqµ@×'eæƒÍªMÍU܉u1p@«k‘¤`>…ÿß<µ\ïÀ’$I]BD|¸›Æ‡Wó½ë ¯2ó à7U _å“€m€ÿ«f\;îvÏÌ ¯ *¸ãª”®6ÍÌÝ ¯º¶Ìœ›™_§°3Q’Ô“Ú ¯ÀX’$© ˆˆ-(ì:Z²Ša“€¿÷SØé´ °3°+ÿùM¼·3ó¯ ¬õCÀD`@…CæRx‘ð™*× à`à,`™jÆͧ°ãê'™9¶†ñÝVD|8Ê &…'gæ#M+L-k?ölm%’Ô­¼GáÈà ™9µ½ÎX’$©S‹ˆå€‡•+2‘ÂŽª«2s^‰ù>\N!8Ú#3oT­ ­q&pLCþ˜™û·1ßîÀß3s±] 1؟«†›ÑöûyÀ‚½1™ùr5öñ)*{UÒ£‚=LDl |Ø èÓâr$©«š \Já¾ÍŠÏK’$ujq …Û38…Â/†æ¶3çŠÀ¬ÌœÖ€KÍ¿4ð °T…CØ*3ï)1×)K<73¿×κƒ þ¬þÀãÀÀã™ùN¥=YDÜAá¨f)W=\1X߇ÂK…ÛPùýh’ÔS½ Ü Ü\›™3«ÀK’$uZ±…KÑÛ»·óe`Ï̼¿ùUU&"Ž*~a ˆ/ß"¢plqŽÞŽüÎÌ;]«Wf–Á•JŠˆ•Ë©ü±$uW3Š?^žÌÌ)õNh€%I’:­ˆ8 8ºn¯Rؽԩ.ˆÀSÀŠU »¸ˆÂѤƒ€O,Òþ4°A-¿k©ê-´ ËàJ’¤3À’$IVDŒFµÓm̼¦Îu†-þ úRÇ^îËÌ·j˜÷k.'m¤ÿÉÌo5xN•ÛßÄËÙ%Ij9,I’Ô)EÄò¤h£Û[Àr¥.kocÞ%€[lJû»¤Þ¥ð â)™y_kõ¢ý® |:3oiàœ’$Iš–$Iê”"bC áO[fR°{o¡yúP©FÛR8–7°Æ²æçGgæüJTñ¢]5žÖÏÌ·<¯$IR§d€%I’:¥ˆØ¸½‚®ß]ð¢`D ¡°ãi[ê¬Êù5ð•¬ðRq-°[ןl™ã8g·KC3óùV×"I’ÇK’$uJ±ð »Ï^¢ðò×ÊM+ꃎËÌ3+é#€G€%°î\àsõÞûÕÝDÄ`àpà8àžÌܵÅ%I’¤2À’$IRD Þ¦í;°Zi0ªÒ×#âHàœ:לN!¼º©ÎyºE‚«a 5}<3ïoMU’$©ÑzµºI’¤R2sðd«ëhÃà´*úŸÔsñú?M ¯ "bpDGá>°3ø`xpbÇW%I’šÅX’$©.ÅWý6Ö†óŸû¦^&Q^ž®ô¾¨Eæ>žêB¢Ž6X-3'WÒ9"–î>ZÅs€³€S3snõ%v/mì¸*Å]X’$uX’$©jÑØ8ØÒÎIÀÕÀ¯3ó‘*ÖYxèS[¥¢â»°"bMà¯Àºít}ÂÏÙÿȨ̈ꨯ[(^Î~$p0´Âa7dæ.Í«J’$u,I’T±…v¿¬Pã4·Çfæ?+\ólà{5®Õþ–™;V3 "§‡òÁ]Ds‡¿¿ÌÌVeU厫¸8)3lVm’$©ã`I’¤vED?¦öàjaó€ (Y³ÛY{Ià>`TÖm†ç2sÍZFD/`U 7… ŸÌÌ9,®«2¸’$I 3À’$ImŠˆåßUí2ªÐƒÀÞ™ù|;5¬Ü¬ÞàõgcÛ)ÜÕ5žÂѽWKW0Ç”Ì\¶Áuõhq(ðßT~T0)·<¹š#ª’$©ë0À’$©‹ˆˆ­)ìÐyµ×\¸‘Â.¡fy øTf>ÞN-+—ÛÔ±Ö»À?(c¼øGf¾[f½½€?W0ç“™ù‘:jÒ""bOà/tuÇ•$I=„–$I]@ñÒô‡Û2óðZs} ;“*=¾U×€­3sb;5p p •)|‰B`u_ñ?ÈÌw*)¨x¼ïuÚß…uofnYÉœªLñïóýv•bp%IRc€%IR'§PØA´nf>Ùäõ>D!ðY¹™ë,âi`ËÌ|­’Î1’Ân¬u€þÅϯÏOQØUÑ\m¬ñ$0¢nÿ“™ßªg-®Ì.,ƒ+I’z(,I’:¹ˆü‹ÿ„4ÊÌÏ5q½ÞŽØm[åФp§Ôà:–¿ødf¾WÇ Q<²ø… ÖÛ²KfÞÐ%õ(‹ìÂ2¸’$©‡ëÕê$IR».à?áÀ>±Y×û•‡W \Cá‚÷¡™9è lœ ¼UåÚ['U9¦YΡýðjp[ÔÒãdáwYOÆeæn†W’$õ\îÀ’$©‹ˆÏ—•hº-3?Ù„õ–¢p”o™ ºOÍ̛ۘoð[`÷*ʘl›™wU1¦aŠ;ÎŽ® û9™yT“K’$IêñÜ%IRˆ‚ý"¢’‹ÇŒ ü´Lóv±Scªû€Ã¨,¼zŽÂ¥ëeÃ+€Ì| ØøU5ô~Y¼¸¾f1("NŽˆŠ4·¿PYx5øI­õI’$©rX’$5Y1hz¸ølC ¬ØV{ñ¥¼Fúz}æŸÏÌç+™°xìpàoUÔ±plý? 
"ÖÆ'7FÄ íô_""ž ¸UâèÌ|µÖ»«ˆXªx˜$IRÃ`I’Ô$ñ‰ˆ ÜlTü\ѱ¿ˆØøj;Ý6¾P{…‹­¹í¿¸pEfÞWÍÜÅKÙÞ®bر±R5ëDÄ!^P\»øéÀÃñµˆ¸Hßáq,…W ,Wá2¿ÍÌ_W[[wƒ#â8àYÊï”$Iª‰w`I’Ô`Å èt`—ͳa™9»ñKP¨Úó°vfÎ)1O_ !Ø-™9¡‚ºΪ`ÍÏdæß+è·èü+W[U1ì3³’]aéÿ¾ÔF·ÙÀ —¯V©¢–nöÈ̹5ŒívŠG4ކ?Ï6Ì̵¬0I’Ô­¸K’¤Æ;•Òá^ܲñ_¥²ð ` áÁû"¢WD<œœRá\›VØï û-¨g©ˆ8•Â.§j«7€G+\c$p?m‡WPøù߈Âk‡µ„W—{^}`ÇÕóÀü'¼‚¯1oIa’$©[2À’$©ñni§½½c„÷Õl‘þ¯âëDÄ®À–5‹í{GÄÆÌó‘ úÌ.^ÌÞ®ˆèߣðªá€A•Œ¦'Ã3óü Ö9ˆÂÏÙÈ ç¯Å<àØÌ<¨­Ýs=A;ÁÕÂö­æÑI’¤¶x„P’¤+þKûø6ºÜ›™m+€ýªXö2`u ÷=•rCf–Û¶`Í—Jîœê×Ö¤âÈ/?V­`¾æ?ÊÌ×+¿¾Rŵ˜|%3«¹„¾Û)sT°=ÌÌý›W•$Iê) °$Ij°ˆàU`ù2]Þ–ÎÌémÌñaà1 oKÛ*3ïncÍ)ÀÒÌ3²ÔZÅ¿î½€ÓøÏꕘü893_¨b±7…ð®‘?O ¼ œ œ”™3›0—Pcp•Àõ~îlVm’$©çð¡$I –…ߺµ.KÛ´3ÇÓÀ/Y…`©-ïU8ϧýÛSxùï**¯ø30*3¿\mx™>KáØa#Ýl’™ÇôÔ𪊣‚ Kà:`ÓÌÜÍðJ’$5Š–$IÍÑV€°]sœJcƒ™Ñ±Xø´7*œçÀ‹ˆ¿7Sù%ðû<3÷©ð…Ä!Å@‹ÉÌ›€õ€zøÍ§¨m–™ŸêÉ/èEÄ×€¨.¸ldp%I’šÁK’¤æh/Àjï"w2ó5à¬Æ”ó¾cÛh{¾Â96‹ˆC#âO.Oo+[ÔýÀ§2óÓ™9®­ŽÅÐêÀˆ¸x6^oÌÌ2sG Go¡ºKð'PضN1Pk³®â`hýÞqµWf>Òܲ$IROåX’$5ID< ¬Q¦y>°|fNigŽa.Xg9³€Ÿgdæ›eÖú m\õxŒÂK„Wg¿øˆˆ!ÀîÀ¾Àg€þ 5”™—V²XD|ø4…]aóŸŸ¿~À3B«Ç€ÿ«dXO½G–éâW’$©C-Ñê$IêÆn-ÓÖ MáÈÚb"¢ppõ…Wï¿¡pAúKíô½¿ŽuÊyžÂ_Ã¥™9¯T‡vB«…-[颙9˜XU¥z_f΋ˆÓKmÂàJ’$µ€G%IjžªFÁžÀx áÁð×NàOÀº™ùÕ Â+€Û¨ü"÷ö¼|øhfþ¶\xUô$p)…«\x…WÕq.ž(þ±—³K’¤–2À’$©yn¥í»˜J]äÀÉÀºu®ý÷Ìü\q'REŠG ï¬sÝ·ÎÌs3sNc*}}pZíe©ZÅÐñT¼œ]’$uX’$5If¾BᮥrÖ‰ˆ•38¡Ë:"6¯aÜe5®7øo ÁÕ)™9£Š±S+ì÷zõe©™ù/g—$I–$IÍuK;í¥va]Ký÷QðãÆ]AaU5îÖÊÌ£3ó֬䎯¨anI’$uX’$5WÕ÷`_éûAÖ;V3 ¸sê7U®óQ šW%€u*è:±Æp¬Ç‰ˆ¥"bùV×!I’ÔHX’$5×@[˜/`dæß‹cëõã⋆Õ8 ¨äîªV¤p|°»C+èw}ó÷1("ŽžNou=’$Id€%IReæ[À?Ûè²FD¬Y¦íøEþ|… h\%lì_EŠ/^XÍàЈ8ºš±ðÓ ºÎ¯¡žc¡àêà `𥈨õKI’¤NÇK’¤æ«ú!@fÞ Ü¼ ü‚â=SÀqU®JDô©rÌ—ªsfDœ\ÉŽ¯ˆø0…ûÁV¯`Þë3ó©*kéöÊW ôþ_K “$Ij,I’𝦫è;ÀÚ™ùâÎ(2ó6àÿªXÿÃÀW«èOf¾]\»œÜYîî­ˆX9"NÆ£*˜sðÝ*ëèÖÚ ®æ.,I’ÔmDážXI’Ô,±$ð&ЯL—W€U²ŠÿSŽˆ)¼Êy‘™3+]£¸Î_]«³Èš÷oPØ´.°Qñ+u|fþ¨Æõ»•ˆ|“¼r¡Õ¢~™‡5¯*I’¤Ža€%IRˆˆ;€mÚè223'T9çÀ~U 9!3O«r¥#ª× öÌ̶.Áïöj ®’ÂÅ÷'eæƒÍªM’$©£x„P’¤ŽqK;ím#,ç ÷cUêØˆX¾š2óM`7`Z5ãàÀþ=9¼ªâ¨à¸Ø43w3¼’$IÝ…–$I£áVf> ü¦Š!ƒ©þx2óqà`nµckt ðéÌ|§ƒÖët"âpª®Æ\I’¤îÈK’¤Žq?0£öÑ•¼ÞWÂ).:¯Ôá±Zµ‹dæõÀÎÀÛÕŽ­Â|à4`¯ÌœÞÄuºŠjw\í•™4¹&I’¤–0À’$©dæ»ÀmtYذ†y_ΫbHà¤j×)®u ð `R-ãÛ1Ø*3OÈÌùM˜¿«¹x¾v J’¤ÅK’¤ŽÓŒ{°~L­¢ÿA±n- eæ¿M«k_«À‘À&™yoƒæìò2s.pf©& ®$IRd€%IRǹµöš¬Ì|‹ÒaG9½)Ü­T“Ìœœ™{»µYûð `ÍÌÜêz$I’ZÅK’¤öi£Ë'2óžçþpACÆÏ¿ˆˆþÀï€c2ó…*jXN!HëÌžžÉÌÙUÔ&I’$½ÏK’¤TAÈtBfžVãÜ}€Ç€U ;,3½È<€1vþÜ |¦TÈ%I’$uïÀ’$©c5å,xÿ¥Ã«vVD¬µàO"b$p…ð Ç¿^kM=QD,K·ºI’¤îÄX’$u°ˆ˜|¨Lól ÷`ͪqî^îšÚ¨ŠaïwC€Í)\ò¾°À™ùL-5õ1ø&pð»Ì<ªÅ%I’$uX’$u°ˆøpP]¶ÏÌövjµ5ÿv´¿Ó«ZwÛfæ¼ÏÛå-\ +~ž ŒÈÌ—ZV˜$IR7âBI’:^{áÒvõLž™·×Ô3G ŸŽhðœ]ZD Šˆã€€3øOxÐ8º%…I’$uCîÀ’$©ƒEć€Imt¹73·¬sµ€}ë™g³€ 3sbçìrÊì¸*Å]X’$I â,I’:Xf¾´mƒë\ãIàçõÌQÂcÀäÏÙe´³ãªwaI’$5ˆ–$I­qKmKÛ4`“€ç0ÀƒÀŽ™9­Aóu5W \ü¾™µI’$õX’$µF{÷`}²Þ2s:ðe aJ=þ ŒÎÌ7ê­©+©3¸Ú43wËÌ›Y£$IROa€%IRkÜÌo£½î  øšá5Ÿ|ølfÎhD=]ED|›êƒ«1ÀFW’$Ig€%IR w3=ÒF—õ#b™-wÙ!UtB±TDœHåÁ@c€?7³6I’$u ,I’:ïÁZÄ"¯ žLåÁÕuÀ¦™¹Wf>ÒÌ%I’Ô1–hu’$ €Ç€W€•Ê´/ßµ´\D|ø•…VP®®N6´’$Iê~Ü%IR'P¼ ý¶MÏ_FwhA­7w\I’$©ÈK’¤Îcá{°^¤pùG3ó—Å‹Þ{’ŸSÛh_8¸Ú-3옲$I’Ô X’$u7S€ïÉÌs3sN‹kj‰Ìœ œ_ª ƒ+I’¤' '$IRg3sf«ëè "b(… ܇R®®N2´’$Iêy °$IR§§[ß7¸’$Iê¹ °$IR§‘þbE’$©Ç3À’$I’$IR§æ%î’$©¡"b©ˆX²ÕuH’$©û0À’$I ƒ"â8 ¯Ñêz$I’Ô}x„P’$Õ%"ߎ†?OÖÌÌé-+L’$I݆;°$IRMÚqõpÿ ¯–oIa’$IêvÜ%I’ªRfÇU)îÂ’$IRC¸K’$U¤W¥¸ K’$I ±D« $I[D, |‡öC«¸¸±YuI’$©ç0À’$I%UqTpa \œ”™6«6I’$õ,X’$i1q ðÿ¨~ÇÕÉ™ùHÓ “$IRd€%I’JY™ÊÂ+w\I’$©é|…P’$-&"Vž–,ÓÅàJ’$IÿŸ½û“¢Ê>þ=3„!ç( ÊâK0€²‚`@\2Â*+¬®kÚUÌ?Q0Gv]AAWQWŒˆÐ5 äfÎûGõè0SÕSÕ]f8ŸçéGé{ë֙®Ó÷ž›4¶ ¡1Æc PÕÍÀx·&àU ªþÁ’WÆcŒ1&l–1Æc\å›…e3®Œ1ÆcLÊXËcŒ1žDd иÙWÆcŒ1&U,eŒ1ÆO"’¡ª9©ŽÃcŒ1ÆÝ,eŒ1ÆcŒ1ÆcÒšq7ÆcŠ©$"¥S‡1ÆcŒ1a±–1ÆSLˆHy¹ X Ku<ÆcŒ1ƄŖcŒ1Eœˆ”®nªDîþh¤ªûS˜1ÆcŒ1!±XÆcL•gÆÕzà~~K^Ô.MI`ÆcŒ1Æ„Ìf`cŒ1EŒÇŒ+76 ËcŒ1Æ 6ËcŒ)" ™qå&!³°Ddšˆ¨Ï[ŸcV0æÄ°&st‘^Ϥ:^cŒ1æhT"ÕcŒ1&:©\ ü“V¹˜ ¼“¨¸Œ1ÆcŒIK`cŒ1i*ÀRÁ¼˜Ü©ªË›1ÆcŒ1Éd ,cŒ1& ‰È-À Ÿq5JU?KX`ÆcŒ1Ƥ€%°Œ1Ƙôt<þ’W6ãÊcŒ1Æ{VÄÝcŒIOw£´+ð*ÐFUÿ`É+cŒ1ÆSœYËcŒICªº˜âÖ„%®Œ1ÆcÌQ&´%„"R¨ ”²Â×cЍCÀn`‹ªîHu0¦Èº¸(…-4Ƙ´&"•ª@¬T‹1Æìö›Uu_ÆüÆ*"'çg-cÂÈcŠùX ¼ 
,–ªª¦6*S¨ê:™4n¶Ä•1Ƥ pð{ 9‰ò(v IDATP=¥AcLš‘Àç8×C¯©êŠXÆ ”À‘,`(ð U,'4Ƙ£Puœdÿ™ÀH`­ˆLžPÕ­)Ì—«jNªƒH‚l`µÏ¾?'2cŒñ""U+aÀ±)ÇcŠŠz‘ÛùÀ"ò0˜¬ªûý"~&ˆH0 ÔŠ)\cŒ1ùíkZ­1É "Ó€‹|v¯¨ª»1ñ‘ÀO>»OTÕa‰ŒÇ¤)Ü \”Oq8ÆS\lÆù‚ÿ?_ØZÄ]Dïã°ä•1Æ„©œ¬¸L0…$®òúhÏ,,©ü苳U{Å€Cä_K€©ªún¬±ä‰)ô"î‘ÄÑ3>Ç\¬ªOúì[(9ôZå3P>¿y\yªêëëÅX‰Hm`0Nb´þfÉoÞ¦‹ŠÛìN9ø3ÎïGõBºo^WÕÏó“ô"UwœçôüýnÞf3TuKŒç¾'ÉîÇSªúV,çq9oœM¡üX©ª£Â8oP"r#ð@”vÈí·ßNÓ¦M“™1Æ}ï¼ó·ß~;o¿ýv´nÙ@U}-ïG$°"ßê.ÇãÃxùòåù׿þÅСCãÚcŽf‹-â’K.áûï¿÷êrèF²Á„'@â*¯KT5ð,¬È ›[qfå•z|_·ªê«± VuàgŸcNRÕK}övζ8E™Ï§Yç>lƄȊ<6#q5Yq µ ¸ xAýlCÆ"IÇGq@AåÓªúcd¼¤&°Dä"œMû,W_}5»wïöê¶h­ªsïÈŸÀšó PõêÕã7Þ yóæ!†mŒ1G¯Ÿ~ú‰îÝ»óñÇ{uY 4·å"©'"•€k¿U|¦83šGªêÏwÎvµ Çô0LUw=°¨'°"³½.Åg­Ï¾.SÕ¨_+ú%"âÔ$­ÆxoÃUumˆc&ˆ\…“,,çP?á$˜ç'+%"õ€‰ÀÙ±ïáœÚ‰ÓÄQg™±Ÿäø!à˜ I2—sfâ,k®ã£û^ n,ïOñ‘2ÀJœÍU hÓ¦ óæÍ£F01æèµbÅ ºuëÆÆ½º¼¡ªÝrÿñë‡6é‹Gòªnݺ,Y²Ä’WÆ¢š5k²hÑ"N=õT¯.ÇãÌô1)’oWÁQøO^ýh«ª½cH^ æ‘Øä8K®Þ\È5D¤ð0œð“Wą̀Y "—Ä3ˆˆdˆÈ#À „›¼è|,"Ej ‰ˆdŠÈxà_ÄŸ¼¨ ¼ùK89ø˜p“Wà,œ*"‹HI?¨ê/8Il?Jâ,[×ÙøK^ÌLvò*âf¢$¯Þ|óMK^cLˆZ´hÁ’%K¨[·®W—óD¤Wî?2à×]6\×£gee1oÞ<Ž;Îõ½ÜcL*T¨À«¯¾íM{Dd)™I2ù?`=p?þ– *08IUÏVUÏ©uQÎyÎ’ Ï]‚CÖx9ò9 Ø‹ÔKzø >U)`²ˆ\]hO‘™*“ë *ŸjÀë‘Ú§i/R¤}"Î2Ê0•ÆãÌÆK9˜ÔJài®ž¿ïãŒ=$†xâ£@áÞD‘ªÀßÝÚŽ9æ^}õU*T¨ä¨Œ1¦ø;î¸ã˜;w.YYžUîŠ|øõ›Ç €fn=ï¿ÿ~Z·n~”Æc¨U«Ï>û¬Wsœ‹“|-ˆmÆÕ§±œLDަ˜YAÑœAøI´#"¿ÃI $kÆ™ŠH§ŽýNaïD+¼™”îîâšÕ…÷%hlD¤ Îrâ²…õ A_|&Tu ð¥ÏqO‘±Y†í·ŽÖJU]ë¹âðWœ¿¹G¦L™BÍš5S’1ÆN>ùdî¹ç¯æ8ýú!ÙuË–-¹êª¨»ÇcŒ A×®]éß¿¿WóŸr¿u0I5§Øs4ÿÚÄ:ã*Ÿ‡ðŸ0 ÛµÅù5&"倹$~Yf~%€"âûÊWD.®H\Hd3#;¦%éŒSÄ>¡§IÈ "µpff†±äѯ¡"â÷5dV< Äþø R1ûJð˜…7`À+ØnŒ1IpÍ5×D+[u)8¬«›p¾…;ÂŒ30`@Â4Æó›•+Wrâ‰'â±9Øéªú~²c:Ú‰È Àíá€[BHZåž§ ðÁ.¢>Áù~¨„S+©NmŸ š¨êÿüt,jEÜEä1œÝƒÊÆ)ìý3N z¸ÌÐðá_ªzMaDä8œ]"˰ 'ÖÊ8³ê›c–ª^𘄑RÀçĶìóN½³õ8Ïå1@b{óò]Ä]D^Ä™ÄAàC`N½>p:Áv#Ý 4SÕu…ÄWøÉ¥€ªš ŽÜó,:ú躧`ü֠爇ˆ´ ìú+"¬X±‚fÍ\ªcŒ Ù /¼ÀÀÝšµ2€N¸$¯jÖ¬Iß¾AÿÞcŒ‰UóæÍ9óÌ3½š]7Ù0 —V˜3®òÿäÕj 7P[UÏUÕ¡ªz¹ªTÕ.ªZ h‹S>ˆvû "r"Á—á~S低ªÖUÕVªÚ g†Üø/~ëòÈÑÂüƒ`É«÷q>ÇÕSÕ?¨ê0Uí«ªM–{ ôq¹c¢ &xòj;p ÎïHGU½XU/QÕ.8ÉÝÀšã,@DÎ#Xòj;Î{AÍH܃"qwƉû& ÐdpDYàáÂ:©ê6`–Ï1ëCzi„ó{ãÇKÉN^E¸þíܹ³%¯Œ1&‰úöíK­Z®å"K2€ß»µöë×%’UCÖc À…zN€ðÌl™ÄQÕU8w‰J\å.oó[Hû œZ[³£Í‚PÕpjxWsQ\·¾‹`uÅNTÕgTõˆaªš­ªïªj?`pÈ瘥€¢u‘Ó€â|h¯ªo«j¥®ªú…ª^€“ôpÖéâÿœ?Y¢>n.–MUõ_ªº=£ªîWÕpf*N#@7‘%iw8ä+ µª>â¶ûžªîTÕÓpffùÑWDšúè—èe„ƒñŸ OúòÁ×ë¡(“1Æ$@‰%èÓ§Wó™x|`íܹs‚2Æã.Jâš\( %"q•Güͺ9 PÕ-~Ug-êõ8K‰ü8Æg¿"CD)úéÓ½ª:BUÖQU§—ûÂÈr8/ 0Ö“ªzƒz¬7Î+’ôåsÜ."Ò2@ Iê™}õÐUU7ÖQU÷â$cž1¼Âœœê³ï@§Â–û¨ê—@WœÙZ…àZc.ÅIŽûÑKD*úì››Èó»!Áÿ€%~Ç™]cLšˆòÞÛ<ø[K«V­1Æw7¦L×R$µD¤|²ã1 ª‡<þ+8õ(ÏÄ)Þý$ð6ͤÈÅk±·~wóªdì"âOøŸ}õ)ÎNw¾©ê$ü_pWº¹5ˆHUœe¡~¬ÆYfÄ]8Ë ý¸8àØ‰Ô/@߃À`·YW^"³¯À©í6_5²"®PÕývVÕoñ‘˜Š(,qšËï,¬28Ùý:ð³|ài?IÙ°Erê–-[–ßýÎõ2ÉcLµléù]Z£ ûÇi%y·Ç˜¬|2JÛ`90¸øÖ£ßùø/ÐýOUÝç?<ˆ,1¼Ïg÷ I£Dë ï“ªêwѯ"Ëõnz\4"Rÿ¯½¯pv) j:þêxUÁßã8ØãóÜC|ö Ò÷ Á–<‡ÉõZ¨~ýúddY}lŒ1& Ç{¬WSÕ \>°–,Y’Ò¥ƒltbŒ1&,*xnïÎYÅžˆ”‘›p.ìz¥:žx©ê&U]¨ªcTõë‡Ùé³_VŒã§%© øN¾˜ã©þƒ³ÃÝw8‰ˆ» qv,¯ª'«êU}HUWzŒqžÏs)±/y{ ¯…ã#E·S*2+&Èr€§ã8Ý, PR°§á‘qñ|,³Ž"ÉÖ}vïêc¼À ŸãágS 2[kvþšsIäš¼/_¾8æô1&ýeeeQ²dI·¦ @fþ{33 ÜeŒ1&I¢¼Û›³‡<‰«uÀý8³ÕF‹ÈQûõ¹ˆÔ‘ËðŸ)nß\µÁáè…ªê· û"Ë4+¨êïTµ·ªÞ®ª³TõË3ºN÷Ùï›X/ò#±¼ë³{ûXβfø_þùE”ä`¡Tuðz¬Ç»²£§ßçÄßå«|öó»ŒÐo]«^€ßzY©*Þ[ízÈcRÇã=8ó¨ý`oŒ1¦èóH\åjôLI`) "D¤³ˆŒ‘ïq.HOô9DqûLÐ:@ßÀKÏò º¤//©ç³»×D¿¾ñÙ¯M´F "[C¾åß±ÏÏîy¹> Ð7‘cä òÚó»Ä×ßçó¤È²Æ¨"»—.÷9æ`Ë´ýîXøð¦Ï¾ÆcŽb…þ13ÆcÒM¤ ý•8u…¢Õ-"s"5€ŠÈ…cSœ¥J§ãÌøhF|³ôŠÕc„ÿ¤@̳wBÐÿ3Å|(÷ð“Ï~'Ò^ÿKäüÊ¿{E]1ý&]¢Y¹ {üòŠç9õû|–â/:ë£_#œíï¸5ŠH]àlŸñ=“ŠâíÆcŠK`cŒ)2$®råÎÂz9‘q%ZdöÄ)@ÇÈí Â/ê_ÜXõôý%aQ.H¢æ"¹(a‘ü&H&Qjè»9„óÅZcÎMçt[’ö›hŒ¿Ötà!üÕ\¼În–~ꇀI>úcŒ1–À2Æ“þ"ˬ®þ†ÿ™ Ì!Ü Ó¤‘Ò8É· qêX•Kð)‹[+H‚ow¢(\­žÛKT@°M+ünTÍŽÆÈUdŸSUÝ%"ÓË}tï/"×x,¡õ»|ðUU #iŒ1æ(PÜê]cŒ)FòÔ¸Z ŒÂòê?@ÛHQíT. LDÊŠÈ-ÀZà /‰O^AñK`å_’Íž„EQ¸ q&K–ˆ$ã5MMÂH@†’Ä‘’¤çÄÕôõ[̽.uEäœÙ¯~¤²x»1Ƙ"ÆXÆcÒŽ8nÖS°8»f'©êÙªúq"cLé|ÜK°%Ta(n ¬Rú¦2á•ÂsGv« ‚<'®{m´ç=$^éú|úž‘¨ªË}vw›iåwöÕ:`¾Ï¾ÆcŒ%°Œ1ƤŸHAßvÄ6ãêÓÄE–8"2XÔa¸l`)p;0Ëç1Å-u @ßTÎ6Šyà 2*èÆ,¶,üÓ&]ŸÏ ]ð? 
««ˆü:»KD2q–=û1±¸m°aŒ1&±,eŒ1&]$úŒ^ÚÕW¹D¤ð±ÏÚƒ³ ýhà\ ŠªvPÕ»5>Ç(n»€I$¤2ï΂‰’êepûô-Âù*…0ªz˜ÔÖTóôù|uÁJ=òü»þffÆdŒ1æ(—ê'ÆcŒ+U]&"¯ççoæwªê²äG®ÈΊSö79g÷¯E‘Û‡ªê5cÅÏN`¹c'?è›ìåšyI`½¼¨@òÙ–¤óx òü…1k1Ì]=·ã/©vây£y/HgUÝ#"Sqv}-L~ÛIp€ÏS¼®ªƒÄdŒ1ÆXËcL: tÃYÚS¬Wy\‡ÿ ðƒÀãÀ“ªúÏcü.¯*n ¬ ÇMEá‚$Š>UÕ›IzÙ ïq!œÏoÑq?¶õ|ô+ü_dÖV:‡¿ÖÙ"Rg&hŸc[ñvcŒ1Y+Å~üñGÎ8ã rrܯ†Î-·Ü’ä¨Âµxñb† VàþŠ+²|ùòD”Þ¦NÊwÜQàþêÕ«óÁ¤ "cR'2 ë5œäUqK\åÖ‹ù«Ïî‡ TuaÀÓø]WÜX~|gKD.+€/"·•ªºËÇá_8UÍÂK„-@Ø¿‹ùŽ«Û2„óŸÂ¹Vá/!–Ô X².iTõ Y ´/¤kiàœçÐÏn‡ß¯Åž1¾Íœ93êõÔ´iÓh×®]Bcˆ÷óý‚ ¸âŠ+\Û¾ùæ22¬29:X+ŦL™Â·ß~ëÙþØcqýõ×SªTÐÚ›écïÞ½¬^]ðshåÊ•SMúÛ¹s§ëãµgO*wy7&¥z«ê¡T‘ gµ|ö},†ä@EŸýŠ[+È7$E$#Ž‚Òç#·\*"kp’Y+€Ï#ÿýŸªfÿÚIõ—H??³ˆN‰1¾P©ê<œÙ‰ôE€¾íD¤ŒªÆS@ýœ8ŽÍïcü2?§–_ºGá ,pf^­õ9æ¤4žufŠ¡§žzÊõ³u®'Ÿ|2á ¬x?ßïÙ³'êÏ`ÌÑÂRµ)6yòä¨í›7oföìÙÉ ÆcÒP1N^´ ÐwjŒçhãqEÝG8…¢ý¨¿‹ôD¤4ÐÕ­ 8è ܆SûcÜw÷ó;›éX9>Æ8KŠHK)*߈­Å¬Ò@÷XO$"­“b=ÞE %:Þň4λ`‚̶úè× Ës€gâŠÈ˜Ö¬YÃÛoG/8sæL~þ9HÙ=cLªX+…Þ{ï=V­ZUh¿§žz* ÑcLxD¤Bªc("èû}ÐÁE¤ÐØg÷’AÇOgªº ²îúêOõ ŠÏ¾oªê^—ûÿà|Ð7¯³Ï€Ý"ò…ˆÌ‘[E¤‡ˆ/"㸠¡ª ¼àqœî¦8Žuó1þwQ$"YAOy¾Þ~‘Ddˆ<""‘SE¤lÐ1ÝDfµMñѵ,þf.TÕµqeL'NÄy;ñvàÀž}öÙ$EdŒ‰‡%°RhÒ¤I…w©!µråÊGcŒ1ñ‘ò"r°VDÜf¥˜#ùÙ©,WÆ¿ÿ»ºÍ *ê^Ð÷BédðH ³‘™ëqÿ ÀïLÃËE$–ZX—Gþ[§>Óà`N½°"ò¾ˆŒ‘Ž1ŒŸA–Ö&"~ ŽADzƒM¤ö™ßéó5ùí¹ âlœ~u"ÿ¾˜ˆ3ûp—ˆ|#"/EÞ“ã1§a¬x»Išœœ¦Lñ“…±cÇzÖ$6ƤK`¥Èž={xá… Ü_­šûLðñãÇ':$cŒ‰YžÄÕ:à~œ-éG¥6ª"!ÈtÝ‚ ,"µ–\)*KË‚˜‚³s£_3#ËÉüœè³ï~œdQªú#ðºÏqª“ƒÌ˜‘ö83Å¢)œ†3Ãk·ß±ìeœíüzDDN÷ÛYDN5íbr€¾÷‰ˆï]E¤Nò1š àw8Ëúˆ¥Uýx'ž1"~^ ac|Y°`ëׯ/p¿ÛõÖwß}ÇÂ…±”™4Æ$“%°RdÖ¬Yìܹ³Àý·ß~;nŸIŸ}öY+âmŒI;‰«\ímV¡~ Ðw„ˆøš…é7ÿâ¡&°Tõ'`Z€Cj‹EäÊÈì*W"RVDÆàÔ¶òëÙH¢ÊËÄcužóSÏJDêàÔOó›ðZ¢ªi±E°ªîÆßòµ\¥7EdHaE¤'°HÔŽ2ÿ ^9»+Ì‘V>û?œ –èëe\cL.æ5 Mš™8±àÛjff¦çŽ„ãÆ…ñ27Æ$’íB˜"^ÅÛ À´iÓøè£Ž¸ÇŽ̘1ƒaÆ%!:“J:urýZ¦L™DcŒ;)\ ÜÈ‘I«üFá\ÈwŸè[ xUDzªêf¯N"Òø'P?`,ўǢlpþ—HVn‘—q–cý€S|úœbï$Øã• Ǽh*"׫êb·"r&NÁl?µ‰rÝ o2< Ƕ283Ô.ÃIº,~PÕÃ"R8\€X¥ªÙ"r7Îò;?êÿ‘û€¸ÕJ‹l ”WU5ÈŽŽ^^Ây_©ãñ <BÆø²eË^y¥à„¿Ö­[sñÅsã7X28wî\6nÜH½zõ’¦o't’g‚-ÍJ“P–ÀJÕ«W³dÉ’÷7nܘڵkÓ³gÏ ,€'žxÂXGf͚ѬY³T‡aŒ«‰«\íE¤«ªZËÝ[8˵üÖÂj ¬‘Ç×pvjœÄÊ™8¶-cŒ¥¢ˆTRÕ1Ÿ–TuˆŒ¦ð%Wù5® )Œ ªú]´ªš#"ÇyMø!2𖈬ÁIoöáÔDú=pJÀ8_RÕ7“Pªº62ÛíÆ€‡¶ç·%‹ÈAœBãÉ4 ¸ÿ;–ÇyÞ&"o«€-8I¹VÀyûö×èïIUˆÈdb/–ÿ–ª~F,Æø1uêT8PàþŽ;R»vmÚµkÇÒ¥Kh;|ø0O?ý4wÞyg’¢ô¯aÆ\vÙe©Ø”³%„)ðÌ3ϸî†Ñ±£S3µwïÞ®Ç-_¾œ?²3³1Æ„CD*‰ÈHœ%1ù— zQœBÆwÏ;Z¨êàÅ€‡UîÀÙaïG`3° x”Ø“W¹‚ìŠX”<¼›¢sÏ‹~U]‚3û+¨ãpjWÂùYÿNðäÕvào1œ;Fã<ޱ*¿ÄÏ®8ÎQ€ªþ„ÿ s•Å©YvÎó9 §–UÐÜ(U]ð˜hâ)æ^ZDš„‹1Qy­v9ãŒ3èÕ«—kûøñã9tÈVº“®,•dÙÙÙžÛ´vêÔ ˆ>ç©§žJThÆS@žW«q.¢ªø8Lqvk£ª½#€·‘8³fÒAóT‘DBo Ì‹y?ƒT5HË8³°’é ÐOU7&ù¼¾D¿¾+èÔ8 £P©êg8KÃÚÅϯé¾l5UýXãပ"2ÓY&Ñ>ùä>ýôÓ÷gddü:a oß¾®Kï6mÚäºôГ,•dóçÏçûï NF(Q¢ݺý¶ÁÔàÁƒ]þùçÙºukÂâ3ÆÉŒqÆÕËÀɪúU]–È‹ UÝ@°ÝƒZ†ÿ:d ­ ”Jªú3p¬eLû€žªZ°&@‘"×}÷UAÙÀpU}3I狉ª®z‘˜dï.œgBf‹ªêtà:’—ÄZ\ªnÓýãO•ë  ?–È2 æV¼ C‡Ô¬Y€ã?ž:¸ö;vlÂb3ÆÄÇXI6iÒ$×û;uêDõê¿ÕŦª®:o'øT»€îª:?–ƒUuИjTm.PÕ"ñÁ"RK¯;°-Äa·ç«ê{!ŽY€ªþŒS—*‘žÆùy ÿ Ç‚ížê&o"kš%²L˜öïßÏôéÓ]Ûò—i2Ä}ÃÒ7ß|“¯¿ŽgÕ²1&QŠí‡ÔtôË/¿xNIÍ¿û˜cŽ¡K—.,X° @ß±cÇrÍ5×$}lj½{÷òÉ'Ÿ°råJ¶nÝÊŽ;(_¾aÆ lß¾òåËSµjUŽ=öXÎ8ã *UªòO“xÙÙÙ|þùç¬ZµŠ7²}ûv222¨Zµ* 6¤uëÖ4jÔ(Õašä D›•¡À<àN›mŠ+8KÈâ}ƒß Ü<¡ª*"sqf®¶iUœY.AërªºYDºâ¿ð {Ï®ŒÌ¬‹YdÙ\¯P<³Ê IDATÈnzâ쎦·?G–…ªú–ˆ´¦§Æ9Ü'À……Ø‹ªN‘qʧ‡<üàfUMèNªzPD&7‡0\ΡEä%`¤ª~¸æ(öòË/³m›{Ž»gÏžGüû /äÚk¯eïÞ#óʪʸqãxä‘Gg:ûñÇùì³Ïøúë¯Ù¹s';wî¤B… T©R…š5krÚi§Q¿~ÐŽc·~ýz>ûì3V¯^ÍöíÛÙ·o¥J•¢råÊ4lØ-Zp '$-“zšÿ–••¥&|cÆŒ)ðX*"ºqãÆý§M›æÚÐ7ß|3)1긙™™Ú¾}{ý÷¿ÿ­999þä|^yåן·ÿþ¾ŽŸ:uªëñëׯ/Ð÷óÏ?×áÇkõêÕ£þ\€6lØPo½õV]³fML?—ñ¯mÛ¶^ÏCKU%™7œ-èóÇ‘ƒs‘~J²ã9n@7à«Â~'=n‡)@}—qgùc±§ˆ©‚ÏŸ»z€1'†ôX <‰3+&–Ç;ïïÄbœ%ƒ‰xMTîÅIRħ_½Sý:á1)S¬þ烟€«€ùÆìáóøgâŒ]pŇð|îª$ñ±?géi¼±ç¿e3&I~-µt‹§mÛ¶jŠž³Ï>ÛõõÕªU+×þ]t‘kÿªU«êÞ½{C‹ë‰'žp=O­Zµ|ÿßÿþ×õó}×®]c¾æÈë‹/¾Ð›nºIO8á_¿¯õêÕÓ+¯¼RW­Z÷¹Ý,_¾\¯½öZ­_¿¾¯xjÕª¥ùË_ôƒ>HH<&ù²²²¼žï‚wZ+1Z¶léú$´oßÞµÿž={´bÅŠ®ÇøMfÄãå—_ÖÆþR±bE3f̯‰¡xX›6mr=þ¡‡úµÏÁƒuôèÑÑ^èž·¶mÛêêÕ«]Ï““£O?ý´V®\9ð¸íÚµsML&Þ?pwÝu—ëñ_~ùå¯}~úé'½ä’KTDÿ\%J”ÐáÇë–-[ÿlÆŸ4K`•çü–¸JÞãžs1=ø¥ßËÃ8Â#FQÆ<§~Ÿ[ÅBâ`¬,Ÿ?sùc 
ùñ®‚³[Ülœä†Ÿ÷Ãí8µ†F“ôº(S«i"ðÏ8ŸcpêÓ¥üõòcRgÆ·pŠæ{=€ùÀP¯×$ÐÊçëï’ão޳AÆœÝ ý<§ßã,þ#P6Eûm.ËsÀ!Ÿ?C´[RYX«ØX»v­fdd¸¾®î¾ûn×cæÏŸïùZœ4iRh±Åûùþßÿþ·gœ~¾Œ÷òá‡j÷îÝcº&gR@=bºæqóÙgŸé¹çž×{ÈYg¥Ÿ~úi(ñ˜Ô±VŠ}ôÑGž¿dãÆó´[·nºsç΄'°6oÞ¬íÛ·+ÖjÕªéŠ+Ž8ïþýûõüc\ãÖ­[WW®\èñOtëã?.t†šŸ[½zõìC‚¤SK÷oa‰«”Ýpfk4Î×Á©§s:…$›ìÓcÞè \\†3ÓgNqõ.@c@Ò Î*À)8I­Kq–¡^yüg‰]éTÇ™ÄÇ£<Ð6òó_\ù=i”Ku|>â/ym \yíý ôégW;Õqò3Œ‰÷óEž[6ÎlϦ ŽÙXÅÄwÞéúZÊÈÈÐuëÖ¹søðaÏÏŧvZh±¥[kÿþýzÓM7ifff(¿¯•+Wv]ã×áÇuäÈ‘Z²dÉPâ)Q¢„Þyçq%÷LjY+Å®¸â ×' L™2º}ûvÏãÞyçÏ_ÌÑ£G‡çÏ?ÿ¬'žxbX<ô´ÓNÓ™3gz¾Ñù-µyófßS] »5hÐ@wìØ¡ªªÐsÎ9'”q›4i¢;wîôý$2õÁxÎê‹åV£F ÏÙk&vi˜À:j.€íf7»ÙÍn±ß€ÚÀž°>gDn¹3²’ÈÂXÅBvv¶6lØÐõ5Ô¥K—¨ÇÞrË-ž¯¿eË–…_:%°vîÜ©:u ówTÁI>õÔS›Ý»wÇ=ëÊëÖ³gOÝ¿à˜Lêy%°Ò£âv1·oß>žþy×¶^½zE-úÝ¡C7nìÚ6a²³³C‰`×®]tëÖ/¾ø"j¿¬¬,ÚµkGŸ>}0`:tðü>øà®¿þúÐbÌëàÁƒ\pÁQw )[¶,UªTñU\~ýúõŒéìd?tèP×ú¹J—.Mùòå}ÅùÕW_që­·úê›H6l {÷îìܹӳOFFUªT¡\¹r¾Æüù矹æšk Ѥ)MÜnVÆcŠUÝLø;ªæîZ¸BDfŠHÓÇ7ÅÀ[o½ÅÚµk]Û†õØhícÇŽ=¨4´ÿ~Î=÷\/^\hßN8®]»2hÐ úõëG›6m(QÂ{¸œœþú׿2mÚ4ßñìÝ»—óÎ;ùó ß´·iÓ¦tëÖAƒÑ£GN;í4Ê”‰¾7Íœ9sèÝ»7‡ò“I²Z6+\ÑŠ±¿ñÆ…÷Ýw{?gΜÐâ>|xÔ vݺuu„ ®3ÆöíÛ§³fÍÒV­ZùΈÇ;«fÍš®÷wèÐA'Ož|ÄZ윜]¾|¹Þpà ZºtiϘʖ-«÷ÜsOûEDûõë§/¾ø¢þüóÏ¿Ž»uëV]¼x±öïß?êÚñ²eËú®•¨XnßJ‰ˆvëÖM'Ož¬kÖ¬9¢äÆõé§ŸÖ-Zú\.^¼ØWlÆŸ0f`‘¢º(v³›Ýìf·£ûFbfaå½…:# ›U,xc¯P¡‚îÙ³§Ðã½Ê‘”+W.êj¿ÒeÖe—]VèõÙ=÷Ü£ß~û­ëñÛ¶mÓûî»/ꊎŠ+êwß}Wh,999Ú¯_¿¨ñÔ¨QC~øaݰaƒë»víÒ3fè©§žuœ+®¸Â÷cdÒƒ-!L¡.]º¸>øÇsŒ>|¸Ðãׯ_ïY°[·n¡Ä8þü¨ ˜þýûë¶mÛ çàÁƒ:räH_…ãM`å¿U«VMgÍšUèxï¿ÿ¾–/_Þ÷¥&Mšøš><}úô¨EäÇŒãëçMT+ÿí”SNÑåË—:^vv¶>òÈ#Qw¡ìׯŸ¯ØŒ?ñ$°pjÀÜ„³[Y‡ÂúÛÍnv³›ÝìöpkayÝBIda ¬"oÛ¶mZ¦L××É¥—^êkŒñãÇ{¾Öþõ¯Åc:$°æÌ™õwjÀ€G|QÍ·ß~«Íš5óËÏ5êSO=5žK.¹Äw–Çëã?u¢ÂìÙ³}eÒƒ%°RdÍš5žÉ§›o¾Ù÷8^õ˜222|e¸ Ó¦MÏ_öúJ´åõØcšÄ 3Õ A]³fïøÆçëÃQ‡}ëíCŸ>}|‘ŒÖðáÃõÀ¾.UÕçž{Îs¼råÊ…ºÕðÑ.–G&®rû¿æÕßnv³›Ýìf·DÝHü,¬¼·¸YX«È{òÉ'=_K–,ñ5ÆöíÛµlÙ²®c4oÞ<îSÀ:|ø°6mÚÔsŒk¯½öˆ•~lÞ¼Y?þx×ñD$êå›6mŠ:‹kÔ¨QbÉõúë¯{&3ëÖ­ëk6žIV+E&OžLNNŽkÛ!C|ãÕ7''‡ñãã+5°páB>úè#×¶Ö­[3eÊ233yõÕWó÷¿ÿ=®¸ü*UªsæÌ¡aƾ6lõë×Ú§víÚÌš5+j²üzôèÁé§ŸîÚöÞ{ïù'‘Î;ï<ÆO©R¥7hÐ ºwïîÚ¶gÏÞyç0Â3‰Hy¹ XÜTÍÓÜMD:¤&2cŒ1G+ML-,/V#ë(7qâD×û5jÄgœákŒJ•*Ñ»wo×¶•+W²dÉ’˜ãKÓ§OçË/¿tmëÑ£ÿøÇ?‘@cÖªU‹)S¦¸ÖVUþñxûÀxÖå6lدu‰ƒ:ï¼óxî¹ç\Û~øáüñ˜Æ5éÃX ”““ÃäÉ“]ÛÚµkG“&M|Õ»woÏDÊĉ9p öËO?ý´ëý™™™L˜0’%KÆ4îèÑ£9öØccŽË¯«®ºŠÖ­[:&33“>}úDí3räHêÔ©8ž3Ï<ÓõþM›6±wïÞÀã…IDxøá‡ÿÊõ—¿üųí»ï¾‹5,ƒBWyÝ–¼¨Œ1Ƙ_=$óƒ%²ŽBŸþ9ü±kÛ!C}æ-ÎÅÜŸyæ×û+W®ÌSO=ó¸:t W¯^®móæÍsȱsçNÏ ÇwO<ñDÌñôíÛ×3ùØc…º šI>K`%Лo¾Éºuë\ۂ̾(S¦ tmûùçŸyñÅÇÎÈܹs]ÛzõêÅ©§žÓ¸åÊ•cÔ¨Q1ð·¿ý-¦cÛ´iãÙV±bEÓ¸'Ÿ|²gÛ¶mÛb3,;w¦yóæqï5oýúõ1kü ¸Êe³°Œ1Æ$]’gaåe‰¬£ˆ×쫌Œ .¹ä’@cuÖY4hÐÀµí¥—^â§Ÿ~ _:X³fç ²Ë/¿œºuëÆ5¾×Žä¿üò Ë–-+pÿÌ™3=¿Ô¿ûî»)]ºt\ñÜpà ®÷ÿý÷¾v<4éËX äõ†š••Å€—ˆo.\Ⱦ}û\Û.¾øâ˜ÆÌ«oß¾”+W.îq¼œrÊ)….ôÒ¸qc϶Ž;R¾|ù˜Æ­]»¶g[ªX^IP¿Ê•+ç¹TÓk° ÕåÀzü%®ÀY+>§.–1Æ“lÉž…•—%²Š¹ƒ2mÚ4×¶N:^ ’‘‘á9ÉààÁƒž³˜ÒÝüùósë½tR…›3Î8ƒ5jP»vm:wîÌW\Ác=Æ‚ 8á„ ôŸ={¶ë8Õ«Wç /Œ;€ÓO?ÝõÜ€çä S4X+A¶mÛæùËÙ³gOªT©xÌhËÿûßÿòÅ_óÝwßu½¿|ùòœþùÇs§Gqãūޔ•+Wöl‹6;«0Ñvñ,õ Ã)§œ÷ÕªUs½ÿþýqm õWÀï›Ç€¶ªÚ[U¿J`LÆcŒ«ÎÂÊËYÅÔ+¯¼Â/¿üâÚkb&Ú²ÃñãÇ{Ö6Ng^uj[¶lIÓ¦ñÿ:dff²~ýz6mÚÄ¢E‹xòÉ'¹úê«9ûì³ ”À9xð ‹/v§W¯^”(Q"îxr}öÙ®÷/\¸0´s˜ä ïbŽ0}útÏ úx2ÝC‡åæ›ovm;vlà5ÃK—.u½ÿÄO eú&8YùçŸ>”±òkÖ¬YÌÇ–-[Ö³Í+cïG¬3·­téÒ´hÑ"îq¼j±¥:9ggÆÕWÀ[À&à<9/ÒöOUÝï DäVâÿòc¯ª>B,ÕïÂlþ­UÕ©!ÄÓ ˆ^\ÏŸUuAñtüU¯n®ª~ï "2ðžúêß3ªº)„x®¼¿Éðï>U« †ˆ”Ü×;³EUc/fò[<õø¿–‡/Uõ¥â9èBqm4hgl?r`¯¿þz¶;w2mÚ´¨;Ååµ}ûv¶nÝêÚÆtÒ\'žx""â¹ö:A’uAÄ“ÀJW^‰§ ¼’`‰x~1Æc$¸ˆ¿f†Iº)S¦xî&­n°ýúõãꫯv)ôú믳nݺ¤ì´†õë×søða×¶0¯÷üZ»v­gÛ=÷Üjíd¯:Ï6lí<&¹,•“&Mòl[±b—_~y\ãG+>vìXß ¬~øÁ³-Z}¨ *UªDÙ²e=§‹Æ#¬¤L~a®¿N‰z¬Œ1ÆcŠ0« \ĨªçõVff&S§Nõ,îîW¥J•\¯]²³³?~<÷ÜsO\ã'ËæÍ›=ÛRñ…}´ëÏ×^{-iqlÚÆDn“ Åï*=Å8õ 3Ñ¿˜Ÿ}öK—.¥}ûö…öõÚ¾ŸÙT¡B…„$°5«8*S¦LªC0ÆcŒI79X«HY²d ß~û­k[vv6&LHèù'L˜ÀÈ‘#C«œHÑ®¿RñåöŽ;’~N7‰¸.5ÉaoÖ!›3g[¶¤vÇú±cÇúê-v!òD½AZRÆcŒ1ÆÄ x˜ê@L0^µ†“åçŸöÜm>ÝD[F—ЧÒe×òt‰Ãg3°Bmù`²Ìš5‹1cÆP­Zµ¨ý22¼ó—^kÊcåwg cL\òïB˜×ÎÎ1’v! 
#à'àöÆYŸN<†08…àÃx¬?a €ç÷­‚ÙÂÎ.„‡Bc?á¼vÂúm-áÄÖoïNóšÔP³fM®»îº¤Åѹsç¤Ë„ËX!šZý¨;Ú¬‘˜ñŒ9J= üðþ4ò›®@™ƒ“Èú*¡‘cŒ1ùˆHmಇa‰«bÄkù`ff&­[·ý|ëÖ­ã—_~)p¿ª2vìXþùφ~Î0EÛ˜k÷îÝIŒÄáuý™ÍM7Ý”ähLQd ¬DÛ £L™2|øá‡¡×Ú´i 4pÝõÛo¿eáÂ…œsÎ9žÇׯ_ŸÌÌLפ[´+‚ÊÎÎN›‚}Æq〛€+ª…ô ðp‚ã2ÆcÜÜ”Mѹ-qU̬ZµŠ÷ßw_‰Þ¥KæÏŸú9§L™Â!C<Ûî½÷Þ¨«ZR-Ú ,·Ä\¢Õ­[×õþ-[¶°wï^Ê–MÕÛ…)*¬ˆ{H/^ÌêÕ«]Ûzöì™"æuêÔáüóÏ÷l/¬˜{éÒ¥9öØc]Û>ýôÓ¸bËëÿûŸk’ÍœªîVÕ€c›)¼fÉëªúnâ#3Æc~“ÂÙW9À, …ª^hÉ«â#Z­áAƒ%äœýúõ£R¥J®mÛ·oçùçŸOÈyÃR­Z5233]ÛV¬XÚyöîÝËwß}ÇÁƒ£ökذ¡g›×Î’Æäe ¬¤â `ذažmsçÎeãÆèµn[µjåz˜ ¬åË—‡6–1Æ ‘uOò¢2Æc~•ìÙW–¸*Æ:ÄsÏ=çÚV®\9z÷îó–-[–½7ªô»û{ªdeeѨQ#×¶0X ,àw¿ûeÊ”¡Aƒüþ÷¿gÈ!<øà%ñ<¯=Þzë­Ðâ1Å—%°B°cÇÏÝ0j֬ɹ瞛°sŸþùÔ©SǵíðáÃ<ýôÓQ?묳\ï_½z5kÖ¬‰;>€÷Þ{/”qŒ1’ȲÙWÆc’.ɳ¯,qu˜7o?þø£k[ïÞ½C¯5œW´ Ë–-ãÃÃÚ<81N>ùd×ûß{ï½Ðvüè£ÈÉÉaÆ ¼óÎ;L™2…×_ýˆ~Í›7÷\&öfh«V­bôèÑL™2å×ÕR…Í3éÏX!˜1c{÷ºïT>pà@J”H\©±%Jx®Ë˜0aBÔå{]»vu½_UyöÙgãŽïÀ̘1#îqŒ1Ñy$²lö•1ƘTHÆì+K\ERµÚ M›6´lÙÒ³=ÝgauéÒÅõþÕ«W‡–|{ã7\ïoß¾ýÿ.]º´ç€ ,`íÚµ¡ÄðÀpÇw0dÈ:wîL£F(S¦ Çs «V¥Í¦Ñ& K`… •o¨—^z©çnƒ?üðsæÌñ<¶I“&´mÛÖµmòäÉ8p ®ØfϞ͖-[âÃã_žDV}›}eŒ1&Ù’0ûÊWG™Í›7óÚk¯¹¶Õ©SÇó ù0E›…5cÆ ¶mÛ–ðbuþùç{ÖÁòZ–ĪU«øä“O\ÛÎ<óÌ÷õíÛ×µï¡C‡xøápöZ·n/¼ðBûsrrÈÌ̤I“&¡œÇ$Ÿ%°â´råJ>øà×¶&MšÐ¦M›„Çиqc:vìèÙ^Ø·þóŸ]ï_·nwß}wÌqíÞ½›[n¹%æã1±SU÷i¡ÆcLb%jö•%®ŽRS¦Lñ\QòÇ?þÑ39¦‹/¾˜Ò¥K»¶íÛ·É“''<†XÕ­[×3É7a„¸g#y]/Ö«Wϵ\Í…^è¹ÁÙøñãC)?sã7zNÄ8p –)ªì™‹Óĉ=Û’1û*W´oÞ|óM¾þúkÏöAƒÑ A×¶x€¥K—ÆÓˆ#B«£eŒIqÿÄfŒ1Æä‘ ÙW–¸:ÊE[í2xðà¤ÄP­Z5zõêåÙ>nܸÐêI%Âßÿþw×û<ÈŸÿüç˜kC-\¸Ð³\̰aÃ\Ké”+WŽ«¯¾Úõ˜C‡Ñ¿6oÞS<S§NeæÌ™®mYYY\uÕU1mRÏXq8|ø0Ó§Owm.ºè¢¤ÅÒ¿*W®ìÚ¦ªŒ7Î󨬬,FåÚvèÐ!Î9çœÀ»BÜ~ûíQÏiŒ)RÞ‘¹"rJª1Ɠ֜}•LÇWGµ¥K—òÕW_¹¶µhт֭['-–hþ÷¿ÿ±hÑ¢¤ÅÔ¹çžK‡\Û–.]J=سgO 1—/_Nÿþý]w+Vä/ù‹ç±#FŒ V­Z®mßÿ=íÛ·ifØüùó¹üòË=Û¯¸â ω¦h°VæÎë™îر#Çw\Òb)S¦LÔ-^'OžÌ¾}û<Û/¹äÏ{öì¡{÷îÜwß}:t(j›6m¢ÿþq-=4Ƥ9 è\|d‰,cŒ1nBœ}•;㪹ª^l‰«£[´Õ.Éš}•«K—.4lØÐ³=Ý‹¹;–R¥J¹¶ÍŸ?ŸÓO?·ß~»Ðqrrrxæ™gèØ±#;vìpísï½÷R»vmÏ1*W®Ì“O>éÙ¾fÍÚ·oÏ£>ê«&ó¸÷Þ{éÑ£‡çæjÇs ·Ýv[¡c™ôf ¬8¤Ó*DÿV`ëÖ­žS)222˜2e 5jÔpmß·o·Þz+Íš5cÔ¨Q,[¶Œ_~ù…œœ~øá-ZÄ_ÿúWš6mÊ‹/¾X`ì’%KÆöCcRmdžÿ,‘eŒ1Æ]¼³¯ò&®.TU÷i7樱{÷nÏë—ŒŒŒ¤®vÉ=çŸþô'Ïö9sæ°iÓ¦$FL‹-3fŒgû_|A§NèÒ¥ ?þ8+V¬øu3®;wòÉ'Ÿ0fÌN<ñD†î9c«cÇŽ\qÅ…ÆÓ§On¸áÏö;vpýõ×Ó¸qc®¿þz,XÀ† 8pà‡æ§Ÿ~â?ÿù7ß|3Çw·Ýv›çRÈ’%K2sæLªU«Vh\&½\”j|Ù¼y³çv¡YYYôë×/ÉÁ©§žJëÖ­ùôÓO]ÛÇŽË!C<¯[·. ,ଳÎòÜIãÛo¿åÎ;ïäÎ;ïô׈#xî¹çÒú ÝSPdöUÁíc~Kdu‘yÀªº,©ÁcŒIqξÊ^FZÒÊä5kÖ,víÚåÚÖ©S'êÕ«—äˆ`èСŒ5Šœœœm‡âé§ŸæöÛoOz\~ýõ¯eõêÕ<òÈ#ž}-ZórÈ-Z0{ölßEÒï¿ÿ~~úé'ž}öYÏ>6làÑGåÑG)¦ŒŒ žzê)Ú·oÓñ&½Ø ¬Mž<Ùs7Œ?üážõ / IDAT¨-Ú·ï¿ÿ>Ë—/z|ëÖ­™;w.Õ«W%žîÝ»ÛrBcŠ®‘…´ÛŒ,cŒ1Ûì«ÜW6ãʸJ‡âíù5hЀsÎ9dz}„ dgg'1¢à~øaîºë®ÐwâkÒ¤ óçϧjÕª¾ÉÈÈ`âĉŒ1 5€R¥J1mÚ´¨+•LÑb ¬¨jÔ7Ôdî>èvn¯-^ÁßÚì:°lÙ2Ú¶mW,ýû÷gÖ¬Y¶|И"HD2E€ûtÌ|Ýù-‘õoI^EUcŒ1)Ãì«ü5®,qe øúë¯ùïÿëÚV¶lYúôé“äˆ~s饗z¶mذW_}5‰ÑÄæÿþïÿ˜7oõë×e¼Ë.»Œ?þ˜ºuë>6##ƒ‡zˆÙ³g‡:«®eË–,^¼8jhSôX+ï¾û._ýµk[õêÕéÖ­[’#úMÕªUéÝ»·gûôéÓ=‹íåÕ AÞ{ï=&Mšx§†*Uª0vìX^xáÊ”)èXcLzPÕlU 4n¶ú8L€ÞÀ'6#ËcŽ~g_Y+ãÛ¤I“\w·èÙ³'+VLrDGž?Új•t/æžë¼óÎcÕªUŒ5*æÕ7;wfáÂ…Œ7ŽråÊÅO=øê«¯¸÷Þ{©S§NÌã4nܘÇ{ŒeË–qúé§Ç“I?V+eÊ”aܸq®m 6LùŒ£;3Î;{¶ïرƒJ•*:NFFC‡eðàÁ¼ñÆLŸ>wÞy‡ 6è[ªT)Ú·oOß¾}2d*T8¢ýÁ,°#„×.ùU¬XÑóñŽg©c•*U<ÇgÉjÕªyŽ{ì±Çz|§N\÷› ìÞ½;5kÖ,p,߈¸6l:u*p2wÝ4É£ª»Dd,p-ð7 J!‡å­‘5¸Õv‘2ƘâGDêÞ{Ö;r€À]–´2~µmÛÖóótÇŽ“Í‘J•*ÅôéÓY³fk{FF999G,Ñ‹÷óýI'äùxijô®|ùòŒ9’n¸×^{Ù³góî»ïzþl%J” M›6œyæ™ôíÛ—SO=5æs»)W®·Ür #FŒàÍ7ßä•W^áÝwßeåÊ•žK3³²²8餓hÓ¦ }úôá÷¿ÿ}B–#šô @ÔvVVûöíKA8¦(øñÇÙ¼y3[·n¥téÒT«VFQ¢„åC Ãi§Æ‡~èÖÔJU?OvËÛ¶mùàƒR‘1©·gÏÖ®]ËŽ;Ø»w/ÕªU£zõêÔ¨Qƒ¬¬¬¤ÇsèÐ!6nÜÈÖ­[Ù¶m¥J•¢J•*T®\™Úµk§|‰ _™2eØ¿û-ã`«U«µjÕJuƘÈ3#ë ü%²–ZòÊcŠŸ(µ¯lWAcЏråÊѼyóT‡ñ«’%KrÜqÇÙŠc5°Œ1Ƨª»UõàX¢×Ⱥ#yQcŒI¢üµ¯¬Æ•1Ƙ„²–1Ƙ˜’ȲÙWÆS å›}e‰+cŒ1Ia ,cŒ1qóHdÙì+cŒ)žn²°Ä•1Ƙ$²XÆcB“§FÖ“ªº+ÕñcŒ —ˆÔÄÙ¶¹%­Œ1Æ$“%°Œ1ƄΒWÆSlý¬ªCS„1Ƙ£-!4Æ“ÖDÄöF6Ƙ4¡ªšêŒ1Æ,eŒ1&ݽ,"sEä”TbŒ1ÆcŒI K`cŒI[‘¤ÕùÀÀG–È2ÆcŒ1æèd ,cŒ1él4 ‘ÿ,‘eŒ1ÆcÌQÉXÆcÒR$AÕÍ­‰#Y§&72cŒ1ÆcL²YËcLºÊ;ûÊMÞDÖBKdcŒ1ÆS|YËcLÚÞ¶ù<¤+𡈼,"­™1ÆcŒ1&,eŒ1&í¨ã. 
p3°ÕÇaô–ÛŒ,cŒ1ÆcŠK`cŒI[ªº[UŽî ¶YÍ 1ÆcŒ1&),eŒ1&í©êUMl3²NHdlÆcŒ1Ƙij–1Ƙ"#ÏŒ¬cñ—ÈZÌIx`ÆcŒ1Ƙ„²–1Ƙ"'@"k¤ªæ$/2cŒ1ÆcL"XËcL‘UH"Ëf_cŒ1ÆSLdÙùïÌÎ.p—1Ƙ$9|ø°W“½9{ðHdÙì+cŒ1…qýÛj×CÆ“:ïÁÙÀžü÷:tˆ$<(cŒ1íڵ˫iw2ã(Šò$²Žf§:cŒ1i¯ÀµÀîÝö'×cRaß¾}:tÈ­iwuCÖ­[—РŒ1Æ”““ÃÆ½šýì¼gUÝ©ªšê8Œ1Ƥ=׿­6l 'çÿ³wçÑV—ÕãÇßû"¨ˆ³i)j9à@ˆ&N¡iZ‰CY~MÓÒÌJM´o©’jNýÌl2sªËüšC’³‰8!fbN8䀂€€ û÷ǹâ9—ó¹÷ wx¿Öb­<ϰ7äÂ{÷ÝÏóØÄ+Iöâ‹/VšÔ¯ýë :”»îº«iKRÏ2~üxöÞ{o>ýéOóÊ+¯´5õ¸Ì¼¯Qy©:™9-3GkR]!ë઺'&Iª‰Ì¼8¾Òø+¯¼Â§?ýiöÙgž|òÉf&IÝÃwÞÉ'?ùI>ø`¦OŸ^iÚ\`ÿÌ|iÁ£\7UD|¸˜ÒO+úøÇ?ξûîËN;íÄ&›lÂb‹-Ö¾ß$uS™ÉÓO?ÍèÑ£ùÃþÀwÞɼyóµì‚ÌÞˆüÔ1Ñ8 8X¡Ì”2ó²Æf%Iꨈ¸øN[sZZZ:t(ÿó?ÿÃ;ìÀÇ>ö1JocI’æ›3gãÆã¶Ûn㪫®âÑGùÖQ‡dæ/([Àˆˆo?zU“TŸ>}èß¿?Ë/_íÕ ’Ô½M›6_|‘3fYöàÈÌ\d•KG…BÖ3À†™Yö(Š$©óŠˆà<QÄZÐ’K.Ék¬A¿~ýê—˜$u!o¿ý6'NdöìÙÕ.™ |'3/*7X±€Ÿ~,S0OIR1s€ïgæÙÍNDí·P!ëH»¯$©k‹ˆ£ÓšHR}½|53o¨4¡Í@D¬ \ ©mn’¤VÏR:jvo³Qm´¾Z8-3ËÞ))Iê:"b[àR`ífç"IÝÔÀW2ó_mMZøÂÈÌg€m€o¯Ö&7I0ø!°‘Å«î¥õÕB‹W’Ô ´^ì¾10‚Ò»%Iµñ*p0°Í¢ŠWPEÖû&G,| ø&> .IíõðkàÂÌ|»Ùɨs‹ˆïD“¤Î!"V¾|´¹ÙHR—õ(¥‡/ÉÌYÕ.*TÀz߈€ÏÛ›ýYÄ«…’ÔC½<Ü Üü=Ûû—¯zœˆø¥o’ŽË̱MNG’Dé¹Á­€¡ÀFÀ‡šš”$uN ¼<ŒnÎÌ'Û³Q» XØ(bI``)`‰šl*I]×l`ðzfNkv2êš"bMài ¥ÿøßŒ°%IOD,M©ˆÕèÝät$©Ùfòßï‡ =Ë^IÍ X’$©¶Z»¯^èc Y’$Iêq,`I’Ô -Ô}UŽ…,I’$õ‹|…P’$5ʼnT.^AéÞÉaÀ˜ˆ¸1"7&-I’$©ñ,`I’Ô9= TóJå‚…¬ë"b`}Ó’$I’Ï#„’$uRÑ8 8X¾Êe-”$IR·cK’¤N."–†GR¬uðÃÌW¯Ü$I’¤Fð¡$I\fNÉÌ‘ÀÀqT´ðóÀvõÌM’$Ij;°$Iêb -|X'3g6$1I’$©NìÀ’$©‹ÉÌi™9ŠEwdjñJ’$IÝX’$uq:²ì¾’$IR·a–$I]\…Ž,»¯$I’ÔmØ%IR7ÓújáÌÌœÕì\$I’¤Z°€%I’$I’¤NÍ#„’$©¢ˆðkI’$5_”J’¤¶ü8"n‹ˆÁÍND’$I=—G%IRYñaà_@_ ›€™9¶©‰I’$©Ç±K’$Ur,¥â@À1q£Y’$Ij$;°$IÒ,Ô}UŽY’$Ij;°$IR9 v_•cG–$I’Æ–$I*çàí*æ-XȺ."Ö7-I’$õD!”$IeED?à0JÝXËW¹Ì£…’$Iª9 X’$©M±,08’b…¬(²¯Wn’$IêUó„%I’¤v°€%IRWûT9ýà`' Oó~ü*3g¶/î-"Ž¥ôçu\fŽmv>’$I=•,I’º¨ˆØ¸ ˆ:‡z8<3o¨sœN%"–žV¸ a!K’$©ñ|…P’¤.("Z€ó¨ñ `5àúˆøyDTÓ¹Õ]N©x¥?çaÀ˜ˆ¸1"7/-I’¤žÇ,I’º ˆ8øUBßìž™3š»aê¾*ÇŽ,I’¤²K’¤.&"–NmRø€«"¢_Í´`÷U9vdI’$5,I’ºž€·c]wS*~}8¸ ˜TpŸ=€áíˆß•ÌÞ®bÞ‚…¬ë"b`}Ó’$Iê™ßx§¶gm0"":Rxë¶"âЈ¸-"7;I’¤fò¡$IPD|8«À’— Z_ÆkO¼#€óÛ³˜ ¬Iéü›€™9¶©‰I’$5,I’:™ˆX…Ráb™˾š™¿mg¼e À‡ .F©ÈvQf¾±Ðž«?¢ÔeUç(áÞ+˜G·‡.ô±…,I’Ô#y„P’¤Î犯.ë@¼)^¼ ÎÌ‘ ¯2sbf ìM©C«kûÌ£[jí¾*w¬2€aÀ˜ˆ¸Ñ£…’$©§°€%IR'›QêZªVGf;[ª#âcÀ—vȨ̀™\æ5À±öþjÁ\º«ƒ)¬ÄB–$IêQ,`I’ÔIDDçQì¿ÏWfæý{6ЧÀüIÀî庮Úp!Píü!ѯÀÞÝÙÛUÌY°u]D ¬sN’$IMaK’¤ÎãKÀvæOŽko°ˆØ ؽಋ¾tØz§Õß«œÞX£`NÝNfþ”ÒŸÃqT_Èú<ðˆY’$©;²€%IR'Kg\vff¾ÔÎx‹ç\vufÞØžxÀ’æ®ØÎÝJfNËÌQ/dy´P’$u;°$Iꎡí;ö¥ãíu°qùïGµ'PD,lQ`É»í‰Ó]u°õízæ&I’Ô(°$Ij²ÖÏ!—™í*ôDÄòÀÉ—›™/·'ðнªøZ;ãtkí(dÍN«{b’$I `K’¤&ËÌYÀ àçÀÜ*–Ü ü¾!OV*0ÿJ¹µW‘Wßlï±Èž¢@!ëÒÌ|¶q™I’$ÕO´óÕmI’T­¯È ìPaÊ<`ËÌ|¨û¯Œ£tYz5’™¶3Þ@‘µ¿Í̯¶'VOÕújãaÀ±Àò­Ï6°€%I’º ;°$IêD2ó±Ìüðà_e¦\ÚÞâU«S}ñ à²ö¯Z=ÂvUbõH:²ì¾’$IÝŠX’$uR­wc N t‡ÔT`ýÌ|µû}¸¹À’i­ñ^ig¼[ ,y íøâ$"666–nýx 0Ó“Š9±,°xf¾Þì\$I’je±f' I’Êk½ë̈¸8˜ÐâÕb”º¯Š8£Å«^ÀY—ý¸Hñ*"–¢tþÀEÌ}¸ŒRGÙ„‚yu)™9¥Ù9H’$ÕšX’$õñà‚Kž6ÌÌ™íŒ÷-à¢K&eæì*÷߸X¯xvü‘™÷´c­$I’šÀ;°$Iêæ"b`DÁeGw xµ0²à²«)^EÉ)À´¯x°#pwDÜ[¶s!"ö‹ˆ#bp³s‘$I=›,I’º¿“ Ì¿33¯é@¼“€˜ÿpõ¢&µƒü5¥;Á¢}©½ÏÎÀ}ñ³Ö¢›Ðz ôÀ0`Œ…,I’ÔL°$IêÆ"bð­KæGv ÞúÀá–$0¼Ê»¯–VjWb•µßžŒˆ=j¼wE±XDÜ»7*f;ì ¬ßú¿ Y’$©‰,`I’Ô½C±G[~•™v Þ¹@ïó›™V313§»Sê(«µ7DÄÅ­¯?ÖÛ·=€?FÄ_#bÓĬZk÷Õ å†°%I’šÀKÜ%Iê¦"bpc%S€õ2óõvÆû ps%Ó€õÛóÒaD|•Ò%îõ(6= ËÌ7ê°÷ü;ÉžV\àã9À/€“2óÍzÄ-""ö~[ÅÔn¢t)þØúf%I’z2;°$Iê¾N/8ÿG(^õ¦ÔíUÄéí)^d楔.coW¾‹ð J—¼¯Y‡½¡t¡þŠ }¶¥®¬ qTëŸg3õ&W1oÁެë"b`}Ó’$I=•X’$uS±¥¢Ò®ULŸlœ™ïµ3Öpà¼Kž´÷¥Ãâ® ü XðÞýÀeÀ¿(†>ì -®ü’™ÕrªÒz'Ùc,úXç?ïeæŸk»¨ˆX88X¾ÊevdI’¤º°€%IR7×z´ï\`ƒ6¦íž™EŽ.¸ÿJ”ŽÄU[äøbf^Ûžxeâ÷.> RéÅÖ?‡ €Øþ&`·*/™_¤ˆø °K%?ÌÌ‘µˆÝ^²$IRg`K’¤ õHÚáÀIÀr ßš™EŠ* ï}!¥Gµþ–™;´7^…Z€U2óÕEÌ[8›bùî—™Wv$¿ÖØ»E:ªÞ6ÊÌg:»ÚYÈú}fîS¿¬$IROaK’¤$">œ|èEéòðgæøvî· ð0Õ¿t8œ™µ'^­DÄaÀO(Ýá´(u2svâõÖ/°ììÌ<º½1ë¥@!k¥·Æ5$1I’Ô­y‰»$I=Hf¾‘™ß6î~ÖÞâU«s©¾xðËf¯2óBàÌ*§¯|¡ƒ!·îS­×);Ìœš™£€5〷+L½Úâ•$Iª;°$IêÁ"bñÌœÕε{×X2X/3ßhG¬~™9­èºEì¹ð2°lÓ¯ÌÌý:ou`°‹îü:$3Ñ‘xR¡#Ëî+I’TS°$IRa±80X§À²ïfæ¹Uîÿà J/(n ,Ý:ô6ðð(ðwà–Ì|®@ Ç l_ÅÔ§3³Èñ¿¶bnMéÅÆ-*LyØ<3çÖ"^£,TȺջ¯$IR-YÀ’$I…EÄ1”º‰ªõO`“EÝ#ÕZ |X¼Ê½Ÿ 
t9úŸŠ~"â^`ë*¦¾™™ªvß*â°?p:°êBÃÛg浊Õh±,°Tf¾Òì\$IR÷aK’$¦TZ¦À²]3óÿ±ï”ŠPEººöðGJGÿÚÖñȈè¼Iu…²'2s“äÕVÇß–®ÉÌ/Õ:Ž$IRWç%î’$©¨S)V¼º¹ŠâÕúÀÝt¬xð!à`à&ൈ¸""öŠˆ¾eæ~ƒê»¼)šHD|!"¾½*ÍÉÌi™y"°!p%pLÑ8’$I=X’$©j±0†ê6Ø43ŸjcÏ~” Dëv<Êf÷£)Ýõ¥‚Ù*U®ÿBfV}a}ëñOýqÀQ™ù×B‹ˆØƒRArDfŽmv>’$©y,`I’¤ª´ÞÛt°meçeæQ‹Ø÷ÿ‡u$·:{X-3gT» "F?Xèã?Ggæ„Z&×]µþûö °9”ºê,dI’ÔCy„P’$U%K?õAéÒôj¼ü¨­ ±¥£|ÙE‹W«ÿ[fh`|Dœßzѹڶ'¥â@À1qcD n^Z’$©,`I’¤ªµƒÛ 8˜¼ˆé'dæ[‹˜sЧhÀ„Ö_ï\[Ô,àÂÞ3½ IDAT‚kÎÊݹЛÒïùéˆ8¤­û±z²Öî«ãË a!K’¤É–$I*$3ggæùÀÇ€‹¹e¦=ü¢Šíö,þ`ÝÌ\/3×£ôrߦ”^ò» ˜Sp¿E¹83_©vrD|Ø»Š©+Sú³Õ^$ß“,Ø}UŽ…,I’z X’$©]2óÍÌü0¸s¡á£2³\aë?"âC”Š`Õº–ÒeêÏ.üÌ—™£2s(¥ÂÐÿ—¯Ø»œ©À)ÕNŽˆ༂1Æf欂kz‚åYt‡¼¿u]D ¬oZ’$©Y¼Ä]’$ÕDD| 8‹RQf¯*æo Ü[åöïkeæëò ` ðYà3ÀÖÀbÕ®~˜™# Ä;˜êºÎ曬Wä÷Ô“DÄÒÀ¡À±” ZÕð²wI’º) X’$©f"bI _f¾QÅÜm»«Üú™ù?Èk%àiª/„¼ ÈÌw«ÜÙÖýW.ÖÑ™yvù=’…,I’!”$I5”™3ª)^µ*rûøöä³€Q}ñJÅ¥ªŠW­N¤XñjpAù=VfNÍÌQÀZÀ)v´°ê:I’Ô¹YÀ’$IÍòÕ_º¾T{ƒDÄàKFgæÕöÿ¥—‹øß̬÷ ŠÝJfNi=Ò¹¥Kûß®bÙÉõÍJ’$5Š,I’Ô­NW9}h{bDDoàçTÿ5Ï`xÁ0?ú˜kfÞXíäˆX2"þßh½(¾G[ #kMÚ.dÝ”™6.3I’TO=þ‹ I’ÔT—W9oˈةûŸlR`þE™9®Úɱ °[ýçG˜ð¿À'(âÆFÄö×wKU²<>(IR7â%î’$©i"b9`"ЯŠé¯;dæÓUî½p¥ûªñ&¥W«9šFD,< ¨r€ 2³ê¯ˆX ø'[ v·¶Ðeï÷eæ°&§$I’jÈ,I’Ô4™9™êï)Zx "¾ÖÖQºˆh‰ˆ#€K¨¾xpbµÅ«V‡R¬x5‰âw2Aùû¿öžŒˆÓ[ 7=ÞB—½Öì|$IRmÙ%I’šªµu+°ceÿ®î^æRzep[à@``Á4¶È̹ÕLŽˆ§ Ä8<3/¬vrëåó÷±è"Ü¿€KÒ/ì$IR7eK’$5]D¬ Ü|¼ áødfÞS킈¸RVµžefU¯.FDPºûª·dæg ä#I’Ô¥x„P’$5]f¾ì@©ã¨Ñ®,X¼R0ÆÿV[¼juÕ¯æPºè]5;FÄuQ´£O’$Õ˜,I’Ô)´Þ‡µ#p¥#ð.¥슸X¬Àü2óÖj'GD?à´"ùdæøóU½“€ÏDÄ1¸Ù I’ÔSy„P’$u:±p1°IC’™?¨vrDì \_`ÿYÀF™ù¯1NޝrúkÀú™9%"zmýµ¥£‘ÿ^È̉r;·-ôq7#2slã³Rgýµ•%[IRO6Ò _žÎÌ7;º¡,I’Ô)µ^î¾7p8°MB¼¬›™S«Ìgq`<°N£2³ê¯ˆX xX¢Ê%ߦ»[•^$|¸¸¸#3çU›SOwŸ¬0l!«‡k½·oo`g`;`¹æf$IÞ«ÀhàfJÝéÓŠn`K’$uz±6° °°ð!þ{ÂK”^|ø ð©*·=,3Z ‡c3ªO©ûi½j d­1®ö*ců„ø0ø½¯–W¡ûª Y=LD|8ØbG‰%Iÿ5ÒÕÎ*Ô¥î×-’$©»ˆˆ1ÀæULý°AW?L©@V©Ã©œƒ2ó’j'GÄÀöï¨{€C2ó ŒÙ%DÄþ”î:«¶«&€“3ó±º%¦¦i-¢ŸK©p%Iª¹”®Œ813ß^Ôd/q—$IÝBD, |¼Êéÿ¯à«€§Q¬xõpiµ“[ï¯:·Àþµ°-0&"öipÜN/3/Ö tÁÿ"¿ /{ï¶"âÛÀX¼’¤Zë <ŸYÔd X’$©»LuGz¦E:£6¾Z Ž,xÏÔÁÀÀóke)àʈøNbwj™953GkR¬5ŒRaÐBV‹GÄ•ÀOñRvIª§•ÿ‹ˆ“ÚšäBI’Ô-DÄ€k«˜úëÌüz•{p7Å.‘¿*3÷­vrD,Géx⇠ÄXÐÿåh X’$©ÛkíDú °CfV<´Àü«€} „¸خڣwÑ›ÒÑÁEÓð7J]=E^7œïûÀiUNŸ,3ÿ‹EÓzÙû‡2ó™fç¢b"â—@ÙöÞ{o®ºê*ZZ|Ä]’jiêÔ©l¿ýö<üðÕ¦ž™‚,I’¤÷‰ˆm»€¨rÉ<à ¶¸Wã(J°Wk&°ifN(°fÁxÛ÷X²ff¾ØžXRW&‹-<6hÐ î¿ÿ~_|ñF§%I=ÂË/¿Ì Aƒxã7Ê ¿¬™³ü‚$IR«ˆh¡TXª¶xpIÁâÕ‡€“ ¦vz{‹W­æœï׈êiŽ¢LñªOŸ>\qů$©ŽV[m5.ºè¢Jëû_œH’$-èkÀæ¿CéµÃ"~,W`þ?Qc,¬ì>êÜ"bˈ¸."6;—Z‹ˆM#b³fçÿ9Ò»o¹±Ã?œ 7ܰÁIRϳ×^{ñ©O}ªÒðWÁ–$I± pjÁe§fæ¿ Ä\`ÿÍÌYóZØöæN&v0žjcðyà‘ˆ¸1"79ŸZZøCëaͶ°ÒÂ.¾øâ|ï{ßkB:’Ô3xbÅŸ n«ZÀ’$I*in úãvÏPzݰˆó€^æ_‘™wŒñ>ÑØ¿À’G3³è‘CÕXDlì2ÿaÀ˜nVÈZøE³“v,÷án»íÆG>ò‘Fç"I=Ö;ìÀzë­Wn(€OYÀ’$I2srf~üµŠ%ß+Ò{Q¼ê¸ó+Ù(ò]øí5ˆ©Žû¼‹­;²¾‡69‡mË}ø…/|¡ÑyHR·çž{VÚΖ$IÒ2s\fî|x²Â´;2óÕî‹gLåôÌ|¹àš…ãö~X`É<à’ŽÄTÇ-Ô}Uv Ý«uN“”ûð“Ÿüd£ó¤oèС•†6°€%I’TFfÞ|8˜²ÀÐÜÖÏŠø°nùs Æ(çp`íóoËÌjW³.ïÿw®’ Y]ù²÷Åß7ã>¬ˆXø@Üå–[ŽÕV[­ÑéHR7`À€JCkYÀ’$Iª 3ggæù”îê¹€Rñê¢ÌWíñaà˜‚¡ÿ73ß-¸fá¸ËÇ\vVGbª62ówÀ”޾]Å’ ë_ö¾ðË&Ä-[4[uÕU‡$ ÚúáÁr°$I’!3'eæp`0¥—áŠ,S`þ=À5c”s °bù·ef5w©2sjfŽÖ¤X!«+-übDÖà˜}Ë~Ø·ìÇ’¤:ëÝ»7}úô)7´”,I’¤*eæc™9©Úù±%Å^œ™™Y8¹÷Ç]bÇø~Gbª>(d­Eé>³ÉU,›_Ⱥ¸ž¹ÕÉ\x+ûýPK‹ß&IR³Tø;Ø¿™%I’ê!"8¾$×–K3sl ŸA…Î’ þP£¸ª“Ìœ’™#)v´pd}³ª‹¦Ý‡%IêÜ,`I’$ÕÇW€!æONèhÐˆØ ør%3)DÔ8Zøpcë­f݇%IêÄ,`I’$ÕXDôN)¸ìÔÌ|µƒqÛÓõuvf>ß‘¸j¼* Y#:zµÉ¾‡7; IRçaK’$©öŽ¡tÔ«ZÏR*¬žm̘1ìµ×^l°Áœp ‹,^U’™ÜsÏ=ì¿ÿþôïߟÓO?Ù³g×8[IµbK’$©k:øHù÷¿«S.ê^6àý T²`!«‘—½÷~+4(ž:‰—^z‰=ö؃O|â\wÝuÌ›W»‡T'MšÄñÇÏ Aƒ¸÷Þ{k¶¯¤Ú±€%I’ÔÅDÄÚÀðK8Ò{ƒT̼X8x»Š%Aã/{_øMD¹ÿM]TfrñųñÆó§?ý©®±ÆÏvÛmÇYgyÚZêl,`I’$u=?/0ÿ·™ù`½’Q÷“™S3s¥BQ‘BV#î|§Î1Ôd³gÏæÀä[ßúS¦L)¼~ùå—g™e–)´&39æ˜c8âˆ#jÚå%©c¼Ä]’$© ‰ˆ€= ,™_§tÔÍeæT`TDü 8²õ×¢žm›_ÈZ7"Ô¹óï숓™÷×1†šdÚ´i|ñ‹_ä–[nYäÜe—]–=ö؃­¶ÚŠÍ7ßœu×]÷}/ Λ7W_}•±cÇrß}÷qå•W2qâÄ6÷üÉO~ÂÌ™3ùùÏÞáß‹¤Ž³K’$©‹ˆˆàÜ‚ËFeæ+õÈG=GfNÉÌ“)v´ð”[õ>¬njΜ9ì¹çž‹,^ 0€+®¸‚W_}•K/½”o}ë[l¾ùæï+^´´´°Új«±ûî»sÆgðÜsÏqà 7ð±}¬Íýñ‹_pñÅwø÷#©ã,`I’$u™98 x¼Ê%sê—‘zšG '¿oPZkà}XÝÎGÁ_ÿú׊ã}ûöåœsÎáÑGeß}÷eÉ%—,´¯^½Øc=xüñÇ9âˆ#™Ëßÿþ÷BûKª= X’$I]HfŽ_ÞXÄôÿÍÌw럕zš* 
Y'g朦´ÐvB]Æ/ùK.ºè¢Šã«®º*wÝuGu½{÷îP¬%–X‚óÏ?ŸQ£FUœóÞ{ïq衇z–Ôd°$I’º˜Ìœ—™¿Ö.Ê î®ihbêq*²Ù}µ ³"b«&ÄU ½öÚk}ôÑÇû÷ïÏ<ÀàÁµ}#à˜cŽaøðÊ»>üðÃ\qÅ5)© X’$I]Tf¾™Ã›š oÀýCð¾BÖÚÀÜ}5_oàŠˆXÔ%óêÄŽ>úh&Ož\vl饗æÏþ3«¯¾z]bŸyæ™l¹å–ÇO<ñDÞ{コĖ´h°$I’º¸Ìügf~ØøpIf>Ôä´Ôeæä̬掶>uJa-à×Þ‡Õ5=üðÃ\~ùåÇÏ=÷\X·ø}úôá'?ù •þõyñŹ馛ê_RÛ,`I’$u™y#°ð½fç"UŸ*W):îóxV—tþùçS©qtÛm·å ƒª{[l±{ï½wÅñË.»¬î9H*o±f' I’¤ÚÉÌYÀ¬fç!-¬µ+ê8`$õÿ>ä̈¸?3¬sÕÈ믿Îï_ùê´ÓO?½bgT­{ì±s¹é¦›xóÍ7Yi¥•:ç7Þ`̘1<õÔS¼ð üûßÿfΜ9¼óÎ;,³Ì2ôîÝ›UVY…þýû³É&›°ùæ›×$nãÇçšk®aüøñ̘1ƒ•VZ‰Aƒ±ÓN;1`À€í=eÊîºë.~øa&MšôŸ_K/½4ýúõcíµ×f“M6aë­·få•W®ÑïH]™,I’$Iu+—ŸiPÈ>Àï#b³Ì\ø…DuB—]v³f•¯½4ˆm·Ý¶a¹ 4ˆÍ7ßœ‡*Ä^~ùåÙf›mØf›mØn»íXvÙeÛ½÷SO=Åe—]ÆÿøGÆ_hmKK Ûl³ ûí·_ùÊWXj©¥Ú•ÃyçÇÈ‘#?ðù½÷ÞˆnÀäÉ“ùö·¿Íï~÷»ÌûÍo~À–[nɱÇËç?ÿùªc¿ûî»\zé¥üú׿æ‘Gaîܹ‹\lµÕVì¿ÿþpÀôíÛ·êxê^,`I’$Iª›Ö#ƒ¿>ÚàÐ~»û Aç÷ÿ÷Ǿño40“’SN9…gŸ}–í¶ÛŽÐÒÒ±ÛwÆŒÃ÷¿ÿ}î¸ãŽŠÇ$eÞ¼yÜ}÷ÝÜ}÷Ýœp œtÒI~øá…s›9s&o¿ýÁºîübÒ›o¾ÉСCyòÉ'ÛÜçïÿ;_øÂ9r$?øÁÚœ;kÖ,Î<óL.¸àÞ|óÍBùf&÷Ýw÷Ýw't'Ÿ|2‡r½zõ*´º>ïÀ’$I’TsQ2¸‡Æ¯æ oRlUiúôéÜ{ï½Ç?÷¹Ï50›’]vÙ…oûÛl¼ñÆ*^Íž=›#<’!C†ð׿þµÝÅ«…Mš4‰áÇ3lØ0f̘Q“=¡T$Ûk¯½Y¼ZÐ;ìÐæøøñã2d'tRáâÕÂÞxã =ôP†Êĉ;´—º X’$I’jªõÈàŸó€ÞMNç̈تÉ9¨ ûÛß*Üpà YsÍ5œQmÌœ9“Ï}îsœþùÌ›7¯.1n¾ùf¾ò•¯Ô¬0vá…r×]wU=íµ×f›m¶©8~ÅW°ùæ›óè£Ö"½ÿ¸÷Þ{}:_þò—yï½÷ÊÆY|ñŹä’K¸ãŽ;ØvÛm+Þ1Ö«W/vÚi'xàÎ=÷ÜŠ—¶ÿûßÿæk_ûZÝŽiªó°€%I’$©Ý"b à! ñ7m7*"¶nvz¿çž{®âØk¬ÑÀLjãÚk¯åùçŸ/;6|øp=ôÐÂ{¶´´ðýïŸï|ç;ç´õ’cQ_ûÚ×xðÁÙu×]Yl±ÿÜêÝ»7Ûo¿=—_~9·ÞzkÙµ§v/¼ðBÙ±¾}ûòÇ?þ‘¯}ík.¬¥¥…#<’+¯¼’Þ½Ë×Çï¸ã.»ì²ªöS×eK’$IRa ¼2x/羫E™ÖŠÍNDÿõÎ;ï”ý<"èß¿ƒ³é¸Ë/¿¼ìç+¯¼2§žzj‡ö>ñÄY|ñÅËŽM˜0¡C{Ï7tèP~ùË_Ò§OŸ6ç•ËcòäÉ\pÁ×üüç?g—]viW^{ï½7çž{nÅñ“N:‰Ù³g·kou °$I’$ÒZº‘Î}d°’Õ)݇U]û‡êjöìÙ_ \b‰%*vÜtV³fÍbôèÑeÇ;ì0–\rÉí¿òÊ+ó‰O|¢ìX¥®§¢~üãW<®·(—\r Ó¦M+;ö¥/}‰ýöÛ¯#©q衇²ë®»–{ñŹþúë;´¿:7 X’$I’ªÖzï üw‘]Ã0à¨f'!*; tܬ«yùå—4h+¯¼òÆöÞ{ïšÄØtÓMË~^©“­ˆM6Ù¤C/úUº¸¾¥¥…3Ï<³ÝûΜsÎ9^rÉ%Ž¡ÎËW%I’$-Ò¯ ž ´}¶¨k88¸ÙIôtmùjoP3­½öÚÜ{ï½L™2…gžy†gžy†—_~™ 6Ø &1V\±ü ØJlETênªÆ+¯¼RñµÀ]vÙ…~ô£íÞ{Aë­·;ï¼3·ÜrËÆFÍ´iÓèׯ_Mb©s±€%I’$©M±,ð+`¯fçRC½QÍN¢§[j©¥*ŽÍ˜1£™ÔÞ²Ë.ËàÁƒ;ÔÑTN¥î£™3gvxï¡C‡¶{íý÷ßOf–;à€Ú½o9tPÙÖ¬Y³¸ï¾ûØyçkOƒG%I’$UÔzdðqºWñj¾7;ž®oß¾´´”ÿ¶´«°êáí·ßæÉ'Ÿ,;öÞ{ïuxÿ¶{íC=TqlÈ!íÞ·œ­·®ü˜èرckK‡X’$I’> T'ôíÛ·ì]XsæÌáÝwßí’waÕÊĉyä‘Gxà¸çž{¸ÿþû™3gN]bõéÓ‡|ä#í^_éÄ•VZ©fÇçëß¿?«®º*¯¼òÊÆžz꩚ÆRçaK’$IÒûDÄ À¥”.;—êªÿþ‹/½ôë­·^ƒ3j¬I“&1až~úéÿüš0a&L`úôé Ëcùå—ïÐú‰'–ý|£6êо• 0 l륗^ªK<5Ÿ,I’$I [ømë¯îlMà¬f'ÑÓ­»îº X/¾øb·*`ýãÿàž{îa̘1Œ7Ž &0iÒ¤f§À +¬Ð¡õ¯¿þzÙÏ—[n¹í[I¥‚[gùóTíYÀ’$I’ô>™ùð@³ó¨·ˆØ XM·îºëV{æ™gØi§˜MíM™2…‹/¾˜Ë/¿œqãÆ5;Š:ÚUéβe–Y¦CûVR©0öî»ïÖ%žšÏ–$I’$©i Pq¬+_È=wî\~úÓŸ2räHÞ|óÍšì¹ÖZk±ä’KV¼È½#z÷îÝ¡õ•^A\zé¥;´o%ýúõ+ûù¼yóêOÍgK’$I’Ô4Ûm·]ű1cÆ40“÷1bÁŽ;îÈ–[nY¨À3cÆ öÝw_n¸á†åлwo6Ûl3vÞyg>ûÙÏ2dÈŽ<òȺ°:ªÒŸO½^ÁÄš¾cIDAT“,wñ?У/ýïî,`I’$I’šfýõ×gå•W.{‡ÒO<Á믿ÎÊ+¯ÜМfÏžÍùçŸÏäÉ“1bK-µŸüä'ùÔ§>ÅŽ;îÈÀiii©¸v—]váî»ï.s¥•VbàÀïû5`À€†2³Ý¿¯zZb‰%Ê~þÎ;ïÔ%Þ”)SÊ~¾ÔRKÕ%žšÏ–$I’$©i"‚¡C‡rõÕW`lîܹüéOâàƒnhN÷Þ{/“'OþÏ?OŸ>›o¾™›o¾™å–[Ž×^{>}ú”];bĈE¯–Zj)vÜqG¶ÞzkȦ›nʪ«®ZUnõާVX¡ì €õ*`-øÿÏ‚êudQÍW¾d,I’$IRƒìµ×^Ç®¹æšfRR®˜6ßî»ï^±xõðÃ3jÔ¨ŠkW]uU.¿ürÞ|óMþøÇ?rì±Çò™Ï|¦êâÀ[o½Uöó¹sçV½G=Tú=<ûì³u‰7a„²Ÿ¯¹æšu‰§æ³€%I’$IjªÝvÛ­bçÌ­·ÞÊO<Ѱ\Þzë-.½ôÒŠãmÛÎ9眊…¤õ×_ŸÇœýöÛ¯âq»j¼ýöÛe?ovkuÖ)ûù³Ï>[1çözë­·xþùçËŽ­½öÚ5¥ÎÖ$I’$©©úöíËž{îYv,39ãŒ3–ËE]ÄôéÓËŽ-³Ì2ì¼óÎeÇfÏžÍ7ÞXv¬¥¥…믿žW\±Ãù½ñÆe?ovkàÀe?ÏÌš¿&ùðÃW¼ l½õÖ«i,u°$I’$IM÷ï|§âØUW]Åßþö·ºç0qâDÎ:묊ãßøÆ7*vO½øâ‹ï{Úm·ÝØpà ;œßŒ3øç?ÿYvlΜ9Þ¿#¶Új«Šc×^{mMcU:Vl»í¶5¥ÎÖ$I’$©é¶Øb >÷¹Ï•›7ox`Ý.‡R§ÐÁ\ñu»¾}ûrôÑGW\_îóù† Òáün¿ýöŠ…ªfw`m´ÑF¬¶ÚjeÇ®¼òÊŠ]mEMŸ>«®ºªì؆nÈ*«¬R“8ê|,`I’$I’:…“N:©âØóÏ?Ï{ìÁŒ3ê{ĈÜzë­Ç¿õ­oµY™4iRű•VZ©C¹Í÷“Ÿü¤âX³ XÁ¿øÅ²cï¼ó?ûÙÏjçç?ÿyÅBæ{ìQ“êœ,`I’$I’:…-·Ü’<°âøßþö7öØc&Ož\Ó¸§œr #Gެ8¾Â +pÌ1Ç´¹G[÷[½øâ‹íÎm¾+®¸‚Ûn»­âx³ XtPűüà?Vë™gžáÄO,;ÖÒÒÂ7¾ñí¯ÎÍ–$I’$©Ó8ï¼óXsÍ5+ŽßvÛm|üãç¾ûîëp¬É“'sÀðƒü ÍyçŸþ"¦­ºêªÇn¸á†Š—ŽWc̘1rÈ!mΙ:u*óæÍkwŒZØtÓM6lXÙ±3fðå/¹ÍNµ¶¼ýöÛì»ï¾¼ûî»eÇ?ÿùϳÖZkµkou 
gittuf-0.9.0/docs/roadmap.md000066400000000000000000000116171475150141000157230ustar00rootroot00000000000000
# gittuf Roadmap

Last Modified: April 24, 2024

This document details gittuf's ongoing roadmap. As gittuf is under active
development, this document is not considered immutable, and some items may be
added or changed. The items are divided between those that are currently being
worked upon, those that are planned for the future, and those that we have
already completed.

## Partial / Work in Progress

### Dogfood gittuf

Once gittuf achieves sufficient maturity, the gittuf source must be protected
using gittuf. This will contribute significantly to the usability and further
development of the tool, and will demonstrate its features and viability.

### Support Developer Teams

gittuf currently identifies each developer by their signing key or identity.
Policies grant permissions to each individual developer. Eventually, gittuf
must support declaring teams of developers, with policies granting permissions
to those teams as a whole. Further, thresholds on required authorizations for
policies must be granular enough to apply across team boundaries. For example,
it must be possible to require two members of the development team and one
member of the security team to sign off on a change. This is not the same as a
total threshold of three across the members of the development and security
teams.

### Support For Different Hats (Roles)

Related to the concept of teams is the idea that a single developer might be on
different teams and wish to choose how an action is perceived. Suppose Alice is
both a maintainer and a member of the security team. She may sometimes be
approving something she authored (wearing her maintainer hat) and other times
doing a security review of a dependency (wearing her security hat). It is
reasonable that she may want to control how a statement of trust by her is
used. These could naturally be linked to the teams for which a statement should
be trusted.
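To make the team-scoped thresholds described above concrete, here is a purely
hypothetical sketch. These types and names are not part of gittuf's metadata or
API today; they are invented only to illustrate why per-team thresholds differ
from a single pooled threshold.

```go
// Hypothetical illustration only: gittuf does not define these types.
type Team struct {
	Name      string
	Members   []string // principal (key or identity) IDs
	Threshold int      // approvals required from this team's members
}

// A rule protecting refs/heads/main that needs two development approvals AND
// one security approval. Any three approvals drawn from the combined pool of
// members would not necessarily satisfy this rule.
var protectMain = []Team{
	{Name: "development", Members: []string{"alice", "bob", "carol"}, Threshold: 2},
	{Name: "security", Members: []string{"dave", "eve"}, Threshold: 1},
}
```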
## Planned

### Integrate with Git Ecosystem

Git forges like GitHub and GitLab allow repository owners to specify policies
such as the developers authorized to push to a branch, the developers who must
approve changes to certain files, and more. These repository policies can be
specified in gittuf, making conformance with them publicly verifiable. In
addition, as gittuf tracks historic policies, it becomes possible to audit
repositories hosted on such forges at some older state.

Another Git-specific tool that gittuf could integrate with is Gerrit, the code
review system. This integration, in combination with support for [in-toto
attestations](#integrate-in-toto-attestations), would allow for transparent and
auditable code review policy enforcement.

### Read Permissions

gittuf's design implements _write_ permission policies such as who can write to
a Git reference or a file. This must be accompanied by support for _read_
permissions. This needs to be developed further, as the feature can range from
the ability to store secrets all the way to maintaining encrypted objects for
certain Git references so that only specific users can read that reference.

### Develop Hash Algorithm Agility Extension

The [hash algorithm agility](/docs/extensions/hash-algorithm-agility.md)
extension describes how gittuf can be used to maintain a record of object
hashes using stronger hash algorithms like SHA-256 while continuing to use
SHA-1. While Git is working on SHA-256 support, it is currently not backwards
compatible with existing repositories and unsupported by major Git hosts and
forges. This feature needs to be fleshed out, as the current document merely
records some early ideas.

## Reached

### Reach Alpha Milestone

The gittuf implementation is built based on the specification defined in the
[design document](/docs/design-document.md). Therefore, as features are fleshed
out and built, the two are updated together. Currently, gittuf is in a
pre-alpha stage. Its core features are still being developed, and therefore the
primary item on the roadmap is building gittuf to reach the alpha milestone.
The gittuf alpha version must include support for the main design document with
features like policies for Git namespaces, file namespaces, key distribution,
the Reference State Log, and the ability to sync gittuf metadata with remote
repositories.

### Integrate in-toto Attestations

[in-toto](https://in-toto.io/) is a framework for comprehensive software supply
chain security. Of specific interest to gittuf is in-toto's Attestation
Framework, which provides a standard way to express software supply chain
claims. By integrating support for source control-specific in-toto
attestations, gittuf can also support verification against requirements
specified by projects like [SLSA](https://slsa.dev/). As of April 2024, in-toto
attestations can be used in gittuf for actions such as approving a change in
the repository. We are actively working upstream with the SLSA project to
enable using other predicate types that may be defined as part of the source
track.
gittuf-0.9.0/docs/testing/000077500000000000000000000000001475150141000154255ustar00rootroot00000000000000gittuf-0.9.0/docs/testing/test-get-started-md.py000066400000000000000000000107761475150141000216100ustar00rootroot00000000000000#!/usr/bin/env python import os import platform import re import shlex import shutil import subprocess import sys import tempfile import difflib import stat REQUIRED_BINARIES = ["git", "gittuf", "ssh-keygen"] SNIPPET_PATTERN = r"```bash\n([\s\S]*?)\n```" if platform.system() == "Windows": EXPECTED_OUTPUT_FILENAME = "tester-expected-win.txt" SHELL = "powershell.exe" else: EXPECTED_OUTPUT_FILENAME = "tester-expected-unix.txt" SHELL = "/bin/bash" GET_STARTED_FILENAME = "get-started.md" # Validate that we have all the binaries required to run the test commands def check_binaries(): for p in REQUIRED_BINARIES: if not shutil.which(p): raise Exception(f"required command {p} not found") # This is required for deleting the directory on Windows def remove_readonly(func, path, _): """Clear the readonly attribute and retry the operation.""" os.chmod(path, stat.S_IWRITE) func(path) # Replacements for some bash commands to work in PowerShell def powershellify(cmds): for i in range(len(cmds)): # chaining commands in PowerShell works with ; instead of &&. cmds[i] = cmds[i].replace("&&", ";") # mkdir generates output in PowerShell, redirecting that to null if "mkdir" in cmds[i]: cmds[i] = re.sub(r'(mkdir\s+[a-zA-Z0-9-]+)', r'\1 > $null', cmds[i]) # quotes work differently in PowerShell, and the -N "" in the # documentation causes ssh-keygen to break down in PowerShell. # This can be solved by encapsulating the "" in single quotes like such: if "ssh-keygen" in cmds[i]: cmds[i] = cmds[i].replace('-N ""', "-N '\"\"'") return cmds def test_commands(): curr_path = os.getcwd() docs_path = os.path.join(curr_path, "docs") testing_path = os.path.join(docs_path, "testing") get_started_file = os.path.realpath(os.path.join(docs_path, GET_STARTED_FILENAME)) os.chdir(testing_path) # Check for supported platform match platform.system(): case "Linux" | "Darwin" | "Windows": expected_output_file = os.path.realpath(os.path.join(testing_path, EXPECTED_OUTPUT_FILENAME)) case _: raise SystemExit("Unknown platform.") # Prepare temporary directory tmp_dir = os.path.realpath(tempfile.mkdtemp()) os.chdir(tmp_dir) try: with open(expected_output_file) as fp1, open(get_started_file) as fp2: # Read in the get_started.md and expected output files expected_output = fp1.read() expected_output = re.sub(r'[\r\n]', '', expected_output) get_started = fp2.read() snippets = re.findall(SNIPPET_PATTERN, get_started) # Prepend the set command to echo commands and exit in case of # failure if platform.system() == "Windows": snippets = powershellify(snippets) script = "\nSet-PSDebug -Trace 1\n {}".format("\n".join(snippets)) else: script = "\nset -xe\n{}".format("\n".join(snippets)) script += "\ngittuf verify-ref main" # Workaround for non-deterministic hashes # Set some environment variables to control commit creation cmd_env = os.environ.copy() cmd_env["GIT_AUTHOR_NAME"] = "Jane Doe" cmd_env["GIT_AUTHOR_EMAIL"] = "jane.doe@example.com" cmd_env["GIT_AUTHOR_DATE"] = "2024-06-03T14:00:00.000Z" cmd_env["GIT_COMMITTER_NAME"] = "Jane Doe" cmd_env["GIT_COMMITTER_EMAIL"] = "jane.doe@example.com" cmd_env["GIT_COMMITTER_DATE"] = "2024-06-03T14:00:00.000Z" # Execute generated script proc = subprocess.Popen( [SHELL, "-c", script], stdout=subprocess.PIPE, stderr=subprocess.STDOUT, universal_newlines=True, env=cmd_env) stdout, _ = 
proc.communicate() stdout = re.sub(r'[\r\n]', '', stdout) # Compare and notify user of result if stdout != expected_output: difflist = list(difflib.Differ().compare( expected_output.splitlines(), stdout.splitlines())) raise SystemExit("Testing failed due to unexpected output:\n {}".format("\n".join(difflist))) else: print("Testing completed successfully.") finally: # Cleanup os.chdir(curr_path) shutil.rmtree(tmp_dir, onerror=remove_readonly) if __name__ == "__main__": check_binaries() test_commands() gittuf-0.9.0/docs/testing/tester-expected-unix.txt000066400000000000000000000016631475150141000222620ustar00rootroot00000000000000+ mkdir gittuf-get-started + cd gittuf-get-started + mkdir keys + cd keys + ssh-keygen -q -t ecdsa -N '' -f root + ssh-keygen -q -t ecdsa -N '' -f policy + ssh-keygen -q -t ecdsa -N '' -f developer + cd .. + mkdir repo + cd repo + git init -q -b main + git config --local gpg.format ssh + git config --local user.signingkey ../keys/developer + gittuf trust init -k ../keys/root + gittuf trust add-policy-key -k ../keys/root --policy-key ../keys/policy.pub + gittuf policy init -k ../keys/policy --policy-name targets + gittuf policy add-key -k ../keys/policy --public-key ../keys/developer.pub + gittuf policy add-rule -k ../keys/policy --rule-name protect-main --rule-pattern git:refs/heads/main --authorize-key ../keys/developer.pub Flag --authorize-key has been deprecated, use --authorize instead + gittuf policy apply + echo 'Hello, world!' + git add . + git commit -q -S -m 'Initial commit' + gittuf rsl record main + gittuf verify-ref main gittuf-0.9.0/docs/testing/tester-expected-win.txt000066400000000000000000000067211475150141000220740ustar00rootroot00000000000000DEBUG: 3+ >>>> mkdir gittuf-get-started > $null ; cd gittuf-get-started DEBUG: 36+ begin >>>> { DEBUG: 39+ >>>> $wrappedCmd = $ExecutionContext.InvokeCommand.GetCommand('New-Item', [System.Management.Automation.CommandTypes]::Cmdlet) DEBUG: 40+ >>>> $scriptCmd = {& $wrappedCmd -Type Directory @PSBoundParameters } DEBUG: 41+ >>>> $steppablePipeline = $scriptCmd.GetSteppablePipeline() DEBUG: 42+ >>>> $steppablePipeline.Begin($PSCmdlet) DEBUG: 47+ >>>> } DEBUG: 49+ process >>>> { DEBUG: 52+ >>>> $steppablePipeline.Process($_) DEBUG: 57+ >>>> } DEBUG: 59+ end >>>> { DEBUG: 62+ >>>> $steppablePipeline.End() DEBUG: 67+ >>>> } DEBUG: 3+ mkdir gittuf-get-started > $null ; >>>> cd gittuf-get-started DEBUG: 4+ >>>> mkdir keys > $null ; cd keys DEBUG: 36+ begin >>>> { DEBUG: 39+ >>>> $wrappedCmd = $ExecutionContext.InvokeCommand.GetCommand('New-Item', [System.Management.Automation.CommandTypes]::Cmdlet) DEBUG: 40+ >>>> $scriptCmd = {& $wrappedCmd -Type Directory @PSBoundParameters } DEBUG: 41+ >>>> $steppablePipeline = $scriptCmd.GetSteppablePipeline() DEBUG: 42+ >>>> $steppablePipeline.Begin($PSCmdlet) DEBUG: 47+ >>>> } DEBUG: 49+ process >>>> { DEBUG: 52+ >>>> $steppablePipeline.Process($_) DEBUG: 57+ >>>> } DEBUG: 59+ end >>>> { DEBUG: 62+ >>>> $steppablePipeline.End() DEBUG: 67+ >>>> } DEBUG: 4+ mkdir keys > $null ; >>>> cd keys DEBUG: 5+ >>>> ssh-keygen -q -t ecdsa -N '""' -f root DEBUG: 6+ >>>> ssh-keygen -q -t ecdsa -N '""' -f policy DEBUG: 7+ >>>> ssh-keygen -q -t ecdsa -N '""' -f developer DEBUG: 8+ >>>> cd .. ; mkdir repo > $null ; cd repo DEBUG: 8+ cd .. 
; >>>> mkdir repo > $null ; cd repo DEBUG: 36+ begin >>>> { DEBUG: 39+ >>>> $wrappedCmd = $ExecutionContext.InvokeCommand.GetCommand('New-Item', [System.Management.Automation.CommandTypes]::Cmdlet) DEBUG: 40+ >>>> $scriptCmd = {& $wrappedCmd -Type Directory @PSBoundParameters } DEBUG: 41+ >>>> $steppablePipeline = $scriptCmd.GetSteppablePipeline() DEBUG: 42+ >>>> $steppablePipeline.Begin($PSCmdlet) DEBUG: 47+ >>>> } DEBUG: 49+ process >>>> { DEBUG: 52+ >>>> $steppablePipeline.Process($_) DEBUG: 57+ >>>> } DEBUG: 59+ end >>>> { DEBUG: 62+ >>>> $steppablePipeline.End() DEBUG: 67+ >>>> } DEBUG: 8+ cd .. ; mkdir repo > $null ; >>>> cd repo DEBUG: 9+ >>>> git init -q -b main DEBUG: 10+ >>>> git config --local gpg.format ssh DEBUG: 11+ >>>> git config --local user.signingkey ../keys/developer DEBUG: 12+ >>>> gittuf trust init -k ../keys/root DEBUG: 13+ >>>> gittuf trust add-policy-key -k ../keys/root --policy-key ../keys/policy.pub DEBUG: 14+ >>>> gittuf policy init -k ../keys/policy --policy-name targets DEBUG: 15+ >>>> gittuf policy add-key -k ../keys/policy --public-key ../keys/developer.pub DEBUG: 16+ >>>> gittuf policy add-rule -k ../keys/policy --rule-name protect-main --rule-pattern git:refs/heads/main --authorize-key ../keys/developer.pub Flag --authorize-key has been deprecated, use --authorize instead DEBUG: 17+ >>>> gittuf policy apply DEBUG: 18+ >>>> echo "Hello, world!" > README.md DEBUG: 19+ >>>> git add . ; git commit -q -S -m "Initial commit" DEBUG: 19+ git add . ; >>>> git commit -q -S -m "Initial commit" DEBUG: 20+ >>>> gittuf rsl record main DEBUG: 21+ >>>> gittuf verify-ref maingittuf-0.9.0/experimental/000077500000000000000000000000001475150141000155155ustar00rootroot00000000000000gittuf-0.9.0/experimental/gittuf/000077500000000000000000000000001475150141000170175ustar00rootroot00000000000000gittuf-0.9.0/experimental/gittuf/attestations.go000066400000000000000000000552661475150141000221060ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package gittuf import ( "context" "encoding/json" "errors" "fmt" "log/slog" "os" "strings" githubopts "github.com/gittuf/gittuf/experimental/gittuf/options/github" "github.com/gittuf/gittuf/internal/attestations" "github.com/gittuf/gittuf/internal/attestations/authorizations" "github.com/gittuf/gittuf/internal/attestations/github" githubv01 "github.com/gittuf/gittuf/internal/attestations/github/v01" "github.com/gittuf/gittuf/internal/dev" "github.com/gittuf/gittuf/internal/gitinterface" "github.com/gittuf/gittuf/internal/rsl" "github.com/gittuf/gittuf/internal/signerverifier/dsse" sslibdsse "github.com/gittuf/gittuf/internal/third_party/go-securesystemslib/dsse" "github.com/gittuf/gittuf/internal/tuf" "github.com/go-git/go-git/v5/plumbing" gogithub "github.com/google/go-github/v61/github" ita "github.com/in-toto/attestation/go/v1" ) const githubTokenEnvKey = "GITHUB_TOKEN" //nolint:gosec var ( ErrNotSigningKey = errors.New("expected signing key") ErrNoGitHubToken = errors.New("authentication token for GitHub API not provided") ) var githubClient *gogithub.Client // AddReferenceAuthorization adds a reference authorization attestation to the // repository for the specified target ref. The from ID is identified using the // last RSL entry for the target ref. The to ID is that of the expected Git tree // created by merging the feature ref into the target ref. The commit used to // calculate the merge tree ID is identified using the RSL for the feature ref. 
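//
// The following is an illustrative sketch only, not additional API: it shows
// how a caller might record an approval for merging a feature branch into
// main. It assumes ctx is a context.Context and repo is a *Repository already
// obtained by the caller; the key path is a placeholder and error handling is
// elided.
//
//	signer, err := LoadSigner(repo, "/path/to/approver-ssh-key")
//	if err != nil {
//		// handle error
//	}
//	// Approve the merge of refs/heads/feature into refs/heads/main and sign
//	// the resulting attestations commit.
//	err = repo.AddReferenceAuthorization(ctx, signer, "refs/heads/main", "refs/heads/feature", true)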
func (r *Repository) AddReferenceAuthorization(ctx context.Context, signer sslibdsse.SignerVerifier, targetRef, featureRef string, signCommit bool) error { if signCommit { slog.Debug("Checking if Git signing is configured...") err := r.r.CanSign() if err != nil { return err } } var err error targetRef, err = r.r.AbsoluteReference(targetRef) if err != nil { return err } featureRef, err = r.r.AbsoluteReference(featureRef) if err != nil { return err } var ( fromID gitinterface.Hash featureCommitID gitinterface.Hash toID gitinterface.Hash ) isTag := false if strings.HasPrefix(targetRef, gitinterface.TagRefPrefix) { isTag = true } slog.Debug("Identifying current status of target Git reference...") latestTargetEntry, _, err := rsl.GetLatestReferenceUpdaterEntry(r.r, rsl.ForReference(targetRef)) if err == nil { if isTag { return fmt.Errorf("cannot approve a tag that already exists: %w", gitinterface.ErrTagAlreadyExists) } fromID = latestTargetEntry.GetTargetID() } else { if !errors.Is(err, rsl.ErrRSLEntryNotFound) { return err } fromID = gitinterface.ZeroHash } slog.Debug("Identifying current status of feature Git reference...") latestFeatureEntry, _, err := rsl.GetLatestReferenceUpdaterEntry(r.r, rsl.ForReference(featureRef)) if err != nil { // We don't have an RSL entry for the feature ref to use to approve the // merge return err } featureCommitID = latestFeatureEntry.GetTargetID() if isTag { // for tags, the toID is the commitID the tag will point to toID = featureCommitID } else { slog.Debug("Computing expected merge tree...") mergeTreeID, err := r.r.GetMergeTree(fromID, featureCommitID) if err != nil { return err } toID = mergeTreeID } slog.Debug("Loading current set of attestations...") allAttestations, err := attestations.LoadCurrentAttestations(r.r) if err != nil { return err } // Does a reference authorization already exist for the parameters? hasAuthorization := false env, err := allAttestations.GetReferenceAuthorizationFor(r.r, targetRef, fromID.String(), toID.String()) if err == nil { slog.Debug("Found existing reference authorization...") hasAuthorization = true } else if !errors.Is(err, authorizations.ErrAuthorizationNotFound) { return err } if !hasAuthorization { // Create a new reference authorization and embed in env slog.Debug("Creating new reference authorization...") var statement *ita.Statement if isTag { statement, err = attestations.NewReferenceAuthorizationForTag(targetRef, fromID.String(), toID.String()) } else { statement, err = attestations.NewReferenceAuthorizationForCommit(targetRef, fromID.String(), toID.String()) } if err != nil { return err } env, err = dsse.CreateEnvelope(statement) if err != nil { return err } } keyID, err := signer.KeyID() if err != nil { return err } slog.Debug(fmt.Sprintf("Signing reference authorization using '%s'...", keyID)) env, err = dsse.SignEnvelope(ctx, env, signer) if err != nil { return err } if err := allAttestations.SetReferenceAuthorization(r.r, env, targetRef, fromID.String(), toID.String()); err != nil { return err } commitMessage := fmt.Sprintf("Add reference authorization for '%s' from '%s' to '%s'", targetRef, fromID, toID) if isTag { commitMessage = fmt.Sprintf("Add reference authorization for '%s' at '%s'", targetRef, toID.String()) } slog.Debug("Committing attestations...") return allAttestations.Commit(r.r, commitMessage, signCommit) } // RemoveReferenceAuthorization removes a previously issued authorization for // the specified parameters. The issuer of the authorization is identified using // their key. 
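//
// Illustrative sketch only: it assumes ctx, repo, and signer are set up by the
// caller as in AddReferenceAuthorization, and that fromID and toID are the
// hex-encoded IDs recorded in the authorization being revoked (placeholders
// here); error handling is elided.
//
//	// Revoke a previously issued approval for the same change. The signer
//	// must hold the key that created the authorization being removed.
//	err := repo.RemoveReferenceAuthorization(ctx, signer, "refs/heads/main", fromID, toID, true)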
func (r *Repository) RemoveReferenceAuthorization(ctx context.Context, signer sslibdsse.SignerVerifier, targetRef, fromID, toID string, signCommit bool) error { if signCommit { slog.Debug("Checking if Git signing is configured...") err := r.r.CanSign() if err != nil { return err } } // Ensure only the key that created a reference authorization can remove it slog.Debug("Evaluating if key can sign...") _, err := signer.Sign(ctx, nil) if err != nil { return errors.Join(ErrNotSigningKey, err) } keyID, err := signer.KeyID() if err != nil { return err } targetRef, err = r.r.AbsoluteReference(targetRef) if err != nil { return err } slog.Debug("Loading current set of attestations...") allAttestations, err := attestations.LoadCurrentAttestations(r.r) if err != nil { return err } slog.Debug("Loading reference authorization...") env, err := allAttestations.GetReferenceAuthorizationFor(r.r, targetRef, fromID, toID) if err != nil { if errors.Is(err, authorizations.ErrAuthorizationNotFound) { // No reference authorization at all return nil } return err } slog.Debug("Removing signature...") newSignatures := []sslibdsse.Signature{} for _, signature := range env.Signatures { // This handles cases where the envelope may unintentionally have // multiple signatures from the same key if signature.KeyID != keyID { newSignatures = append(newSignatures, signature) } } if len(newSignatures) == 0 { // No signatures, we can remove the ReferenceAuthorization altogether if err := allAttestations.RemoveReferenceAuthorization(targetRef, fromID, toID); err != nil { return err } } else { // We still have other signatures, so set the ReferenceAuthorization // envelope env.Signatures = newSignatures if err := allAttestations.SetReferenceAuthorization(r.r, env, targetRef, fromID, toID); err != nil { return err } } commitMessage := fmt.Sprintf("Remove reference authorization for '%s' from '%s' to '%s' by '%s'", targetRef, fromID, toID, keyID) slog.Debug("Committing attestations...") return allAttestations.Commit(r.r, commitMessage, signCommit) } // AddGitHubPullRequestAttestationForCommit identifies the pull request for a // specified commit ID and triggers AddGitHubPullRequestAttestationForNumber for // that pull request. The authentication token for the GitHub API can be passed // in as an option. If it is not, it is read from the GITHUB_TOKEN environment // variable. A custom GitHub instance can be specified via opts. 
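//
// Illustrative sketch only: it assumes ctx, repo, and signer are prepared by
// the caller and that GITTUF_DEV=1 is set, since this API is currently limited
// to developer mode. The owner, repository, and commit values are placeholders
// and error handling is elided.
//
//	err := repo.AddGitHubPullRequestAttestationForCommit(
//		ctx, signer, "example-org", "example-repo",
//		"0123456789abcdef0123456789abcdef01234567", "main", true,
//		githubopts.WithGitHubToken(os.Getenv("GITHUB_TOKEN")),
//	)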
func (r *Repository) AddGitHubPullRequestAttestationForCommit(ctx context.Context, signer sslibdsse.SignerVerifier, owner, repository, commitID, baseBranch string, signCommit bool, opts ...githubopts.Option) error { if !dev.InDevMode() { return dev.ErrNotInDevMode } if signCommit { slog.Debug("Checking if Git signing is configured...") err := r.r.CanSign() if err != nil { return err } } options := githubopts.DefaultOptions for _, fn := range opts { fn(options) } if options.GitHubToken == "" { options.GitHubToken = os.Getenv(githubTokenEnvKey) if options.GitHubToken == "" { // still empty return ErrNoGitHubToken } } client, err := getGitHubClient(options.GitHubBaseURL, options.GitHubToken) if err != nil { return err } slog.Debug("Identifying GitHub pull requests for commit...") pullRequests, _, err := client.PullRequests.ListPullRequestsWithCommit(ctx, owner, repository, commitID, nil) if err != nil { return err } baseBranch, err = r.r.AbsoluteReference(baseBranch) if err != nil { return err } for _, pullRequest := range pullRequests { slog.Debug(fmt.Sprintf("Inspecting GitHub pull request %d...", *pullRequest.Number)) pullRequestBranch := plumbing.NewBranchReferenceName(*pullRequest.Base.Ref).String() // pullRequest.Merged is not set on this endpoint for some reason if pullRequest.MergedAt != nil && pullRequestBranch == baseBranch { return r.addGitHubPullRequestAttestation(ctx, signer, options.GitHubBaseURL, owner, repository, pullRequest, signCommit) } } return fmt.Errorf("pull request not found for commit") } // AddGitHubPullRequestAttestationForNumber wraps the API response for the // specified pull request in an in-toto attestation. `pullRequestID` must be the // number of the pull request. The authentication token for the GitHub API can // be passed in as an option. If it is not passed in, it is read from the // GITHUB_TOKEN environment variable. A custom GitHub instance can be specified // via opts. func (r *Repository) AddGitHubPullRequestAttestationForNumber(ctx context.Context, signer sslibdsse.SignerVerifier, owner, repository string, pullRequestNumber int, signCommit bool, opts ...githubopts.Option) error { if !dev.InDevMode() { return dev.ErrNotInDevMode } if signCommit { slog.Debug("Checking if Git signing is configured...") err := r.r.CanSign() if err != nil { return err } } options := githubopts.DefaultOptions for _, fn := range opts { fn(options) } if options.GitHubToken == "" { options.GitHubToken = os.Getenv(githubTokenEnvKey) if options.GitHubToken == "" { // still empty return ErrNoGitHubToken } } client, err := getGitHubClient(options.GitHubBaseURL, options.GitHubToken) if err != nil { return err } slog.Debug(fmt.Sprintf("Inspecting GitHub pull request %d...", pullRequestNumber)) pullRequest, _, err := client.PullRequests.Get(ctx, owner, repository, pullRequestNumber) if err != nil { return err } return r.addGitHubPullRequestAttestation(ctx, signer, options.GitHubBaseURL, owner, repository, pullRequest, signCommit) } // AddGitHubPullRequestApprover adds a GitHub pull request approval attestation // for the specified parameters. If an attestation already exists, the specified // approver is added to the existing attestation's predicate and it is re-signed // and stored in the repository. To find the review information, the GitHub API // is used and the authentication token for the API is passed in as an option. // If the token is not passed in, it's read from the GITHUB_TOKEN environment // variable. A custom GitHub instance can be specified via opts. 
Currently, this // is limited to developer mode. func (r *Repository) AddGitHubPullRequestApprover(ctx context.Context, signer sslibdsse.SignerVerifier, owner, repository string, pullRequestNumber int, reviewID int64, approver string, signCommit bool, opts ...githubopts.Option) error { if !dev.InDevMode() { return dev.ErrNotInDevMode } if signCommit { slog.Debug("Checking if Git signing is configured...") err := r.r.CanSign() if err != nil { return err } } options := githubopts.DefaultOptions for _, fn := range opts { fn(options) } if options.GitHubToken == "" { options.GitHubToken = os.Getenv(githubTokenEnvKey) if options.GitHubToken == "" { // still empty return ErrNoGitHubToken } } currentAttestations, err := attestations.LoadCurrentAttestations(r.r) if err != nil { return err } keyID, err := signer.KeyID() if err != nil { return err } appName := tuf.GitHubAppRoleName // TODO: make this configurable, check appName's key matches signer baseRef, fromID, toID, err := getGitHubPullRequestReviewDetails(ctx, currentAttestations, options.GitHubBaseURL, options.GitHubToken, owner, repository, pullRequestNumber, reviewID) if err != nil { return err } // TODO: if the helper above has an indexPath, we can directly load that blob, simplifying the logic here hasApprovalAttestation := false env, err := currentAttestations.GetGitHubPullRequestApprovalAttestationFor(r.r, appName, baseRef, fromID, toID) if err == nil { slog.Debug("Found existing GitHub pull request approval attestation...") hasApprovalAttestation = true } else if !errors.Is(err, github.ErrPullRequestApprovalAttestationNotFound) { return err } approvers := []string{approver} var dismissedApprovers []string if !hasApprovalAttestation { // Create a new GitHub pull request approval attestation slog.Debug("Creating new GitHub pull request approval attestation...") } else { // Update existing statement's predicate and create new env slog.Debug("Adding approver to existing GitHub pull request approval attestation...") predicate, err := getGitHubPullRequestApprovalPredicateFromEnvelope(env) if err != nil { return err } approvers = append(approvers, predicate.GetApprovers()...) dismissedApprovers = predicate.GetDismissedApprovers() } statement, err := attestations.NewGitHubPullRequestApprovalAttestation(baseRef, fromID, toID, approvers, dismissedApprovers) if err != nil { return err } env, err = dsse.CreateEnvelope(statement) if err != nil { return err } slog.Debug(fmt.Sprintf("Signing GitHub pull request approval attestation using '%s'...", keyID)) env, err = dsse.SignEnvelope(ctx, env, signer) if err != nil { return err } if err := currentAttestations.SetGitHubPullRequestApprovalAttestation(r.r, env, options.GitHubBaseURL, reviewID, appName, baseRef, fromID, toID); err != nil { return err } commitMessage := fmt.Sprintf("Add GitHub pull request approval for '%s' from '%s' to '%s' (review ID %d) for approval by '%s'", baseRef, fromID, toID, reviewID, approver) slog.Debug("Committing attestations...") return currentAttestations.Commit(r.r, commitMessage, signCommit) } // DismissGitHubPullRequestApprover removes an approver from the GitHub pull // request approval attestation for the specified parameters. A custom GitHub // instance can be specified via opts. 
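//
// Illustrative sketch only: it assumes ctx, repo, and signer are prepared by
// the caller and that GITTUF_DEV=1 is set; the review ID and approver identity
// are placeholders and error handling is elided.
//
//	// Record that review 123456789 by "alice" was dismissed, removing her
//	// from the approvers recorded in the corresponding attestation.
//	err := repo.DismissGitHubPullRequestApprover(ctx, signer, 123456789, "alice", true)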
func (r *Repository) DismissGitHubPullRequestApprover(ctx context.Context, signer sslibdsse.SignerVerifier, reviewID int64, dismissedApprover string, signCommit bool, opts ...githubopts.Option) error { if !dev.InDevMode() { return dev.ErrNotInDevMode } if signCommit { slog.Debug("Checking if Git signing is configured...") err := r.r.CanSign() if err != nil { return err } } options := githubopts.DefaultOptions for _, fn := range opts { fn(options) } // We don't hit the GitHub API for this flow, so no need to check the token // option currentAttestations, err := attestations.LoadCurrentAttestations(r.r) if err != nil { return err } keyID, err := signer.KeyID() if err != nil { return err } appName := tuf.GitHubAppRoleName env, err := currentAttestations.GetGitHubPullRequestApprovalAttestationForReviewID(r.r, options.GitHubBaseURL, reviewID, appName) if err != nil { return err } // Update existing statement's predicate and create new env slog.Debug("Updating existing GitHub pull request approval attestation...") predicate, err := getGitHubPullRequestApprovalPredicateFromEnvelope(env) if err != nil { return err } dismissedApprovers := []string{dismissedApprover} dismissedApprovers = append(dismissedApprovers, predicate.GetDismissedApprovers()...) approvers := make([]string, 0, len(predicate.GetApprovers())) for _, approver := range predicate.GetApprovers() { approver := approver if approver == dismissedApprover { continue } approvers = append(approvers, approver) } baseRef := predicate.GetRef() fromID := predicate.GetFromID() toID := predicate.GetTargetID() statement, err := attestations.NewGitHubPullRequestApprovalAttestation(baseRef, fromID, toID, approvers, dismissedApprovers) if err != nil { return err } env, err = dsse.CreateEnvelope(statement) if err != nil { return err } slog.Debug(fmt.Sprintf("Signing GitHub pull request approval attestation using '%s'...", keyID)) env, err = dsse.SignEnvelope(ctx, env, signer) if err != nil { return err } if err := currentAttestations.SetGitHubPullRequestApprovalAttestation(r.r, env, options.GitHubBaseURL, reviewID, appName, baseRef, fromID, toID); err != nil { return err } commitMessage := fmt.Sprintf("Dismiss GitHub pull request approval for '%s' from '%s' to '%s' (review ID %d) for approval by '%s'", baseRef, fromID, toID, reviewID, dismissedApprover) slog.Debug("Committing attestations...") return currentAttestations.Commit(r.r, commitMessage, signCommit) } func (r *Repository) addGitHubPullRequestAttestation(ctx context.Context, signer sslibdsse.SignerVerifier, githubBaseURL, owner, repository string, pullRequest *gogithub.PullRequest, signCommit bool) error { var ( targetRef string targetCommitID string ) if pullRequest.MergedAt == nil { // not yet merged targetRef = fmt.Sprintf("%s-%d/refs/heads/%s", *pullRequest.Head.User.Login, *pullRequest.Head.User.ID, *pullRequest.Head.Ref) targetCommitID = *pullRequest.Head.SHA } else { // merged targetRef = fmt.Sprintf("%s-%d/refs/heads/%s", *pullRequest.Base.User.Login, *pullRequest.Base.User.ID, *pullRequest.Base.Ref) targetCommitID = *pullRequest.MergeCommitSHA } slog.Debug("Creating GitHub pull request attestation...") statement, err := attestations.NewGitHubPullRequestAttestation(owner, repository, *pullRequest.Number, targetCommitID, pullRequest) if err != nil { return err } env, err := dsse.CreateEnvelope(statement) if err != nil { return err } keyID, err := signer.KeyID() if err != nil { return err } slog.Debug(fmt.Sprintf("Signing GitHub pull request attestation using '%s'...", keyID)) env, err = 
dsse.SignEnvelope(ctx, env, signer) if err != nil { return err } allAttestations, err := attestations.LoadCurrentAttestations(r.r) if err != nil { return err } if err := allAttestations.SetGitHubPullRequestAuthorization(r.r, env, targetRef, targetCommitID); err != nil { return err } commitMessage := fmt.Sprintf("Add GitHub pull request attestation for '%s' at '%s'\n\nSource: %s/%s/%s/pull/%d\n", targetRef, targetCommitID, strings.TrimSuffix(githubBaseURL, "/"), owner, repository, *pullRequest.Number) slog.Debug("Committing attestations...") return allAttestations.Commit(r.r, commitMessage, signCommit) } func getGitHubPullRequestApprovalPredicateFromEnvelope(env *sslibdsse.Envelope) (github.PullRequestApprovalAttestation, error) { payloadBytes, err := env.DecodeB64Payload() if err != nil { return nil, err } // TODO: support multiple versions here // tmpGitHubPullRequestApprovalStatement is essentially a definition of // in-toto's v1 Statement. The difference is that we fix the predicate to be // the GitHub pull request approval type, making unmarshalling easier. type tmpGitHubPullRequestApprovalStatement struct { Type string `json:"_type"` Subject []*ita.ResourceDescriptor `json:"subject"` PredicateType string `json:"predicateType"` Predicate *githubv01.PullRequestApprovalAttestation `json:"predicate"` } stmt := new(tmpGitHubPullRequestApprovalStatement) if err := json.Unmarshal(payloadBytes, stmt); err != nil { return nil, err } return stmt.Predicate, nil } func indexPathToComponents(indexPath string) (string, string, string) { components := strings.Split(indexPath, "/") fromTo := strings.Split(components[len(components)-2], "-") components = components[:len(components)-2] // remove last two items which are from-to and system base := strings.Join(components, "/") // reconstruct ref from := fromTo[0] to := fromTo[1] return base, from, to } func getGitHubPullRequestReviewDetails(ctx context.Context, currentAttestations *attestations.Attestations, githubBaseURL, githubToken, owner, repository string, pullRequestNumber int, reviewID int64) (string, string, string, error) { indexPath, has, err := currentAttestations.GetGitHubPullRequestApprovalIndexPathForReviewID(githubBaseURL, reviewID) if err != nil { return "", "", "", err } if has { base, from, to := indexPathToComponents(indexPath) return base, from, to, nil } // Compute details for review, this is when the review is first created as // other times we use the existing indexPath for the reviewID // Note: there's the potential for a TOCTOU issue here, we may query the // repo after things have moved in either branch. 
client, err := getGitHubClient(githubBaseURL, githubToken) if err != nil { return "", "", "", err } pullRequest, _, err := client.PullRequests.Get(ctx, owner, repository, pullRequestNumber) if err != nil { return "", "", "", err } if _, _, err := client.PullRequests.GetReview(ctx, owner, repository, pullRequestNumber, reviewID); err != nil { // testing validity of reviewID for the pull request in question return "", "", "", err } baseRef := gitinterface.BranchReferenceName(*pullRequest.Base.Ref) referenceDetails, _, err := client.Git.GetRef(ctx, owner, repository, baseRef) if err != nil { return "", "", "", err } fromID := *referenceDetails.Object.SHA // current tip of base ref // GitHub has already computed a merge commit, use that tree ID as target // tree ID commit, _, err := client.Git.GetCommit(ctx, owner, repository, *pullRequest.MergeCommitSHA) if err != nil { return "", "", "", err } toID := *commit.Tree.SHA return baseRef, fromID, toID, nil } // getGitHubClient creates a client to interact with a GitHub instance. If a // base URL other than https://github.com is supplied, the client is configured // to interact with the specified enterprise instance. func getGitHubClient(baseURL, githubToken string) (*gogithub.Client, error) { if githubClient == nil { githubClient = gogithub.NewClient(nil).WithAuthToken(githubToken) } if baseURL != githubopts.DefaultGitHubBaseURL { baseURL = strings.TrimSuffix(baseURL, "/") endpointAPI := fmt.Sprintf("%s/%s/%s/", baseURL, "api", "v3") endpointUpload := fmt.Sprintf("%s/%s/%s/", baseURL, "api", "uploads") var err error githubClient, err = githubClient.WithEnterpriseURLs(endpointAPI, endpointUpload) if err != nil { return nil, err } } return githubClient, nil } gittuf-0.9.0/experimental/gittuf/attestations_test.go000066400000000000000000000323761475150141000231420ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package gittuf import ( "fmt" "os" "strings" "testing" "github.com/gittuf/gittuf/internal/attestations" "github.com/gittuf/gittuf/internal/attestations/authorizations" authorizationsv01 "github.com/gittuf/gittuf/internal/attestations/authorizations/v01" githubv01 "github.com/gittuf/gittuf/internal/attestations/github/v01" "github.com/gittuf/gittuf/internal/common" "github.com/gittuf/gittuf/internal/common/set" "github.com/gittuf/gittuf/internal/gitinterface" artifacts "github.com/gittuf/gittuf/internal/testartifacts" "github.com/gittuf/gittuf/internal/third_party/go-securesystemslib/dsse" "github.com/stretchr/testify/assert" ) func TestAddAndRemoveReferenceAuthorization(t *testing.T) { t.Run("for commit", func(t *testing.T) { testDir := t.TempDir() r := gitinterface.CreateTestGitRepository(t, testDir, false) // We need to change the directory for this test because we `checkout` // for older Git versions, modifying the worktree. This chdir ensures // that the temporary directory is used as the worktree. 
pwd, err := os.Getwd() if err != nil { t.Fatal(err) } if err := os.Chdir(testDir); err != nil { t.Fatal(err) } defer os.Chdir(pwd) //nolint:errcheck repo := &Repository{r: r} targetRef := "main" absTargetRef := "refs/heads/main" featureRef := "feature" absFeatureRef := "refs/heads/feature" // Create common base for main and feature branches treeBuilder := gitinterface.NewTreeBuilder(repo.r) emptyTreeID, err := treeBuilder.WriteTreeFromEntries(nil) if err != nil { t.Fatal(err) } initialCommitID, err := repo.r.Commit(emptyTreeID, absTargetRef, "Initial commit\n", false) if err != nil { t.Fatal(err) } if err := repo.r.SetReference(absFeatureRef, initialCommitID); err != nil { t.Fatal(err) } // Create main branch as the target branch with a Git commit // Add a single commit commitIDs := common.AddNTestCommitsToSpecifiedRef(t, r, absTargetRef, 1, gpgKeyBytes) fromCommitID := commitIDs[0] if err := repo.RecordRSLEntryForReference(testCtx, targetRef, false); err != nil { t.Fatal(err) } // Create feature branch with two Git commits // Add two commits commitIDs = common.AddNTestCommitsToSpecifiedRef(t, r, absFeatureRef, 2, gpgKeyBytes) featureCommitID := commitIDs[1] if err := repo.RecordRSLEntryForReference(testCtx, featureRef, false); err != nil { t.Fatal(err) } targetTreeID, err := r.GetMergeTree(fromCommitID, featureCommitID) if err != nil { t.Fatal(err) } // Create signers firstSigner := setupSSHKeysForSigning(t, rootKeyBytes, rootPubKeyBytes) firstKeyID, err := firstSigner.KeyID() if err != nil { t.Fatal(err) } secondSigner := setupSSHKeysForSigning(t, targetsKeyBytes, targetsPubKeyBytes) secondKeyID, err := secondSigner.KeyID() if err != nil { t.Fatal(err) } // First authorization attestation signature err = repo.AddReferenceAuthorization(testCtx, firstSigner, absTargetRef, absFeatureRef, false) assert.Nil(t, err) allAttestations, err := attestations.LoadCurrentAttestations(r) if err != nil { t.Fatal(err) } env, err := allAttestations.GetReferenceAuthorizationFor(r, absTargetRef, fromCommitID.String(), targetTreeID.String()) if err != nil { t.Fatal(err) } assert.Len(t, env.Signatures, 1) assert.Equal(t, firstKeyID, env.Signatures[0].KeyID) // Second authorization attestation signature err = repo.AddReferenceAuthorization(testCtx, secondSigner, absTargetRef, absFeatureRef, false) assert.Nil(t, err) allAttestations, err = attestations.LoadCurrentAttestations(r) if err != nil { t.Fatal(err) } env, err = allAttestations.GetReferenceAuthorizationFor(r, absTargetRef, fromCommitID.String(), targetTreeID.String()) if err != nil { t.Fatal(err) } assert.Len(t, env.Signatures, 2) assert.Equal(t, firstKeyID, env.Signatures[0].KeyID) assert.Equal(t, secondKeyID, env.Signatures[1].KeyID) // Remove second authorization attestation signature err = repo.RemoveReferenceAuthorization(testCtx, secondSigner, absTargetRef, fromCommitID.String(), targetTreeID.String(), false) assert.Nil(t, err) allAttestations, err = attestations.LoadCurrentAttestations(r) if err != nil { t.Fatal(err) } env, err = allAttestations.GetReferenceAuthorizationFor(r, absTargetRef, fromCommitID.String(), targetTreeID.String()) if err != nil { t.Fatal(err) } assert.Len(t, env.Signatures, 1) assert.Equal(t, firstKeyID, env.Signatures[0].KeyID) }) t.Run("for tag", func(t *testing.T) { testDir := t.TempDir() r := gitinterface.CreateTestGitRepository(t, testDir, false) // We need to change the directory for this test because we `checkout` // for older Git versions, modifying the worktree. 
This chdir ensures // that the temporary directory is used as the worktree. pwd, err := os.Getwd() if err != nil { t.Fatal(err) } if err := os.Chdir(testDir); err != nil { t.Fatal(err) } defer os.Chdir(pwd) //nolint:errcheck repo := &Repository{r: r} fromRef := "refs/heads/main" targetTagRef := "refs/tags/v1" // Create common base for main and feature branches treeBuilder := gitinterface.NewTreeBuilder(repo.r) emptyTreeID, err := treeBuilder.WriteTreeFromEntries(nil) if err != nil { t.Fatal(err) } initialCommitID, err := repo.r.Commit(emptyTreeID, fromRef, "Initial commit\n", false) if err != nil { t.Fatal(err) } if err := repo.RecordRSLEntryForReference(testCtx, fromRef, false); err != nil { t.Fatal(err) } // Create signer signer := setupSSHKeysForSigning(t, rootKeyBytes, rootPubKeyBytes) keyID, err := signer.KeyID() if err != nil { t.Fatal(err) } err = repo.AddReferenceAuthorization(testCtx, signer, targetTagRef, fromRef, false) assert.Nil(t, err) allAttestations, err := attestations.LoadCurrentAttestations(r) if err != nil { t.Fatal(err) } env, err := allAttestations.GetReferenceAuthorizationFor(repo.r, targetTagRef, gitinterface.ZeroHash.String(), initialCommitID.String()) assert.Nil(t, err) assert.Len(t, env.Signatures, 1) assert.Equal(t, keyID, env.Signatures[0].KeyID) // Create tag _, err = repo.r.TagUsingSpecificKey(initialCommitID, strings.TrimPrefix(targetTagRef, gitinterface.TagRefPrefix), "v1", artifacts.SSHRSAPrivate) if err != nil { t.Fatal(err) } // Add it to RSL if err := repo.RecordRSLEntryForReference(testCtx, targetTagRef, false); err != nil { t.Fatal(err) } // Trying to approve it now fails as we're approving a tag already seen in the RSL err = repo.AddReferenceAuthorization(testCtx, signer, targetTagRef, fromRef, false) assert.ErrorIs(t, err, gitinterface.ErrTagAlreadyExists) err = repo.RemoveReferenceAuthorization(testCtx, signer, targetTagRef, gitinterface.ZeroHash.String(), initialCommitID.String(), false) assert.Nil(t, err) allAttestations, err = attestations.LoadCurrentAttestations(r) if err != nil { t.Fatal(err) } _, err = allAttestations.GetReferenceAuthorizationFor(repo.r, targetTagRef, gitinterface.ZeroHash.String(), initialCommitID.String()) assert.ErrorIs(t, err, authorizations.ErrAuthorizationNotFound) }) } func TestGetGitHubPullRequestApprovalPredicateFromEnvelope(t *testing.T) { tests := map[string]struct { envelope *dsse.Envelope expectedPredicate *githubv01.PullRequestApprovalAttestation }{ "one approver, no dismissals": { envelope: &dsse.Envelope{ PayloadType: "application/vnd.gittuf+json", Payload: "eyJ0eXBlIjoiaHR0cHM6Ly9pbi10b3RvLmlvL1N0YXRlbWVudC92MSIsInN1YmplY3QiOlt7ImRpZ2VzdCI6eyJnaXRUcmVlIjoiZWUyNWIxYjZjMjc4NjJlYTFjYzQxOWMxNDQxMjcxMjNmZDZmNDdkMyJ9fV0sInByZWRpY2F0ZV90eXBlIjoiaHR0cHM6Ly9naXR0dWYuZGV2L2dpdGh1Yi1wdWxsLXJlcXVlc3QtYXBwcm92YWwvdjAuMSIsInByZWRpY2F0ZSI6eyJhcHByb3ZlcnMiOlsiYWxpY2UiXSwiZGlzbWlzc2VkQXBwcm92ZXJzIjpudWxsLCJmcm9tUmV2aXNpb25JRCI6IjJmNTkzZTMxOTVhNTk5ODM0MjNmNDVmZTZkNDMzNWYxNDhmZmVlY2YiLCJ0YXJnZXRSZWYiOiJyZWZzL2hlYWRzL21haW4iLCJ0YXJnZXRUcmVlSUQiOiJlZTI1YjFiNmMyNzg2MmVhMWNjNDE5YzE0NDEyNzEyM2ZkNmY0N2QzIn19Cg==", Signatures: []dsse.Signature{ { KeyID: "kid", Sig: "sig", }, }, }, expectedPredicate: &githubv01.PullRequestApprovalAttestation{ Approvers: set.NewSetFromItems("alice"), ReferenceAuthorization: &authorizationsv01.ReferenceAuthorization{ FromRevisionID: "2f593e3195a59983423f45fe6d4335f148ffeecf", TargetRef: "refs/heads/main", TargetTreeID: "ee25b1b6c27862ea1cc419c144127123fd6f47d3", }, }, }, "one approver, one dismissal": { 
envelope: &dsse.Envelope{ PayloadType: "application/vnd.gittuf+json", Payload: "eyJ0eXBlIjoiaHR0cHM6Ly9pbi10b3RvLmlvL1N0YXRlbWVudC92MSIsInN1YmplY3QiOlt7ImRpZ2VzdCI6eyJnaXRUcmVlIjoiZWUyNWIxYjZjMjc4NjJlYTFjYzQxOWMxNDQxMjcxMjNmZDZmNDdkMyJ9fV0sInByZWRpY2F0ZV90eXBlIjoiaHR0cHM6Ly9naXR0dWYuZGV2L2dpdGh1Yi1wdWxsLXJlcXVlc3QtYXBwcm92YWwvdjAuMSIsInByZWRpY2F0ZSI6eyJhcHByb3ZlcnMiOlsiYWxpY2UiXSwiZGlzbWlzc2VkQXBwcm92ZXJzIjpbImJvYiJdLCJmcm9tUmV2aXNpb25JRCI6IjJmNTkzZTMxOTVhNTk5ODM0MjNmNDVmZTZkNDMzNWYxNDhmZmVlY2YiLCJ0YXJnZXRSZWYiOiJyZWZzL2hlYWRzL21haW4iLCJ0YXJnZXRUcmVlSUQiOiJlZTI1YjFiNmMyNzg2MmVhMWNjNDE5YzE0NDEyNzEyM2ZkNmY0N2QzIn19Cg==", Signatures: []dsse.Signature{ { KeyID: "kid", Sig: "sig", }, }, }, expectedPredicate: &githubv01.PullRequestApprovalAttestation{ Approvers: set.NewSetFromItems("alice"), DismissedApprovers: set.NewSetFromItems("bob"), ReferenceAuthorization: &authorizationsv01.ReferenceAuthorization{ FromRevisionID: "2f593e3195a59983423f45fe6d4335f148ffeecf", TargetRef: "refs/heads/main", TargetTreeID: "ee25b1b6c27862ea1cc419c144127123fd6f47d3", }, }, }, "no approvers, one dismissal": { envelope: &dsse.Envelope{ PayloadType: "application/vnd.gittuf+json", Payload: "eyJ0eXBlIjoiaHR0cHM6Ly9pbi10b3RvLmlvL1N0YXRlbWVudC92MSIsInN1YmplY3QiOlt7ImRpZ2VzdCI6eyJnaXRUcmVlIjoiZWUyNWIxYjZjMjc4NjJlYTFjYzQxOWMxNDQxMjcxMjNmZDZmNDdkMyJ9fV0sInByZWRpY2F0ZV90eXBlIjoiaHR0cHM6Ly9naXR0dWYuZGV2L2dpdGh1Yi1wdWxsLXJlcXVlc3QtYXBwcm92YWwvdjAuMSIsInByZWRpY2F0ZSI6eyJhcHByb3ZlcnMiOm51bGwsImRpc21pc3NlZEFwcHJvdmVycyI6WyJib2IiXSwiZnJvbVJldmlzaW9uSUQiOiIyZjU5M2UzMTk1YTU5OTgzNDIzZjQ1ZmU2ZDQzMzVmMTQ4ZmZlZWNmIiwidGFyZ2V0UmVmIjoicmVmcy9oZWFkcy9tYWluIiwidGFyZ2V0VHJlZUlEIjoiZWUyNWIxYjZjMjc4NjJlYTFjYzQxOWMxNDQxMjcxMjNmZDZmNDdkMyJ9fQo=", Signatures: []dsse.Signature{ { KeyID: "kid", Sig: "sig", }, }, }, expectedPredicate: &githubv01.PullRequestApprovalAttestation{ DismissedApprovers: set.NewSetFromItems("bob"), ReferenceAuthorization: &authorizationsv01.ReferenceAuthorization{ FromRevisionID: "2f593e3195a59983423f45fe6d4335f148ffeecf", TargetRef: "refs/heads/main", TargetTreeID: "ee25b1b6c27862ea1cc419c144127123fd6f47d3", }, }, }, "multiple approvers, multiple dismissals": { envelope: &dsse.Envelope{ PayloadType: "application/vnd.gittuf+json", Payload: "eyJ0eXBlIjoiaHR0cHM6Ly9pbi10b3RvLmlvL1N0YXRlbWVudC92MSIsInN1YmplY3QiOlt7ImRpZ2VzdCI6eyJnaXRUcmVlIjoiZWUyNWIxYjZjMjc4NjJlYTFjYzQxOWMxNDQxMjcxMjNmZDZmNDdkMyJ9fV0sInByZWRpY2F0ZV90eXBlIjoiaHR0cHM6Ly9naXR0dWYuZGV2L2dpdGh1Yi1wdWxsLXJlcXVlc3QtYXBwcm92YWwvdjAuMSIsInByZWRpY2F0ZSI6eyJhcHByb3ZlcnMiOlsiYWxpY2UiLCJib2IiXSwiZGlzbWlzc2VkQXBwcm92ZXJzIjpbImFsaWNlIiwiYm9iIl0sImZyb21SZXZpc2lvbklEIjoiMmY1OTNlMzE5NWE1OTk4MzQyM2Y0NWZlNmQ0MzM1ZjE0OGZmZWVjZiIsInRhcmdldFJlZiI6InJlZnMvaGVhZHMvbWFpbiIsInRhcmdldFRyZWVJRCI6ImVlMjViMWI2YzI3ODYyZWExY2M0MTljMTQ0MTI3MTIzZmQ2ZjQ3ZDMifX0K", Signatures: []dsse.Signature{ { KeyID: "kid", Sig: "sig", }, }, }, expectedPredicate: &githubv01.PullRequestApprovalAttestation{ Approvers: set.NewSetFromItems("alice", "bob"), DismissedApprovers: set.NewSetFromItems("alice", "bob"), ReferenceAuthorization: &authorizationsv01.ReferenceAuthorization{ FromRevisionID: "2f593e3195a59983423f45fe6d4335f148ffeecf", TargetRef: "refs/heads/main", TargetTreeID: "ee25b1b6c27862ea1cc419c144127123fd6f47d3", }, }, }, } for name, test := range tests { predicate, err := getGitHubPullRequestApprovalPredicateFromEnvelope(test.envelope) assert.Nil(t, err, fmt.Sprintf("unexpected error in test '%s'", name)) assert.Equal(t, test.expectedPredicate, predicate, fmt.Sprintf("unexpected predicate in test 
'%s'", name)) } } func TestIndexPathToComponents(t *testing.T) { tests := map[string]struct { baseRef string from string to string }{ "simple ref": { baseRef: "refs/heads/main", from: gitinterface.ZeroHash.String(), to: gitinterface.ZeroHash.String(), }, "complicated ref": { baseRef: "refs/heads/jane.doe/feature-branch", from: gitinterface.ZeroHash.String(), to: gitinterface.ZeroHash.String(), }, } for name, test := range tests { // construct indexPath programmatically to force breaking changes / // regressions to be detected here indexPath := attestations.GitHubPullRequestApprovalAttestationPath(test.baseRef, test.from, test.to) baseRef, from, to := indexPathToComponents(indexPath) assert.Equal(t, test.baseRef, baseRef, fmt.Sprintf("unexpected 'base ref' in test '%s'", name)) assert.Equal(t, test.from, from, fmt.Sprintf("unexpected 'from' in test '%s'", name)) assert.Equal(t, test.to, to, fmt.Sprintf("unexpected 'to' in test '%s'", name)) } } gittuf-0.9.0/experimental/gittuf/cache.go000066400000000000000000000010671475150141000204150ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package gittuf import ( "github.com/gittuf/gittuf/internal/cache" "github.com/gittuf/gittuf/internal/dev" ) // PopulateCache scans the repository's RSL and generates a persistent // local-only cache of policy and attestation entries. This makes subsequent // verifications faster. This is currently only available in gittuf's developer // mode. func (r *Repository) PopulateCache() error { if !dev.InDevMode() { return dev.ErrNotInDevMode } return cache.PopulatePersistentCache(r.r) } gittuf-0.9.0/experimental/gittuf/cache_test.go000066400000000000000000000037621475150141000214600ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package gittuf import ( "testing" "github.com/gittuf/gittuf/internal/cache" "github.com/gittuf/gittuf/internal/dev" "github.com/gittuf/gittuf/internal/policy" "github.com/gittuf/gittuf/internal/rsl" "github.com/stretchr/testify/assert" ) func TestPopulateCache(t *testing.T) { t.Run("successful cache population", func(t *testing.T) { t.Setenv(dev.DevModeKey, "1") tmpDir := t.TempDir() repo := createTestRepositoryWithPolicy(t, tmpDir) err := repo.PopulateCache() assert.Nil(t, err) firstEntry, _, err := rsl.GetFirstEntry(repo.r) if err != nil { t.Fatal(err) } latestEntry, err := rsl.GetLatestEntry(repo.r) if err != nil { t.Fatal(err) } // This is sorted in order of occurrence for us allPolicyEntries, _, err := rsl.GetReferenceUpdaterEntriesInRangeForRef(repo.r, firstEntry.GetID(), latestEntry.GetID(), policy.PolicyRef) if err != nil { t.Fatal(err) } expectedPolicyEntries := []cache.RSLEntryIndex{} for _, entry := range allPolicyEntries { expectedPolicyEntries = append(expectedPolicyEntries, cache.RSLEntryIndex{EntryNumber: entry.GetNumber(), EntryID: entry.GetID().String()}) } persistentCache, err := cache.LoadPersistentCache(repo.r) if err != nil { t.Fatal(err) } assert.Equal(t, expectedPolicyEntries, persistentCache.PolicyEntries) }) t.Run("successful repeated cache population", func(t *testing.T) { t.Setenv(dev.DevModeKey, "1") tmpDir := t.TempDir() repo := createTestRepositoryWithPolicy(t, tmpDir) err := repo.PopulateCache() assert.Nil(t, err) currentCacheID, err := repo.r.GetReference(cache.Ref) if err != nil { t.Fatal(err) } err = repo.PopulateCache() // No error is reported assert.Nil(t, err) // However, no changes were committed either, because the cache // didn't change. 
newCacheID, err := repo.r.GetReference(cache.Ref) if err != nil { t.Fatal(err) } assert.Equal(t, currentCacheID, newCacheID) }) } gittuf-0.9.0/experimental/gittuf/common_test.go000066400000000000000000000010521475150141000216730ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package gittuf import ( "testing" "github.com/gittuf/gittuf/internal/gitinterface" "github.com/stretchr/testify/assert" ) func assertLocalAndRemoteRefsMatch(t *testing.T, localRepo, remoteRepo *gitinterface.Repository, refName string) { t.Helper() localRefTip, err := localRepo.GetReference(refName) if err != nil { t.Fatal(err) } remoteRefTip, err := remoteRepo.GetReference(refName) if err != nil { t.Fatal(err) } assert.Equal(t, localRefTip, remoteRefTip) } gittuf-0.9.0/experimental/gittuf/helpers_test.go000066400000000000000000000065261475150141000220600ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package gittuf import ( "context" "os" "path/filepath" "testing" "github.com/gittuf/gittuf/internal/gitinterface" "github.com/gittuf/gittuf/internal/policy" "github.com/gittuf/gittuf/internal/signerverifier/gpg" "github.com/gittuf/gittuf/internal/signerverifier/ssh" artifacts "github.com/gittuf/gittuf/internal/testartifacts" "github.com/gittuf/gittuf/internal/tuf" tufv01 "github.com/gittuf/gittuf/internal/tuf/v01" ) var ( gpgKeyBytes = artifacts.GPGKey1Private gpgPubKeyBytes = artifacts.GPGKey1Public gpgUnauthorizedKeyBytes = artifacts.GPGKey2Private rootKeyBytes = artifacts.SSHRSAPrivate rootPubKeyBytes = artifacts.SSHRSAPublicSSH targetsKeyBytes = artifacts.SSHECDSAPrivate targetsPubKeyBytes = artifacts.SSHECDSAPublicSSH rsaKeyBytes = artifacts.SSHRSAPrivate ecdsaKeyBytes = artifacts.SSHECDSAPrivate testCtx = context.Background() ) func createTestRepositoryWithRoot(t *testing.T, location string) *Repository { t.Helper() signer := setupSSHKeysForSigning(t, rootKeyBytes, rootPubKeyBytes) var repo *gitinterface.Repository if location == "" { tempDir := t.TempDir() repo = gitinterface.CreateTestGitRepository(t, tempDir, false) } else { repo = gitinterface.CreateTestGitRepository(t, location, false) } r := &Repository{r: repo} if err := r.InitializeRoot(testCtx, signer, false); err != nil { t.Fatal(err) } if err := policy.Apply(testCtx, repo, false); err != nil { t.Fatalf("failed to apply policy staging changes into policy, err = %s", err) } return r } func createTestRepositoryWithPolicy(t *testing.T, location string) *Repository { t.Helper() r := createTestRepositoryWithRoot(t, location) rootSigner := setupSSHKeysForSigning(t, rootKeyBytes, rootPubKeyBytes) targetsSigner := setupSSHKeysForSigning(t, targetsKeyBytes, targetsPubKeyBytes) targetsPubKey := tufv01.NewKeyFromSSLibKey(targetsSigner.MetadataKey()) if err := r.AddTopLevelTargetsKey(testCtx, rootSigner, targetsPubKey, false); err != nil { t.Fatal(err) } if err := r.InitializeTargets(testCtx, targetsSigner, policy.TargetsRoleName, false); err != nil { t.Fatal(err) } gpgKeyR, err := gpg.LoadGPGKeyFromBytes(gpgKeyBytes) if err != nil { t.Fatal(err) } gpgKey := tufv01.NewKeyFromSSLibKey(gpgKeyR) if err := r.AddPrincipalToTargets(testCtx, targetsSigner, policy.TargetsRoleName, []tuf.Principal{gpgKey}, false); err != nil { t.Fatal(err) } if err := r.AddDelegation(testCtx, targetsSigner, policy.TargetsRoleName, "protect-main", []string{gpgKey.KeyID}, []string{"git:refs/heads/main"}, 1, false); err != nil { t.Fatal(err) } if err := policy.Apply(testCtx, r.r, false); err != 
nil { t.Fatalf("failed to apply policy staging changes into policy, err = %s", err) } return r } func setupSSHKeysForSigning(t *testing.T, privateBytes, publicBytes []byte) *ssh.Signer { t.Helper() keysDir := t.TempDir() privKeyPath := filepath.Join(keysDir, "key") pubKeyPath := filepath.Join(keysDir, "key.pub") if err := os.WriteFile(privKeyPath, privateBytes, 0o600); err != nil { t.Fatal(err) } if err := os.WriteFile(pubKeyPath, publicBytes, 0o600); err != nil { t.Fatal(err) } signer, err := ssh.NewSignerFromFile(privKeyPath) if err != nil { t.Fatal(err) } return signer } gittuf-0.9.0/experimental/gittuf/hook.go000066400000000000000000000032161475150141000203100ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package gittuf import ( "errors" "fmt" "io/fs" "log/slog" "os" "path/filepath" ) type ErrHookExists struct { HookType HookType } func (e *ErrHookExists) Error() string { return fmt.Sprintf("hook '%s' already exists", e.HookType) } type HookType string var HookPrePush = HookType("pre-push") // UpdateHook updates a git hook in the repository's .git/hooks folder. // Existing hook files are not overwritten, unless force flag is set. func (r *Repository) UpdateHook(hookType HookType, content []byte, force bool) error { // TODO: rely on go-git to find .git folder, once // https://github.com/go-git/go-git/issues/977 is available. // Note, until then gittuf does not support separate git dir. slog.Debug("Adding gittuf hooks...") gitDir := r.r.GetGitDir() hookFolder := filepath.Join(gitDir, "hooks") if err := os.MkdirAll(hookFolder, 0o750); err != nil { return fmt.Errorf("making sure folder exist: %w", err) } hookFile := filepath.Join(hookFolder, string(hookType)) hookExists, err := doesFileExist(hookFile) if err != nil { return fmt.Errorf("checking if hookFile '%s' exists: %w", hookFile, err) } if hookExists && !force { return &ErrHookExists{ HookType: hookType, } } slog.Debug("Writing hooks...") if err := os.WriteFile(hookFile, content, 0o700); err != nil { // nolint:gosec return fmt.Errorf("writing %s hook: %w", hookType, err) } return nil } func doesFileExist(path string) (bool, error) { _, err := os.Stat(path) if err != nil { if errors.Is(err, fs.ErrNotExist) { return false, nil } return false, err } return true, nil } gittuf-0.9.0/experimental/gittuf/hook_test.go000066400000000000000000000035151475150141000213510ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package gittuf import ( "os" "path/filepath" "testing" "github.com/gittuf/gittuf/internal/gitinterface" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) func TestUpdatePrePushHook(t *testing.T) { t.Run("write hook", func(t *testing.T) { tmpDir := t.TempDir() repo := gitinterface.CreateTestGitRepository(t, tmpDir, false) r := &Repository{r: repo} err := r.UpdateHook(HookPrePush, []byte("some content"), false) require.NoError(t, err) hookFile := filepath.Join(repo.GetGitDir(), "hooks", "pre-push") prepushScript, err := os.ReadFile(hookFile) require.NoError(t, err) assert.Equal(t, []byte("some content"), prepushScript) }) t.Run("hook exists", func(t *testing.T) { tmpDir := t.TempDir() repo := gitinterface.CreateTestGitRepository(t, tmpDir, false) r := &Repository{r: repo} hookFile := filepath.Join(repo.GetGitDir(), "hooks", "pre-push") err := os.WriteFile(hookFile, []byte("existing hook script"), 0o700) // nolint:gosec require.NoError(t, err) err = r.UpdateHook(HookPrePush, []byte("new hook script"), 
false) var hookErr *ErrHookExists if assert.ErrorAs(t, err, &hookErr) { assert.Equal(t, HookPrePush, hookErr.HookType) } }) t.Run("force overwrite hook", func(t *testing.T) { tmpDir := t.TempDir() repo := gitinterface.CreateTestGitRepository(t, tmpDir, false) r := &Repository{r: repo} hookFile := filepath.Join(repo.GetGitDir(), "hooks", "pre-push") err := os.WriteFile(hookFile, []byte("existing hook script"), 0o700) // nolint:gosec require.NoError(t, err) err = r.UpdateHook(HookPrePush, []byte("new hook script"), true) assert.NoError(t, err) content, err := os.ReadFile(hookFile) assert.NoError(t, err) assert.Equal(t, []byte("new hook script"), content) }) } gittuf-0.9.0/experimental/gittuf/keys.go000066400000000000000000000067151475150141000203320ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package gittuf import ( "fmt" "os/exec" "strings" "github.com/gittuf/gittuf/internal/dev" "github.com/gittuf/gittuf/internal/signerverifier/gpg" "github.com/gittuf/gittuf/internal/signerverifier/sigstore" sigstoresigneropts "github.com/gittuf/gittuf/internal/signerverifier/sigstore/options/signer" "github.com/gittuf/gittuf/internal/signerverifier/ssh" sslibdsse "github.com/gittuf/gittuf/internal/third_party/go-securesystemslib/dsse" "github.com/gittuf/gittuf/internal/tuf" tufv01 "github.com/gittuf/gittuf/internal/tuf/v01" "github.com/secure-systems-lab/go-securesystemslib/signerverifier" ) const ( GPGKeyPrefix = "gpg:" FulcioPrefix = "fulcio:" ) // LoadPublicKey returns a signerverifier.SSLibKey object for a PGP / Sigstore // Fulcio / SSH (on-disk) key for use in gittuf metadata. func LoadPublicKey(keyRef string) (tuf.Principal, error) { var ( keyObj *signerverifier.SSLibKey err error ) switch { case strings.HasPrefix(keyRef, GPGKeyPrefix): fingerprint := strings.ToLower(strings.TrimSpace(strings.TrimPrefix(keyRef, GPGKeyPrefix))) command := exec.Command("gpg", "--export", "--armor", fingerprint) stdOut, err := command.Output() if err != nil { return nil, err } keyObj, err = gpg.LoadGPGKeyFromBytes(stdOut) if err != nil { return nil, err } case strings.HasPrefix(keyRef, FulcioPrefix): keyID := strings.TrimPrefix(keyRef, FulcioPrefix) ks := strings.Split(keyID, "::") if len(ks) != 2 { return nil, fmt.Errorf("incorrect format for fulcio identity") } keyObj = &signerverifier.SSLibKey{ KeyID: keyID, KeyType: sigstore.KeyType, Scheme: sigstore.KeyScheme, KeyVal: signerverifier.KeyVal{ Identity: ks[0], Issuer: ks[1], }, } default: keyObj, err = ssh.NewKeyFromFile(keyRef) if err != nil { return nil, err } } return tufv01.NewKeyFromSSLibKey(keyObj), nil } // LoadSigner loads a metadata signer for the specified key bytes. Currently, // the signer must be either for an SSH key (in which case the `key` is a path // to the private key) or for signing with Sigstore (where `key` has a prefix // `fulcio:`). For Sigstore, developer mode must be enabled by setting // GITTUF_DEV=1 in the environment. 
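//
// A hypothetical usage sketch, not part of the original source: it assumes a
// previously loaded *Repository named repo and an SSH private key at the
// illustrative path "/home/user/.ssh/gittuf_key" with its ".pub" file
// alongside it. Error handling is elided for brevity.
//
//	signer, err := LoadSigner(repo, "/home/user/.ssh/gittuf_key")
//	if err != nil {
//		// handle error
//	}
//	keyID, _ := signer.KeyID()
//	fmt.Println("signing as", keyID)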
func LoadSigner(repo *Repository, key string) (sslibdsse.SignerVerifier, error) { switch { case strings.HasPrefix(key, GPGKeyPrefix): return nil, fmt.Errorf("not implemented") case strings.HasPrefix(key, FulcioPrefix): if !dev.InDevMode() { return nil, dev.ErrNotInDevMode } opts := []sigstoresigneropts.Option{} gitRepo := repo.GetGitRepository() config, err := gitRepo.GetGitConfig() if err != nil { return nil, err } // Parse relevant gitsign.<> config values if value, has := config[sigstore.GitConfigIssuer]; has { opts = append(opts, sigstoresigneropts.WithIssuerURL(value)) } if value, has := config[sigstore.GitConfigClientID]; has { opts = append(opts, sigstoresigneropts.WithClientID(value)) } if value, has := config[sigstore.GitConfigFulcio]; has { opts = append(opts, sigstoresigneropts.WithFulcioURL(value)) } if value, has := config[sigstore.GitConfigRekor]; has { opts = append(opts, sigstoresigneropts.WithRekorURL(value)) } if value, has := config[sigstore.GitConfigRedirectURL]; has { opts = append(opts, sigstoresigneropts.WithRedirectURL(value)) } return sigstore.NewSigner(opts...), nil default: return ssh.NewSignerFromFile(key) } } gittuf-0.9.0/experimental/gittuf/keys_test.go000066400000000000000000000022241475150141000213600ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package gittuf import ( "context" "fmt" "os" "path/filepath" "testing" artifacts "github.com/gittuf/gittuf/internal/testartifacts" "github.com/stretchr/testify/assert" ) func TestLoadSigner(t *testing.T) { tmpDir := t.TempDir() tests := map[string]struct { keyBytes []byte publicKeyBytes []byte }{ "ssh-rsa-key": {keyBytes: artifacts.SSHRSAPrivate, publicKeyBytes: artifacts.SSHRSAPublicSSH}, "ssh-ecdsa-key": {keyBytes: artifacts.SSHECDSAPrivate, publicKeyBytes: artifacts.SSHECDSAPublicSSH}, "ssh-ed25519-key": {keyBytes: artifacts.SSHED25519Private, publicKeyBytes: artifacts.SSHED25519PublicSSH}, } for name, test := range tests { keyPath := filepath.Join(tmpDir, name) if err := os.WriteFile(keyPath, test.keyBytes, 0o600); err != nil { t.Fatal(err) } if err := os.WriteFile(keyPath+".pub", test.publicKeyBytes, 0o600); err != nil { t.Fatal(err) } signer, err := LoadSigner(nil, keyPath) assert.Nil(t, err, fmt.Sprintf("unexpected error in test '%s'", name)) _, err = signer.Sign(context.Background(), nil) assert.Nil(t, err) } } gittuf-0.9.0/experimental/gittuf/options/000077500000000000000000000000001475150141000205125ustar00rootroot00000000000000gittuf-0.9.0/experimental/gittuf/options/github/000077500000000000000000000000001475150141000217745ustar00rootroot00000000000000gittuf-0.9.0/experimental/gittuf/options/github/github.go000066400000000000000000000013351475150141000236070ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package github const DefaultGitHubBaseURL = "https://github.com" type Options struct { GitHubToken string GitHubBaseURL string } var DefaultOptions = &Options{ GitHubBaseURL: DefaultGitHubBaseURL, } type Option func(o *Options) // WithGitHubToken can be used to specify an authentication token to use the // GitHub API. func WithGitHubToken(token string) Option { return func(o *Options) { o.GitHubToken = token } } // WithGitHubBaseURL can be used to specify a custom GitHub instance, such as an // on-premises GitHub Enterprise Server. 
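//
// A hypothetical sketch of how a caller might compose these options, not part
// of the original source; the token lookup and Enterprise URL are illustrative
// and the functional options are applied in the usual way:
//
//	opts := []Option{
//		WithGitHubToken(os.Getenv("GITHUB_TOKEN")),
//		WithGitHubBaseURL("https://github.example.com"),
//	}
//	options := &Options{GitHubBaseURL: DefaultGitHubBaseURL}
//	for _, fn := range opts {
//		fn(options)
//	}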
func WithGitHubBaseURL(baseURL string) Option { return func(o *Options) { o.GitHubBaseURL = baseURL } } gittuf-0.9.0/experimental/gittuf/options/root/000077500000000000000000000000001475150141000214755ustar00rootroot00000000000000gittuf-0.9.0/experimental/gittuf/options/root/root.go000066400000000000000000000004401475150141000230050ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package root type Options struct { RepositoryLocation string } type Option func(o *Options) func WithRepositoryLocation(location string) Option { return func(o *Options) { o.RepositoryLocation = location } } gittuf-0.9.0/experimental/gittuf/options/rsl/000077500000000000000000000000001475150141000213125ustar00rootroot00000000000000gittuf-0.9.0/experimental/gittuf/options/rsl/rsl.go000066400000000000000000000014221475150141000224400ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package rsl type Options struct { RefNameOverride string SkipCheckForDuplicate bool SkipPropagation bool } type Option func(o *Options) func WithOverrideRefName(refNameOverride string) Option { return func(o *Options) { o.RefNameOverride = refNameOverride } } // WithSkipCheckForDuplicateEntry indicates that the RSL entry creation must not // check if the latest entry for the reference has the same target ID. func WithSkipCheckForDuplicateEntry() Option { return func(o *Options) { o.SkipCheckForDuplicate = true } } // WithSkipPropagation disables execution of the propagation workflow. func WithSkipPropagation() Option { return func(o *Options) { o.SkipPropagation = true } } gittuf-0.9.0/experimental/gittuf/options/verify/000077500000000000000000000000001475150141000220165ustar00rootroot00000000000000gittuf-0.9.0/experimental/gittuf/options/verify/verify.go000066400000000000000000000006231475150141000236520ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package verify type Options struct { RefNameOverride string LatestOnly bool } type Option func(o *Options) func WithOverrideRefName(refNameOverride string) Option { return func(o *Options) { o.RefNameOverride = refNameOverride } } func WithLatestOnly() Option { return func(o *Options) { o.LatestOnly = true } } gittuf-0.9.0/experimental/gittuf/options/verifymergeable/000077500000000000000000000000001475150141000236625ustar00rootroot00000000000000gittuf-0.9.0/experimental/gittuf/options/verifymergeable/verifymergeable.go000066400000000000000000000004421475150141000273610ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package verifymergeable type Options struct { BypassRSLForFeatureRef bool } type Option func(o *Options) func WithBypassRSLForFeatureRef() Option { return func(o *Options) { o.BypassRSLForFeatureRef = true } } gittuf-0.9.0/experimental/gittuf/policy.go000066400000000000000000000044101475150141000206440ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package gittuf import ( "context" "errors" "fmt" "log/slog" "strings" "github.com/gittuf/gittuf/internal/policy" "github.com/gittuf/gittuf/internal/rsl" "github.com/gittuf/gittuf/internal/tuf" ) var ( ErrPushingPolicy = errors.New("unable to push policy") ErrPullingPolicy = errors.New("unable to pull policy") ) // PushPolicy pushes the local gittuf policy to the specified remote. As this // push defaults to fast-forward only, divergent policy states are detected. 
// Note that this also pushes the RSL as the policy cannot change without an // update to the RSL. func (r *Repository) PushPolicy(remoteName string) error { slog.Debug(fmt.Sprintf("Pushing policy and RSL references to %s...", remoteName)) if err := r.r.Push(remoteName, []string{policy.PolicyRef, policy.PolicyStagingRef, rsl.Ref}); err != nil { return errors.Join(ErrPushingPolicy, err) } return nil } // PullPolicy fetches gittuf policy from the specified remote. The fetch is // marked as fast-forward only to detect divergence. Note that this also fetches // the RSL as the policy must be updated in sync with the RSL. func (r *Repository) PullPolicy(remoteName string) error { slog.Debug(fmt.Sprintf("Pulling policy and RSL references from %s...", remoteName)) if err := r.r.Fetch(remoteName, []string{policy.PolicyRef, policy.PolicyStagingRef, rsl.Ref}, true); err != nil { return errors.Join(ErrPullingPolicy, err) } return nil } func (r *Repository) ApplyPolicy(ctx context.Context, signRSLEntry bool) error { return policy.Apply(ctx, r.r, signRSLEntry) } func (r *Repository) DiscardPolicy() error { return policy.Discard(r.r) } func (r *Repository) ListRules(ctx context.Context, targetRef string) ([]*policy.DelegationWithDepth, error) { if strings.HasPrefix(targetRef, "refs/gittuf/") { return policy.ListRules(ctx, r.r, targetRef) } return policy.ListRules(ctx, r.r, "refs/gittuf/"+targetRef) } func (r *Repository) ListPrincipals(ctx context.Context, targetRef, policyName string) (map[string]tuf.Principal, error) { if strings.HasPrefix(targetRef, "refs/gittuf/") { return policy.ListPrincipals(ctx, r.r, targetRef, policyName) } return policy.ListPrincipals(ctx, r.r, "refs/gittuf/"+targetRef, policyName) } gittuf-0.9.0/experimental/gittuf/policy_test.go000066400000000000000000000111531475150141000217050ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package gittuf import ( "testing" "github.com/gittuf/gittuf/internal/gitinterface" "github.com/gittuf/gittuf/internal/policy" "github.com/gittuf/gittuf/internal/rsl" "github.com/stretchr/testify/assert" ) func TestPushPolicy(t *testing.T) { remoteName := "origin" t.Run("successful push", func(t *testing.T) { remoteTmpDir := t.TempDir() remoteRepo := gitinterface.CreateTestGitRepository(t, remoteTmpDir, false) localRepo := createTestRepositoryWithPolicy(t, "") if err := policy.Apply(testCtx, localRepo.r, false); err != nil { t.Fatal(err) } if err := localRepo.r.CreateRemote(remoteName, remoteTmpDir); err != nil { t.Fatal(err) } err := localRepo.PushPolicy(remoteName) assert.Nil(t, err) assertLocalAndRemoteRefsMatch(t, localRepo.r, remoteRepo, policy.PolicyRef) assertLocalAndRemoteRefsMatch(t, localRepo.r, remoteRepo, policy.PolicyStagingRef) assertLocalAndRemoteRefsMatch(t, localRepo.r, remoteRepo, rsl.Ref) // No updates, successful push err = localRepo.PushPolicy(remoteName) assert.Nil(t, err) }) t.Run("divergent policies, unsuccessful push", func(t *testing.T) { remoteTmpDir := t.TempDir() remoteRepo := gitinterface.CreateTestGitRepository(t, remoteTmpDir, false) if err := rsl.NewReferenceEntry(policy.PolicyRef, gitinterface.ZeroHash).Commit(remoteRepo, false); err != nil { t.Fatal(err) } localRepo := createTestRepositoryWithPolicy(t, "") if err := policy.Apply(testCtx, localRepo.r, false); err != nil { t.Fatal(err) } if err := localRepo.r.CreateRemote(remoteName, remoteTmpDir); err != nil { t.Fatal(err) } err := localRepo.PushPolicy(remoteName) assert.ErrorIs(t, err, ErrPushingPolicy) }) } func
TestPullPolicy(t *testing.T) { remoteName := "origin" t.Run("successful pull", func(t *testing.T) { remoteTmpDir := t.TempDir() remoteRepo := createTestRepositoryWithPolicy(t, remoteTmpDir) if err := policy.Apply(testCtx, remoteRepo.r, false); err != nil { t.Fatal(err) } localTmpDir := t.TempDir() localRepoR := gitinterface.CreateTestGitRepository(t, localTmpDir, false) localRepo := &Repository{r: localRepoR} if err := localRepo.r.CreateRemote(remoteName, remoteTmpDir); err != nil { t.Fatal(err) } err := localRepo.PullPolicy(remoteName) assert.Nil(t, err) assertLocalAndRemoteRefsMatch(t, localRepo.r, remoteRepo.r, policy.PolicyRef) assertLocalAndRemoteRefsMatch(t, localRepo.r, remoteRepo.r, policy.PolicyStagingRef) assertLocalAndRemoteRefsMatch(t, localRepo.r, remoteRepo.r, rsl.Ref) // No updates, successful push err = localRepo.PullPolicy(remoteName) assert.Nil(t, err) }) t.Run("divergent policies, unsuccessful pull", func(t *testing.T) { remoteTmpDir := t.TempDir() createTestRepositoryWithPolicy(t, remoteTmpDir) localTmpDir := t.TempDir() localRepoR := gitinterface.CreateTestGitRepository(t, localTmpDir, false) localRepo := &Repository{r: localRepoR} if err := rsl.NewReferenceEntry(policy.PolicyRef, gitinterface.ZeroHash).Commit(localRepo.r, false); err != nil { t.Fatal(err) } if err := localRepo.r.CreateRemote(remoteName, remoteTmpDir); err != nil { t.Fatal(err) } err := localRepo.PullPolicy(remoteName) assert.ErrorIs(t, err, ErrPullingPolicy) }) } func TestDiscardPolicy(t *testing.T) { t.Run("successful discard with existing policy", func(t *testing.T) { repo := createTestRepositoryWithPolicy(t, "") if err := policy.Apply(testCtx, repo.r, false); err != nil { t.Fatal(err) } initialPolicyRef, err := repo.r.GetReference(policy.PolicyRef) assert.Nil(t, err) err = repo.DiscardPolicy() assert.Nil(t, err) stagingRef, err := repo.r.GetReference(policy.PolicyStagingRef) assert.Nil(t, err) assert.Equal(t, initialPolicyRef, stagingRef) }) t.Run("discard with no policy references", func(t *testing.T) { tmpDir := t.TempDir() r := gitinterface.CreateTestGitRepository(t, tmpDir, false) repo := &Repository{r: r} err := repo.DiscardPolicy() assert.Nil(t, err) _, err = repo.r.GetReference(policy.PolicyStagingRef) assert.ErrorIs(t, err, gitinterface.ErrReferenceNotFound) }) t.Run("discard after policy changes", func(t *testing.T) { repo := createTestRepositoryWithPolicy(t, "") initialRef, err := repo.r.GetReference(policy.PolicyRef) assert.Nil(t, err) if err := repo.r.SetReference(policy.PolicyStagingRef, gitinterface.ZeroHash); err != nil { t.Fatal(err) } err = repo.DiscardPolicy() assert.Nil(t, err) stagingRef, err := repo.r.GetReference(policy.PolicyStagingRef) assert.Nil(t, err) assert.Equal(t, initialRef, stagingRef) }) } gittuf-0.9.0/experimental/gittuf/repository.go000066400000000000000000000016441475150141000215720ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package gittuf import ( "errors" "log/slog" "github.com/gittuf/gittuf/internal/gitinterface" "github.com/gittuf/gittuf/internal/tuf" ) var ( ErrUnauthorizedKey = errors.New("unauthorized key presented when updating gittuf metadata") ErrCannotReinitialize = errors.New("cannot reinitialize metadata, it exists already") ) type Repository struct { r *gitinterface.Repository } func (r *Repository) GetGitRepository() *gitinterface.Repository { return r.r } func LoadRepository() (*Repository, error) { slog.Debug("Loading Git repository...") repo, err := gitinterface.LoadRepository() if err != 
nil { return nil, err } return &Repository{ r: repo, }, nil } func isKeyAuthorized(authorizedKeyIDs []tuf.Principal, keyID string) bool { for _, k := range authorizedKeyIDs { if k.ID() == keyID { return true } } return false } gittuf-0.9.0/experimental/gittuf/repository_test.go000066400000000000000000000041051475150141000226240ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package gittuf import ( "os" "testing" "github.com/gittuf/gittuf/internal/gitinterface" tufv01 "github.com/gittuf/gittuf/internal/tuf/v01" "github.com/stretchr/testify/assert" ) func TestLoadRepository(t *testing.T) { t.Run("load with no repo, unsuccessful", func(t *testing.T) { tmpDir := t.TempDir() currentDir, err := os.Getwd() if err != nil { t.Fatal(err) } if err := os.Chdir(tmpDir); err != nil { t.Fatal(err) } defer os.Chdir(currentDir) //nolint:errcheck repository, err := LoadRepository() assert.NotNil(t, err) assert.Nil(t, repository) }) t.Run("successful load", func(t *testing.T) { tmpDir := t.TempDir() currentDir, err := os.Getwd() if err != nil { t.Fatal(err) } if err := os.Chdir(tmpDir); err != nil { t.Fatal(err) } defer os.Chdir(currentDir) //nolint:errcheck gitinterface.CreateTestGitRepository(t, tmpDir, false) repository, err := LoadRepository() assert.Nil(t, err) assert.NotNil(t, repository.r) }) } func TestUnauthorizedKey(t *testing.T) { tempDir := t.TempDir() repo := gitinterface.CreateTestGitRepository(t, tempDir, false) rootSigner := setupSSHKeysForSigning(t, rootKeyBytes, rootPubKeyBytes) targetsSigner := setupSSHKeysForSigning(t, targetsKeyBytes, targetsPubKeyBytes) r := &Repository{r: repo} if err := r.InitializeRoot(testCtx, rootSigner, false); err != nil { t.Fatal(err) } t.Run("test add targets key", func(t *testing.T) { key := tufv01.NewKeyFromSSLibKey(targetsSigner.MetadataKey()) err := r.AddTopLevelTargetsKey(testCtx, targetsSigner, key, false) assert.ErrorIs(t, err, ErrUnauthorizedKey) }) t.Run("test remove targets key", func(t *testing.T) { err := r.RemoveTopLevelTargetsKey(testCtx, targetsSigner, "some key ID", false) assert.ErrorIs(t, err, ErrUnauthorizedKey) }) } func TestGetGitRepository(t *testing.T) { tmpDir := t.TempDir() repo := gitinterface.CreateTestGitRepository(t, tmpDir, false) r := &Repository{r: repo} result := r.GetGitRepository() assert.Equal(t, repo, result) } gittuf-0.9.0/experimental/gittuf/root.go000066400000000000000000000523631475150141000203420ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package gittuf import ( "context" "encoding/base64" "encoding/json" "fmt" "log/slog" "github.com/gittuf/gittuf/experimental/gittuf/options/root" "github.com/gittuf/gittuf/internal/dev" "github.com/gittuf/gittuf/internal/policy" "github.com/gittuf/gittuf/internal/signerverifier/common" "github.com/gittuf/gittuf/internal/signerverifier/dsse" "github.com/gittuf/gittuf/internal/signerverifier/sigstore" "github.com/gittuf/gittuf/internal/signerverifier/ssh" sslibdsse "github.com/gittuf/gittuf/internal/third_party/go-securesystemslib/dsse" "github.com/gittuf/gittuf/internal/tuf" tufv01 "github.com/gittuf/gittuf/internal/tuf/v01" tufv02 "github.com/gittuf/gittuf/internal/tuf/v02" "github.com/secure-systems-lab/go-securesystemslib/signerverifier" ) // InitializeRoot is the interface for the user to create the repository's root // of trust. 
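//
// A hypothetical end-to-end sketch, not part of the original source: it
// assumes a context ctx, an SSH key on disk at the illustrative path
// "/keys/root", and that the caller wants to record the repository location.
// Errors are elided for brevity.
//
//	repo, _ := LoadRepository()
//	signer, _ := LoadSigner(repo, "/keys/root")
//	_ = repo.InitializeRoot(ctx, signer, true,
//		root.WithRepositoryLocation("https://example.com/example/repository"))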
func (r *Repository) InitializeRoot(ctx context.Context, signer sslibdsse.SignerVerifier, signCommit bool, opts ...root.Option) error { if signCommit { slog.Debug("Checking if Git signing is configured...") err := r.r.CanSign() if err != nil { return err } } options := &root.Options{} for _, fn := range opts { fn(options) } var ( publicKeyRaw *signerverifier.SSLibKey err error ) switch signer := signer.(type) { case *ssh.Signer: publicKeyRaw = signer.MetadataKey() case *sigstore.Signer: publicKeyRaw, err = signer.MetadataKey() if err != nil { return err } default: return common.ErrUnknownKeyType } publicKey := tufv01.NewKeyFromSSLibKey(publicKeyRaw) slog.Debug("Creating initial root metadata...") rootMetadata, err := policy.InitializeRootMetadata(publicKey) if err != nil { return err } if options.RepositoryLocation != "" { slog.Debug("Setting repository location...") rootMetadata.SetRepositoryLocation(options.RepositoryLocation) } env, err := dsse.CreateEnvelope(rootMetadata) if err != nil { return err } slog.Debug(fmt.Sprintf("Signing initial root metadata using '%s'...", publicKey.KeyID)) env, err = dsse.SignEnvelope(ctx, env, signer) if err != nil { return err } state := &policy.State{ RootPublicKeys: []tuf.Principal{publicKey}, RootEnvelope: env, } commitMessage := "Initialize root of trust" slog.Debug("Committing policy...") return state.Commit(r.r, commitMessage, signCommit) } func (r *Repository) SetRepositoryLocation(ctx context.Context, signer sslibdsse.SignerVerifier, location string, signCommit bool) error { if signCommit { slog.Debug("Checking if Git signing is configured...") err := r.r.CanSign() if err != nil { return err } } rootKeyID, err := signer.KeyID() if err != nil { return err } slog.Debug("Loading current policy...") state, err := policy.LoadCurrentState(ctx, r.r, policy.PolicyStagingRef) if err != nil { return err } rootMetadata, err := r.loadRootMetadata(state, rootKeyID) if err != nil { return err } rootMetadata.SetRepositoryLocation(location) commitMessage := fmt.Sprintf("Set repository location to '%s' in root", location) return r.updateRootMetadata(ctx, state, signer, rootMetadata, commitMessage, signCommit) } // AddRootKey is the interface for the user to add an authorized key // for the Root role. func (r *Repository) AddRootKey(ctx context.Context, signer sslibdsse.SignerVerifier, newRootKey tuf.Principal, signCommit bool) error { if signCommit { slog.Debug("Checking if Git signing is configured...") err := r.r.CanSign() if err != nil { return err } } rootKeyID, err := signer.KeyID() if err != nil { return err } slog.Debug("Loading current policy...") state, err := policy.LoadCurrentState(ctx, r.r, policy.PolicyStagingRef) if err != nil { return err } rootMetadata, err := r.loadRootMetadata(state, rootKeyID) if err != nil { return err } slog.Debug("Adding root key...") if err := rootMetadata.AddRootPrincipal(newRootKey); err != nil { return err } found := false for _, key := range state.RootPublicKeys { if key.ID() == newRootKey.ID() { found = true break } } if !found { state.RootPublicKeys = append(state.RootPublicKeys, newRootKey) } commitMessage := fmt.Sprintf("Add root key '%s' to root", newRootKey.ID()) return r.updateRootMetadata(ctx, state, signer, rootMetadata, commitMessage, signCommit) } // RemoveRootKey is the interface for the user to de-authorize a key // trusted to sign the Root role. 
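//
// A hypothetical key-rotation sketch, not part of the original source: the
// current root signer first authorizes a new key, after which the new key
// holder can revoke the old one. Paths and key IDs are illustrative and error
// handling is elided.
//
//	newKey, _ := LoadPublicKey("/keys/new_root.pub")
//	_ = repo.AddRootKey(ctx, currentSigner, newKey, true)
//	newSigner, _ := LoadSigner(repo, "/keys/new_root")
//	_ = repo.RemoveRootKey(ctx, newSigner, oldKeyID, true)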
func (r *Repository) RemoveRootKey(ctx context.Context, signer sslibdsse.SignerVerifier, keyID string, signCommit bool) error { if signCommit { slog.Debug("Checking if Git signing is configured...") err := r.r.CanSign() if err != nil { return err } } rootKeyID, err := signer.KeyID() if err != nil { return err } slog.Debug("Loading current policy...") state, err := policy.LoadCurrentState(ctx, r.r, policy.PolicyStagingRef) if err != nil { return err } rootMetadata, err := r.loadRootMetadata(state, rootKeyID) if err != nil { return err } slog.Debug("Removing root key...") if err := rootMetadata.DeleteRootPrincipal(keyID); err != nil { return err } newRootPublicKeys := []tuf.Principal{} for _, key := range state.RootPublicKeys { if key.ID() != keyID { newRootPublicKeys = append(newRootPublicKeys, key) } } state.RootPublicKeys = newRootPublicKeys commitMessage := fmt.Sprintf("Remove root key '%s' from root", keyID) return r.updateRootMetadata(ctx, state, signer, rootMetadata, commitMessage, signCommit) } // AddTopLevelTargetsKey is the interface for the user to add an authorized key // for the top level Targets role / policy file. func (r *Repository) AddTopLevelTargetsKey(ctx context.Context, signer sslibdsse.SignerVerifier, targetsKey tuf.Principal, signCommit bool) error { if signCommit { slog.Debug("Checking if Git signing is configured...") err := r.r.CanSign() if err != nil { return err } } rootKeyID, err := signer.KeyID() if err != nil { return err } slog.Debug("Loading current policy...") state, err := policy.LoadCurrentState(ctx, r.r, policy.PolicyStagingRef) if err != nil { return err } rootMetadata, err := r.loadRootMetadata(state, rootKeyID) if err != nil { return err } slog.Debug("Adding policy key...") if err := rootMetadata.AddPrimaryRuleFilePrincipal(targetsKey); err != nil { return fmt.Errorf("failed to add policy key: %w", err) } commitMessage := fmt.Sprintf("Add policy key '%s' to root", targetsKey.ID()) return r.updateRootMetadata(ctx, state, signer, rootMetadata, commitMessage, signCommit) } // RemoveTopLevelTargetsKey is the interface for the user to de-authorize a key // trusted to sign the top level Targets role / policy file. func (r *Repository) RemoveTopLevelTargetsKey(ctx context.Context, signer sslibdsse.SignerVerifier, targetsKeyID string, signCommit bool) error { if signCommit { slog.Debug("Checking if Git signing is configured...") err := r.r.CanSign() if err != nil { return err } } rootKeyID, err := signer.KeyID() if err != nil { return err } slog.Debug("Loading current policy...") state, err := policy.LoadCurrentState(ctx, r.r, policy.PolicyStagingRef) if err != nil { return err } rootMetadata, err := r.loadRootMetadata(state, rootKeyID) if err != nil { return err } slog.Debug("Removing policy key...") if err := rootMetadata.DeletePrimaryRuleFilePrincipal(targetsKeyID); err != nil { return err } commitMessage := fmt.Sprintf("Remove policy key '%s' from root", targetsKeyID) return r.updateRootMetadata(ctx, state, signer, rootMetadata, commitMessage, signCommit) } // AddGitHubApp is the interface for the user to add the authorized key for the // trusted GitHub app. This key is used to verify GitHub pull request approval // attestation signatures recorded by the app. 
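//
// A hypothetical sketch, not part of the original source: it loads the app's
// public key from an illustrative path, registers it, and then marks the
// app's approvals as trusted; rootSigner is an assumed authorized root signer
// and errors are elided.
//
//	appKey, _ := LoadPublicKey("/keys/github-app.pub")
//	_ = repo.AddGitHubApp(ctx, rootSigner, appKey, true)
//	_ = repo.TrustGitHubApp(ctx, rootSigner, true)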
func (r *Repository) AddGitHubApp(ctx context.Context, signer sslibdsse.SignerVerifier, appKey tuf.Principal, signCommit bool) error { if signCommit { slog.Debug("Checking if Git signing is configured...") err := r.r.CanSign() if err != nil { return err } } rootKeyID, err := signer.KeyID() if err != nil { return err } slog.Debug("Loading current policy...") state, err := policy.LoadCurrentState(ctx, r.r, policy.PolicyStagingRef) if err != nil { return err } rootMetadata, err := r.loadRootMetadata(state, rootKeyID) if err != nil { return err } slog.Debug("Adding GitHub app key...") if err := rootMetadata.AddGitHubAppPrincipal(tuf.GitHubAppRoleName, appKey); err != nil { return fmt.Errorf("failed to add GitHub app key: %w", err) } commitMessage := fmt.Sprintf("Add GitHub app key '%s' to root", appKey.ID()) return r.updateRootMetadata(ctx, state, signer, rootMetadata, commitMessage, signCommit) } // RemoveGitHubApp is the interface for the user to de-authorize the key for the // special GitHub app role. func (r *Repository) RemoveGitHubApp(ctx context.Context, signer sslibdsse.SignerVerifier, signCommit bool) error { if signCommit { slog.Debug("Checking if Git signing is configured...") err := r.r.CanSign() if err != nil { return err } } rootKeyID, err := signer.KeyID() if err != nil { return err } slog.Debug("Loading current policy...") state, err := policy.LoadCurrentState(ctx, r.r, policy.PolicyStagingRef) if err != nil { return err } rootMetadata, err := r.loadRootMetadata(state, rootKeyID) if err != nil { return err } slog.Debug("Removing GitHub app key...") rootMetadata.DeleteGitHubAppPrincipal(tuf.GitHubAppRoleName) commitMessage := "Remove GitHub app key from root" return r.updateRootMetadata(ctx, state, signer, rootMetadata, commitMessage, signCommit) } // TrustGitHubApp updates the root metadata to mark GitHub app pull request // approvals as trusted. func (r *Repository) TrustGitHubApp(ctx context.Context, signer sslibdsse.SignerVerifier, signCommit bool) error { if signCommit { slog.Debug("Checking if Git signing is configured...") err := r.r.CanSign() if err != nil { return err } } rootKeyID, err := signer.KeyID() if err != nil { return err } slog.Debug("Loading current policy...") state, err := policy.LoadCurrentState(ctx, r.r, policy.PolicyStagingRef) if err != nil { return err } rootMetadata, err := r.loadRootMetadata(state, rootKeyID) if err != nil { return err } if rootMetadata.IsGitHubAppApprovalTrusted() { slog.Debug("GitHub app approvals are already trusted, exiting...") return nil } slog.Debug("Marking GitHub app approvals as trusted in root...") rootMetadata.EnableGitHubAppApprovals() commitMessage := "Mark GitHub app approvals as trusted" return r.updateRootMetadata(ctx, state, signer, rootMetadata, commitMessage, signCommit) } // UntrustGitHubApp updates the root metadata to mark GitHub app pull request // approvals as untrusted. 
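//
// A hypothetical sketch, not part of the original source; rootSigner is an
// assumed authorized root signer and errors are elided. If approvals are
// already untrusted, the call returns without changing the metadata.
//
//	_ = repo.UntrustGitHubApp(ctx, rootSigner, true)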
func (r *Repository) UntrustGitHubApp(ctx context.Context, signer sslibdsse.SignerVerifier, signCommit bool) error { if signCommit { slog.Debug("Checking if Git signing is configured...") err := r.r.CanSign() if err != nil { return err } } rootKeyID, err := signer.KeyID() if err != nil { return err } slog.Debug("Loading current policy...") state, err := policy.LoadCurrentState(ctx, r.r, policy.PolicyStagingRef) if err != nil { return err } rootMetadata, err := r.loadRootMetadata(state, rootKeyID) if err != nil { return err } if !rootMetadata.IsGitHubAppApprovalTrusted() { slog.Debug("GitHub app approvals are already untrusted, exiting...") return nil } slog.Debug("Marking GitHub app approvals as untrusted in root...") rootMetadata.DisableGitHubAppApprovals() commitMessage := "Mark GitHub app approvals as untrusted" return r.updateRootMetadata(ctx, state, signer, rootMetadata, commitMessage, signCommit) } // UpdateRootThreshold sets the threshold of valid signatures required for the // Root role. func (r *Repository) UpdateRootThreshold(ctx context.Context, signer sslibdsse.SignerVerifier, threshold int, signCommit bool) error { if signCommit { slog.Debug("Checking if Git signing is configured...") err := r.r.CanSign() if err != nil { return err } } rootKeyID, err := signer.KeyID() if err != nil { return err } slog.Debug("Loading current policy...") state, err := policy.LoadCurrentState(ctx, r.r, policy.PolicyStagingRef) if err != nil { return err } rootMetadata, err := r.loadRootMetadata(state, rootKeyID) if err != nil { return err } slog.Debug("Updating root threshold...") if err := rootMetadata.UpdateRootThreshold(threshold); err != nil { return err } commitMessage := fmt.Sprintf("Update root threshold to %d", threshold) return r.updateRootMetadata(ctx, state, signer, rootMetadata, commitMessage, signCommit) } // UpdateTopLevelTargetsThreshold sets the threshold of valid signatures // required for the top level Targets role. func (r *Repository) UpdateTopLevelTargetsThreshold(ctx context.Context, signer sslibdsse.SignerVerifier, threshold int, signCommit bool) error { if signCommit { slog.Debug("Checking if Git signing is configured...") err := r.r.CanSign() if err != nil { return err } } rootKeyID, err := signer.KeyID() if err != nil { return err } slog.Debug("Loading current policy...") state, err := policy.LoadCurrentState(ctx, r.r, policy.PolicyStagingRef) if err != nil { return err } rootMetadata, err := r.loadRootMetadata(state, rootKeyID) if err != nil { return err } slog.Debug("Updating policy threshold...") if err := rootMetadata.UpdatePrimaryRuleFileThreshold(threshold); err != nil { return err } commitMessage := fmt.Sprintf("Update policy threshold to %d", threshold) return r.updateRootMetadata(ctx, state, signer, rootMetadata, commitMessage, signCommit) } // AddGlobalRuleThreshold adds a threshold global rule to the root metadata. 
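//
// A hypothetical sketch, not part of the original source: it adds a threshold
// rule over the main branch. Because the function checks dev.InDevMode,
// GITTUF_DEV=1 must be set in the environment; the rule name, pattern, and
// threshold below are illustrative and errors are elided.
//
//	_ = repo.AddGlobalRuleThreshold(ctx, rootSigner,
//		"require-approval-for-main", []string{"git:refs/heads/main"}, 2, true)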
func (r *Repository) AddGlobalRuleThreshold(ctx context.Context, signer sslibdsse.SignerVerifier, name string, patterns []string, threshold int, signCommit bool) error { if !dev.InDevMode() { return dev.ErrNotInDevMode } if signCommit { slog.Debug("Checking if Git signing is configured...") err := r.r.CanSign() if err != nil { return err } } rootKeyID, err := signer.KeyID() if err != nil { return err } slog.Debug("Loading current policy...") state, err := policy.LoadCurrentState(ctx, r.r, policy.PolicyStagingRef) if err != nil { return err } rootMetadata, err := r.loadRootMetadata(state, rootKeyID) if err != nil { return err } slog.Debug("Adding threshold global rule...") if err := rootMetadata.AddGlobalRule(tufv01.NewGlobalRuleThreshold(name, patterns, threshold)); err != nil { return err } commitMessage := fmt.Sprintf("Add global rule (%s) '%s' to root metadata", tuf.GlobalRuleThresholdType, name) return r.updateRootMetadata(ctx, state, signer, rootMetadata, commitMessage, signCommit) } // AddGlobalRuleBlockForcePushes adds a global rule that blocks force pushes to the root metadata. func (r *Repository) AddGlobalRuleBlockForcePushes(ctx context.Context, signer sslibdsse.SignerVerifier, name string, patterns []string, signCommit bool) error { if !dev.InDevMode() { return dev.ErrNotInDevMode } if signCommit { slog.Debug("Checking if Git signing is configured...") err := r.r.CanSign() if err != nil { return err } } rootKeyID, err := signer.KeyID() if err != nil { return err } slog.Debug("Loading current policy...") state, err := policy.LoadCurrentState(ctx, r.r, policy.PolicyStagingRef) if err != nil { return err } rootMetadata, err := r.loadRootMetadata(state, rootKeyID) if err != nil { return err } globalRule, err := tufv01.NewGlobalRuleBlockForcePushes(name, patterns) if err != nil { return err } slog.Debug("Adding threshold global rule...") if err := rootMetadata.AddGlobalRule(globalRule); err != nil { return err } commitMessage := fmt.Sprintf("Add global rule (%s) '%s' to root metadata", tuf.GlobalRuleBlockForcePushesType, name) return r.updateRootMetadata(ctx, state, signer, rootMetadata, commitMessage, signCommit) } // RemoveGlobalRule removes a global rule from the root metadata. 
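//
// A hypothetical sketch, not part of the original source: it removes a global
// rule by name; a missing rule surfaces tuf.ErrGlobalRuleNotFound. Developer
// mode (GITTUF_DEV=1) is still required and errors are elided.
//
//	_ = repo.RemoveGlobalRule(ctx, rootSigner, "require-approval-for-main", true)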
func (r *Repository) RemoveGlobalRule(ctx context.Context, signer sslibdsse.SignerVerifier, name string, signCommit bool) error { if !dev.InDevMode() { return dev.ErrNotInDevMode } if signCommit { slog.Debug("Checking if Git signing is configured...") err := r.r.CanSign() if err != nil { return err } } rootKeyID, err := signer.KeyID() if err != nil { return err } slog.Debug("Loading current policy...") state, err := policy.LoadCurrentState(ctx, r.r, policy.PolicyStagingRef) if err != nil { return err } rootMetadata, err := r.loadRootMetadata(state, rootKeyID) if err != nil { return err } slog.Debug("Removing global rule...") if err := rootMetadata.DeleteGlobalRule(name); err != nil { return err } commitMessage := fmt.Sprintf("Remove global rule '%s' from root metadata", name) return r.updateRootMetadata(ctx, state, signer, rootMetadata, commitMessage, signCommit) } func (r *Repository) AddPropagationDirective(ctx context.Context, signer sslibdsse.SignerVerifier, directiveName, upstreamRepository, upstreamReference, downstreamReference, downstreamPath string, signCommit bool) error { if !dev.InDevMode() { return dev.ErrNotInDevMode } if signCommit { slog.Debug("Checking if Git signing is configured...") err := r.r.CanSign() if err != nil { return err } } rootKeyID, err := signer.KeyID() if err != nil { return err } slog.Debug("Loading current policy...") state, err := policy.LoadCurrentState(ctx, r.r, policy.PolicyStagingRef) if err != nil { return err } rootMetadata, err := r.loadRootMetadata(state, rootKeyID) if err != nil { return err } slog.Debug("Adding propagation directive...") var directive tuf.PropagationDirective switch rootMetadata.(type) { case *tufv01.RootMetadata: directive = tufv01.NewPropagationDirective(directiveName, upstreamRepository, upstreamReference, downstreamReference, downstreamPath) case *tufv02.RootMetadata: directive = tufv02.NewPropagationDirective(directiveName, upstreamRepository, upstreamReference, downstreamReference, downstreamPath) } if err := rootMetadata.AddPropagationDirective(directive); err != nil { return err } commitMessage := fmt.Sprintf("Add propagation directive '%s' to root metadata", directiveName) return r.updateRootMetadata(ctx, state, signer, rootMetadata, commitMessage, signCommit) } func (r *Repository) RemovePropagationDirective(ctx context.Context, signer sslibdsse.SignerVerifier, name string, signCommit bool) error { if !dev.InDevMode() { return dev.ErrNotInDevMode } if signCommit { slog.Debug("Checking if Git signing is configured...") err := r.r.CanSign() if err != nil { return err } } rootKeyID, err := signer.KeyID() if err != nil { return err } slog.Debug("Loading current policy...") state, err := policy.LoadCurrentState(ctx, r.r, policy.PolicyStagingRef) if err != nil { return err } rootMetadata, err := r.loadRootMetadata(state, rootKeyID) if err != nil { return err } slog.Debug("Removing propagation directive...") if err := rootMetadata.DeletePropagationDirective(name); err != nil { return err } commitMessage := fmt.Sprintf("Remove propagation directive '%s' from root metadata", name) return r.updateRootMetadata(ctx, state, signer, rootMetadata, commitMessage, signCommit) } // SignRoot adds a signature to the Root envelope. Note that the metadata itself // is not modified, so its version remains the same. 
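//
// A hypothetical sketch, not part of the original source: once a second root
// key has been added, its holder can attach their signature to the existing
// root envelope to help meet the root threshold. The key path is illustrative
// and errors are elided.
//
//	secondSigner, _ := LoadSigner(repo, "/keys/second_root")
//	_ = repo.SignRoot(ctx, secondSigner, true)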
func (r *Repository) SignRoot(ctx context.Context, signer sslibdsse.SignerVerifier, signCommit bool) error { if signCommit { slog.Debug("Checking if Git signing is configured...") err := r.r.CanSign() if err != nil { return err } } keyID, err := signer.KeyID() if err != nil { return err } slog.Debug("Loading current policy...") state, err := policy.LoadCurrentState(ctx, r.r, policy.PolicyStagingRef) if err != nil { return err } env := state.RootEnvelope slog.Debug(fmt.Sprintf("Signing root metadata using '%s'...", keyID)) env, err = dsse.SignEnvelope(ctx, env, signer) if err != nil { return err } state.RootEnvelope = env commitMessage := fmt.Sprintf("Add signature from key '%s' to root metadata", keyID) slog.Debug("Committing policy...") return state.Commit(r.r, commitMessage, signCommit) } func (r *Repository) loadRootMetadata(state *policy.State, keyID string) (tuf.RootMetadata, error) { slog.Debug("Loading current root metadata...") rootMetadata, err := state.GetRootMetadata(false) if err != nil { return nil, err } authorizedPrincipals, err := rootMetadata.GetRootPrincipals() if err != nil { return nil, err } if !isKeyAuthorized(authorizedPrincipals, keyID) { return nil, ErrUnauthorizedKey } return rootMetadata, nil } func (r *Repository) updateRootMetadata(ctx context.Context, state *policy.State, signer sslibdsse.SignerVerifier, rootMetadata tuf.RootMetadata, commitMessage string, signCommit bool) error { rootMetadataBytes, err := json.Marshal(rootMetadata) if err != nil { return err } env := state.RootEnvelope env.Signatures = []sslibdsse.Signature{} env.Payload = base64.StdEncoding.EncodeToString(rootMetadataBytes) slog.Debug("Signing updated root metadata...") env, err = dsse.SignEnvelope(ctx, env, signer) if err != nil { return err } state.RootEnvelope = env slog.Debug("Committing policy...") return state.Commit(r.r, commitMessage, signCommit) } gittuf-0.9.0/experimental/gittuf/root_test.go000066400000000000000000000673621475150141000214060ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package gittuf import ( "testing" rootopts "github.com/gittuf/gittuf/experimental/gittuf/options/root" "github.com/gittuf/gittuf/internal/common/set" "github.com/gittuf/gittuf/internal/dev" "github.com/gittuf/gittuf/internal/gitinterface" "github.com/gittuf/gittuf/internal/policy" "github.com/gittuf/gittuf/internal/signerverifier/dsse" "github.com/gittuf/gittuf/internal/signerverifier/ssh" sslibdsse "github.com/gittuf/gittuf/internal/third_party/go-securesystemslib/dsse" "github.com/gittuf/gittuf/internal/tuf" tufv01 "github.com/gittuf/gittuf/internal/tuf/v01" tufv02 "github.com/gittuf/gittuf/internal/tuf/v02" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) func TestInitializeRoot(t *testing.T) { t.Run("no repository location", func(t *testing.T) { // The helper also runs InitializeRoot for this test r := createTestRepositoryWithRoot(t, "") key := ssh.NewKeyFromBytes(t, rootPubKeyBytes) verifier, err := ssh.NewVerifierFromKey(key) if err != nil { t.Fatal(err) } state, err := policy.LoadCurrentState(testCtx, r.r, policy.PolicyStagingRef) if err != nil { t.Fatal(err) } rootMetadata, err := state.GetRootMetadata(false) if err != nil { t.Fatal(err) } assert.Equal(t, key.KeyID, state.RootEnvelope.Signatures[0].KeyID) assert.True(t, getRootPrincipalIDs(t, rootMetadata).Has(key.KeyID)) _, err = dsse.VerifyEnvelope(testCtx, state.RootEnvelope, []sslibdsse.Verifier{verifier}, 1) assert.Nil(t, err) }) t.Run("with repository 
location", func(t *testing.T) { tempDir := t.TempDir() repo := gitinterface.CreateTestGitRepository(t, tempDir, false) r := &Repository{r: repo} signer := setupSSHKeysForSigning(t, rootKeyBytes, rootPubKeyBytes) location := "https://example.com/repository/location" err := r.InitializeRoot(testCtx, signer, false, rootopts.WithRepositoryLocation(location)) assert.Nil(t, err) if err := policy.Apply(testCtx, repo, false); err != nil { t.Fatalf("failed to apply policy staging changes into policy, err = %s", err) } state, err := policy.LoadCurrentState(testCtx, r.r, policy.PolicyRef) if err != nil { t.Fatal(err) } rootMetadata, err := state.GetRootMetadata(false) if err != nil { t.Fatal(err) } assert.Equal(t, location, rootMetadata.GetRepositoryLocation()) }) } func TestSetRepositoryLocation(t *testing.T) { r := createTestRepositoryWithRoot(t, "") sv := setupSSHKeysForSigning(t, rootKeyBytes, rootPubKeyBytes) location := "https://example.com/repository/location" err := r.SetRepositoryLocation(testCtx, sv, location, false) assert.Nil(t, err) state, err := policy.LoadCurrentState(testCtx, r.r, policy.PolicyStagingRef) if err != nil { t.Fatal(err) } rootMetadata, err := state.GetRootMetadata(false) assert.Nil(t, err) assert.Equal(t, location, rootMetadata.GetRepositoryLocation()) } func TestAddRootKey(t *testing.T) { r := createTestRepositoryWithRoot(t, "") sv := setupSSHKeysForSigning(t, rootKeyBytes, rootPubKeyBytes) originalKeyID, err := sv.KeyID() if err != nil { t.Fatal(err) } newRootKey := tufv01.NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, targetsPubKeyBytes)) err = r.AddRootKey(testCtx, sv, newRootKey, false) assert.Nil(t, err) state, err := policy.LoadCurrentState(testCtx, r.r, policy.PolicyStagingRef) if err != nil { t.Fatal(err) } rootMetadata, err := state.GetRootMetadata(false) assert.Nil(t, err) assert.Equal(t, originalKeyID, state.RootEnvelope.Signatures[0].KeyID) assert.Equal(t, 2, len(state.RootPublicKeys)) assert.Equal(t, set.NewSetFromItems(originalKeyID, newRootKey.KeyID), getRootPrincipalIDs(t, rootMetadata)) _, err = dsse.VerifyEnvelope(testCtx, state.RootEnvelope, []sslibdsse.Verifier{sv}, 1) assert.Nil(t, err) } func TestRemoveRootKey(t *testing.T) { r := createTestRepositoryWithRoot(t, "") originalSigner := setupSSHKeysForSigning(t, rootKeyBytes, rootPubKeyBytes) rootKey := tufv01.NewKeyFromSSLibKey(originalSigner.MetadataKey()) err := r.AddRootKey(testCtx, originalSigner, rootKey, false) if err != nil { t.Fatal(err) } state, err := policy.LoadCurrentState(testCtx, r.r, policy.PolicyStagingRef) if err != nil { t.Fatal(err) } rootMetadata, err := state.GetRootMetadata(false) if err != nil { t.Fatal(err) } // We should have no additions as we tried to add the same key assert.Equal(t, 1, len(state.RootPublicKeys)) rootPrincipals, err := rootMetadata.GetRootPrincipals() assert.Nil(t, err) assert.Equal(t, 1, len(rootPrincipals)) newRootKey := tufv01.NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, targetsPubKeyBytes)) err = r.AddRootKey(testCtx, originalSigner, newRootKey, false) if err != nil { t.Fatal(err) } state, err = policy.LoadCurrentState(testCtx, r.r, policy.PolicyStagingRef) if err != nil { t.Fatal(err) } rootMetadata, err = state.GetRootMetadata(false) if err != nil { t.Fatal(err) } rootPrincipalIDs := getRootPrincipalIDs(t, rootMetadata) assert.True(t, rootPrincipalIDs.Has(rootKey.KeyID)) assert.True(t, rootPrincipalIDs.Has(newRootKey.KeyID)) assert.Equal(t, 2, len(state.RootPublicKeys)) _, err = dsse.VerifyEnvelope(testCtx, state.RootEnvelope, 
[]sslibdsse.Verifier{originalSigner}, 1) assert.Nil(t, err) newSigner := setupSSHKeysForSigning(t, targetsKeyBytes, targetsPubKeyBytes) // We can use the newly added root key to revoke the old one err = r.RemoveRootKey(testCtx, newSigner, rootKey.KeyID, false) assert.Nil(t, err) state, err = policy.LoadCurrentState(testCtx, r.r, policy.PolicyStagingRef) if err != nil { t.Fatal(err) } rootMetadata, err = state.GetRootMetadata(false) if err != nil { t.Fatal(err) } rootPrincipalIDs = getRootPrincipalIDs(t, rootMetadata) assert.True(t, rootPrincipalIDs.Has(newRootKey.KeyID)) assert.Equal(t, 1, rootPrincipalIDs.Len()) assert.Equal(t, 1, len(state.RootPublicKeys)) _, err = dsse.VerifyEnvelope(testCtx, state.RootEnvelope, []sslibdsse.Verifier{newSigner}, 1) assert.Nil(t, err) } func TestAddTopLevelTargetsKey(t *testing.T) { r := createTestRepositoryWithRoot(t, "") sv := setupSSHKeysForSigning(t, rootKeyBytes, rootPubKeyBytes) key := tufv01.NewKeyFromSSLibKey(sv.MetadataKey()) err := r.AddTopLevelTargetsKey(testCtx, sv, key, false) assert.Nil(t, err) state, err := policy.LoadCurrentState(testCtx, r.r, policy.PolicyStagingRef) if err != nil { t.Fatal(err) } rootMetadata, err := state.GetRootMetadata(false) assert.Nil(t, err) assert.Equal(t, key.KeyID, state.RootEnvelope.Signatures[0].KeyID) assert.True(t, getRootPrincipalIDs(t, rootMetadata).Has(key.KeyID)) assert.True(t, getPrimaryRuleFilePrincipalIDs(t, rootMetadata).Has(key.KeyID)) _, err = dsse.VerifyEnvelope(testCtx, state.RootEnvelope, []sslibdsse.Verifier{sv}, 1) assert.Nil(t, err) } func TestRemoveTopLevelTargetsKey(t *testing.T) { r := createTestRepositoryWithRoot(t, "") sv := setupSSHKeysForSigning(t, rootKeyBytes, rootPubKeyBytes) rootKey := tufv01.NewKeyFromSSLibKey(sv.MetadataKey()) err := r.AddTopLevelTargetsKey(testCtx, sv, rootKey, false) if err != nil { t.Fatal(err) } targetsKey := tufv01.NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, targetsPubKeyBytes)) err = r.AddTopLevelTargetsKey(testCtx, sv, targetsKey, false) if err != nil { t.Fatal(err) } state, err := policy.LoadCurrentState(testCtx, r.r, policy.PolicyStagingRef) if err != nil { t.Fatal(err) } rootMetadata, err := state.GetRootMetadata(false) if err != nil { t.Fatal(err) } targetsPrincipalIDs := getPrimaryRuleFilePrincipalIDs(t, rootMetadata) assert.True(t, targetsPrincipalIDs.Has(rootKey.KeyID)) assert.True(t, targetsPrincipalIDs.Has(targetsKey.KeyID)) _, err = dsse.VerifyEnvelope(testCtx, state.RootEnvelope, []sslibdsse.Verifier{sv}, 1) assert.Nil(t, err) err = r.RemoveTopLevelTargetsKey(testCtx, sv, rootKey.KeyID, false) assert.Nil(t, err) state, err = policy.LoadCurrentState(testCtx, r.r, policy.PolicyStagingRef) if err != nil { t.Fatal(err) } rootMetadata, err = state.GetRootMetadata(false) if err != nil { t.Fatal(err) } targetsPrincipalIDs = getPrimaryRuleFilePrincipalIDs(t, rootMetadata) assert.True(t, targetsPrincipalIDs.Has(targetsKey.KeyID)) _, err = dsse.VerifyEnvelope(testCtx, state.RootEnvelope, []sslibdsse.Verifier{sv}, 1) assert.Nil(t, err) } func TestAddGitHubApp(t *testing.T) { r := createTestRepositoryWithRoot(t, "") sv := setupSSHKeysForSigning(t, rootKeyBytes, rootPubKeyBytes) key := tufv01.NewKeyFromSSLibKey(sv.MetadataKey()) err := r.AddGitHubApp(testCtx, sv, key, false) assert.Nil(t, err) state, err := policy.LoadCurrentState(testCtx, r.r, policy.PolicyStagingRef) if err != nil { t.Fatal(err) } rootMetadata, err := state.GetRootMetadata(false) assert.Nil(t, err) appPrincipals, err := rootMetadata.GetGitHubAppPrincipals() if err != nil { t.Fatal(err) } 
assert.Equal(t, key, appPrincipals[0]) _, err = dsse.VerifyEnvelope(testCtx, state.RootEnvelope, []sslibdsse.Verifier{sv}, 1) assert.Nil(t, err) } func TestRemoveGitHubApp(t *testing.T) { r := createTestRepositoryWithRoot(t, "") sv := setupSSHKeysForSigning(t, rootKeyBytes, rootPubKeyBytes) key := tufv01.NewKeyFromSSLibKey(sv.MetadataKey()) err := r.AddGitHubApp(testCtx, sv, key, false) if err != nil { t.Fatal(err) } state, err := policy.LoadCurrentState(testCtx, r.r, policy.PolicyStagingRef) if err != nil { t.Fatal(err) } rootMetadata, err := state.GetRootMetadata(false) if err != nil { t.Fatal(err) } appPrincipals, err := rootMetadata.GetGitHubAppPrincipals() if err != nil { t.Fatal(err) } assert.Equal(t, key, appPrincipals[0]) _, err = dsse.VerifyEnvelope(testCtx, state.RootEnvelope, []sslibdsse.Verifier{sv}, 1) assert.Nil(t, err) err = r.RemoveGitHubApp(testCtx, sv, false) assert.Nil(t, err) state, err = policy.LoadCurrentState(testCtx, r.r, policy.PolicyStagingRef) if err != nil { t.Fatal(err) } rootMetadata, err = state.GetRootMetadata(false) if err != nil { t.Fatal(err) } appPrincipals, err = rootMetadata.GetGitHubAppPrincipals() // We see an error (correctly that the app is trusted but no key is present) assert.ErrorIs(t, err, tuf.ErrGitHubAppInformationNotFoundInRoot) assert.Empty(t, appPrincipals) _, err = dsse.VerifyEnvelope(testCtx, state.RootEnvelope, []sslibdsse.Verifier{sv}, 1) assert.Nil(t, err) } func TestTrustGitHubApp(t *testing.T) { t.Run("GitHub app role not defined", func(t *testing.T) { r := createTestRepositoryWithRoot(t, "") sv := setupSSHKeysForSigning(t, rootKeyBytes, rootPubKeyBytes) err := r.TrustGitHubApp(testCtx, sv, false) assert.Nil(t, err) _, err = policy.LoadCurrentState(testCtx, r.r, policy.PolicyStagingRef) assert.ErrorIs(t, err, tuf.ErrGitHubAppInformationNotFoundInRoot) }) t.Run("GitHub app role defined", func(t *testing.T) { r := createTestRepositoryWithRoot(t, "") sv := setupSSHKeysForSigning(t, rootKeyBytes, rootPubKeyBytes) key := tufv01.NewKeyFromSSLibKey(sv.MetadataKey()) state, err := policy.LoadCurrentState(testCtx, r.r, policy.PolicyStagingRef) if err != nil { t.Fatal(err) } rootMetadata, err := state.GetRootMetadata(false) assert.Nil(t, err) assert.False(t, rootMetadata.IsGitHubAppApprovalTrusted()) err = r.AddGitHubApp(testCtx, sv, key, false) assert.Nil(t, err) err = r.TrustGitHubApp(testCtx, sv, false) assert.Nil(t, err) state, err = policy.LoadCurrentState(testCtx, r.r, policy.PolicyStagingRef) if err != nil { t.Fatal(err) } rootMetadata, err = state.GetRootMetadata(false) assert.Nil(t, err) assert.True(t, rootMetadata.IsGitHubAppApprovalTrusted()) _, err = dsse.VerifyEnvelope(testCtx, state.RootEnvelope, []sslibdsse.Verifier{sv}, 1) assert.Nil(t, err) // Test if we can trust again if already trusted err = r.TrustGitHubApp(testCtx, sv, false) assert.Nil(t, err) }) } func TestUntrustGitHubApp(t *testing.T) { r := createTestRepositoryWithRoot(t, "") sv := setupSSHKeysForSigning(t, rootKeyBytes, rootPubKeyBytes) key := tufv01.NewKeyFromSSLibKey(sv.MetadataKey()) state, err := policy.LoadCurrentState(testCtx, r.r, policy.PolicyStagingRef) if err != nil { t.Fatal(err) } rootMetadata, err := state.GetRootMetadata(false) assert.Nil(t, err) assert.False(t, rootMetadata.IsGitHubAppApprovalTrusted()) err = r.AddGitHubApp(testCtx, sv, key, false) assert.Nil(t, err) err = r.TrustGitHubApp(testCtx, sv, false) assert.Nil(t, err) state, err = policy.LoadCurrentState(testCtx, r.r, policy.PolicyStagingRef) if err != nil { t.Fatal(err) } rootMetadata, err 
= state.GetRootMetadata(false) assert.Nil(t, err) assert.True(t, rootMetadata.IsGitHubAppApprovalTrusted()) _, err = dsse.VerifyEnvelope(testCtx, state.RootEnvelope, []sslibdsse.Verifier{sv}, 1) assert.Nil(t, err) err = r.UntrustGitHubApp(testCtx, sv, false) assert.Nil(t, err) state, err = policy.LoadCurrentState(testCtx, r.r, policy.PolicyStagingRef) if err != nil { t.Fatal(err) } rootMetadata, err = state.GetRootMetadata(false) assert.Nil(t, err) assert.False(t, rootMetadata.IsGitHubAppApprovalTrusted()) _, err = dsse.VerifyEnvelope(testCtx, state.RootEnvelope, []sslibdsse.Verifier{sv}, 1) assert.Nil(t, err) } func TestUpdateRootThreshold(t *testing.T) { r := createTestRepositoryWithRoot(t, "") state, err := policy.LoadCurrentState(testCtx, r.r, policy.PolicyStagingRef) if err != nil { t.Fatal(err) } rootMetadata, err := state.GetRootMetadata(false) if err != nil { t.Fatal(err) } assert.Equal(t, 1, getRootPrincipalIDs(t, rootMetadata).Len()) rootThreshold, err := rootMetadata.GetRootThreshold() if err != nil { t.Fatal(err) } assert.Equal(t, 1, rootThreshold) signer := setupSSHKeysForSigning(t, rootKeyBytes, rootPubKeyBytes) secondKey := tufv01.NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, targetsPubKeyBytes)) if err := r.AddRootKey(testCtx, signer, secondKey, false); err != nil { t.Fatal(err) } err = r.UpdateRootThreshold(testCtx, signer, 2, false) assert.Nil(t, err) state, err = policy.LoadCurrentState(testCtx, r.r, policy.PolicyStagingRef) if err != nil { t.Fatal(err) } rootMetadata, err = state.GetRootMetadata(false) if err != nil { t.Fatal(err) } assert.Equal(t, 2, getRootPrincipalIDs(t, rootMetadata).Len()) rootThreshold, err = rootMetadata.GetRootThreshold() if err != nil { t.Fatal(err) } assert.Equal(t, 2, rootThreshold) } func TestUpdateTopLevelTargetsThreshold(t *testing.T) { r := createTestRepositoryWithRoot(t, "") sv := setupSSHKeysForSigning(t, rootKeyBytes, rootPubKeyBytes) key := tufv01.NewKeyFromSSLibKey(sv.MetadataKey()) if err := r.AddTopLevelTargetsKey(testCtx, sv, key, false); err != nil { t.Fatal(err) } state, err := policy.LoadCurrentState(testCtx, r.r, policy.PolicyStagingRef) if err != nil { t.Fatal(err) } rootMetadata, err := state.GetRootMetadata(false) if err != nil { t.Fatal(err) } assert.Equal(t, 1, getPrimaryRuleFilePrincipalIDs(t, rootMetadata).Len()) targetsThreshold, err := rootMetadata.GetPrimaryRuleFileThreshold() if err != nil { t.Fatal(err) } assert.Equal(t, 1, targetsThreshold) targetsKey := tufv01.NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, targetsPubKeyBytes)) if err := r.AddTopLevelTargetsKey(testCtx, sv, targetsKey, false); err != nil { t.Fatal(err) } err = r.UpdateTopLevelTargetsThreshold(testCtx, sv, 2, false) assert.Nil(t, err) state, err = policy.LoadCurrentState(testCtx, r.r, policy.PolicyStagingRef) if err != nil { t.Fatal(err) } rootMetadata, err = state.GetRootMetadata(false) if err != nil { t.Fatal(err) } assert.Equal(t, 2, getPrimaryRuleFilePrincipalIDs(t, rootMetadata).Len()) targetsThreshold, err = rootMetadata.GetPrimaryRuleFileThreshold() if err != nil { t.Fatal(err) } assert.Equal(t, 2, targetsThreshold) } func TestSignRoot(t *testing.T) { r := createTestRepositoryWithRoot(t, "") rootSigner := setupSSHKeysForSigning(t, rootKeyBytes, rootPubKeyBytes) // Add targets key as a root key secondKey := tufv01.NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, targetsPubKeyBytes)) if err := r.AddRootKey(testCtx, rootSigner, secondKey, false); err != nil { t.Fatal(err) } secondSigner := setupSSHKeysForSigning(t, targetsKeyBytes, targetsPubKeyBytes) // Add 
signature to root err := r.SignRoot(testCtx, secondSigner, false) assert.Nil(t, err) state, err := policy.LoadCurrentState(testCtx, r.r, policy.PolicyStagingRef) if err != nil { t.Fatal(err) } assert.Equal(t, 2, len(state.RootEnvelope.Signatures)) } func TestAddGlobalRuleThreshold(t *testing.T) { t.Setenv(dev.DevModeKey, "1") r := createTestRepositoryWithRoot(t, "") state, err := policy.LoadCurrentState(testCtx, r.r, policy.PolicyRef) if err != nil { t.Fatal(err) } rootMetadata, err := state.GetRootMetadata(false) if err != nil { t.Fatal(err) } globalRules := rootMetadata.GetGlobalRules() assert.Empty(t, globalRules) rootSigner := setupSSHKeysForSigning(t, rootKeyBytes, rootPubKeyBytes) err = r.AddGlobalRuleThreshold(testCtx, rootSigner, "require-approval-for-main", []string{"git:refs/heads/main"}, 1, false) assert.Nil(t, err) state, err = policy.LoadCurrentState(testCtx, r.r, policy.PolicyStagingRef) // we haven't applied if err != nil { t.Fatal(err) } rootMetadata, err = state.GetRootMetadata(false) if err != nil { t.Fatal(err) } globalRules = rootMetadata.GetGlobalRules() assert.Len(t, globalRules, 1) assert.Equal(t, "require-approval-for-main", globalRules[0].GetName()) assert.Equal(t, []string{"git:refs/heads/main"}, globalRules[0].(tuf.GlobalRuleThreshold).GetProtectedNamespaces()) assert.Equal(t, 1, globalRules[0].(tuf.GlobalRuleThreshold).GetThreshold()) err = r.AddGlobalRuleThreshold(testCtx, rootSigner, "require-approval-for-main", []string{"git:refs/heads/main"}, 1, false) assert.ErrorIs(t, err, tuf.ErrGlobalRuleAlreadyExists) } func TestAddGlobalRuleBlockForcePushes(t *testing.T) { t.Setenv(dev.DevModeKey, "1") r := createTestRepositoryWithRoot(t, "") state, err := policy.LoadCurrentState(testCtx, r.r, policy.PolicyRef) if err != nil { t.Fatal(err) } rootMetadata, err := state.GetRootMetadata(false) if err != nil { t.Fatal(err) } globalRules := rootMetadata.GetGlobalRules() assert.Empty(t, globalRules) rootSigner := setupSSHKeysForSigning(t, rootKeyBytes, rootPubKeyBytes) err = r.AddGlobalRuleBlockForcePushes(testCtx, rootSigner, "block-force-pushes-for-main", []string{"git:refs/heads/main"}, false) assert.Nil(t, err) state, err = policy.LoadCurrentState(testCtx, r.r, policy.PolicyStagingRef) // we haven't applied if err != nil { t.Fatal(err) } rootMetadata, err = state.GetRootMetadata(false) if err != nil { t.Fatal(err) } globalRules = rootMetadata.GetGlobalRules() assert.Len(t, globalRules, 1) assert.Equal(t, "block-force-pushes-for-main", globalRules[0].GetName()) assert.Equal(t, []string{"git:refs/heads/main"}, globalRules[0].(tuf.GlobalRuleBlockForcePushes).GetProtectedNamespaces()) } func TestRemoveGlobalRule(t *testing.T) { t.Setenv(dev.DevModeKey, "1") t.Run("remove threshold global rule", func(t *testing.T) { r := createTestRepositoryWithRoot(t, "") rootSigner := setupSSHKeysForSigning(t, rootKeyBytes, rootPubKeyBytes) err := r.AddGlobalRuleThreshold(testCtx, rootSigner, "require-approval-for-main", []string{"git:refs/heads/main"}, 1, false) assert.Nil(t, err) state, err := policy.LoadCurrentState(testCtx, r.r, policy.PolicyStagingRef) if err != nil { t.Fatal(err) } rootMetadata, err := state.GetRootMetadata(false) if err != nil { t.Fatal(err) } globalRules := rootMetadata.GetGlobalRules() assert.Len(t, globalRules, 1) err = r.RemoveGlobalRule(testCtx, rootSigner, "require-approval-for-main", false) assert.Nil(t, err) state, err = policy.LoadCurrentState(testCtx, r.r, policy.PolicyStagingRef) if err != nil { t.Fatal(err) } rootMetadata, err = 
state.GetRootMetadata(false) if err != nil { t.Fatal(err) } globalRules = rootMetadata.GetGlobalRules() assert.Empty(t, globalRules) }) t.Run("remove force push global rule", func(t *testing.T) { r := createTestRepositoryWithRoot(t, "") rootSigner := setupSSHKeysForSigning(t, rootKeyBytes, rootPubKeyBytes) err := r.AddGlobalRuleBlockForcePushes(testCtx, rootSigner, "block-force-pushes-for-main", []string{"git:refs/heads/main"}, false) assert.Nil(t, err) state, err := policy.LoadCurrentState(testCtx, r.r, policy.PolicyStagingRef) if err != nil { t.Fatal(err) } rootMetadata, err := state.GetRootMetadata(false) if err != nil { t.Fatal(err) } globalRules := rootMetadata.GetGlobalRules() assert.Len(t, globalRules, 1) err = r.RemoveGlobalRule(testCtx, rootSigner, "block-force-pushes-for-main", false) assert.Nil(t, err) state, err = policy.LoadCurrentState(testCtx, r.r, policy.PolicyStagingRef) if err != nil { t.Fatal(err) } rootMetadata, err = state.GetRootMetadata(false) if err != nil { t.Fatal(err) } globalRules = rootMetadata.GetGlobalRules() assert.Empty(t, globalRules) }) t.Run("remove global rule when none exist", func(t *testing.T) { r := createTestRepositoryWithRoot(t, "") rootSigner := setupSSHKeysForSigning(t, rootKeyBytes, rootPubKeyBytes) state, err := policy.LoadCurrentState(testCtx, r.r, policy.PolicyStagingRef) if err != nil { t.Fatal(err) } rootMetadata, err := state.GetRootMetadata(false) if err != nil { t.Fatal(err) } globalRules := rootMetadata.GetGlobalRules() assert.Empty(t, globalRules) err = r.RemoveGlobalRule(testCtx, rootSigner, "require-approval-for-main", false) assert.ErrorIs(t, err, tuf.ErrGlobalRuleNotFound) }) } func TestAddPropagationDirective(t *testing.T) { t.Setenv(dev.DevModeKey, "1") t.Run("with tuf v01 metadata", func(t *testing.T) { r := createTestRepositoryWithRoot(t, "") state, err := policy.LoadCurrentState(testCtx, r.r, policy.PolicyRef) if err != nil { t.Fatal(err) } rootMetadata, err := state.GetRootMetadata(false) if err != nil { t.Fatal(err) } directives := rootMetadata.GetPropagationDirectives() assert.Empty(t, directives) rootSigner := setupSSHKeysForSigning(t, rootKeyBytes, rootPubKeyBytes) err = r.AddPropagationDirective(testCtx, rootSigner, "test", "https://example.com/git/repository", "refs/heads/main", "refs/heads/main", "upstream/", false) assert.Nil(t, err) state, err = policy.LoadCurrentState(testCtx, r.r, policy.PolicyStagingRef) // we haven't applied if err != nil { t.Fatal(err) } rootMetadata, err = state.GetRootMetadata(false) if err != nil { t.Fatal(err) } directives = rootMetadata.GetPropagationDirectives() assert.Len(t, directives, 1) assert.Equal(t, tufv01.NewPropagationDirective("test", "https://example.com/git/repository", "refs/heads/main", "refs/heads/main", "upstream/"), directives[0]) }) t.Run("with tuf v02 metadata", func(t *testing.T) { t.Setenv(tufv02.AllowV02MetadataKey, "1") r := createTestRepositoryWithRoot(t, "") state, err := policy.LoadCurrentState(testCtx, r.r, policy.PolicyRef) if err != nil { t.Fatal(err) } rootMetadata, err := state.GetRootMetadata(false) if err != nil { t.Fatal(err) } directives := rootMetadata.GetPropagationDirectives() assert.Empty(t, directives) rootSigner := setupSSHKeysForSigning(t, rootKeyBytes, rootPubKeyBytes) err = r.AddPropagationDirective(testCtx, rootSigner, "test", "https://example.com/git/repository", "refs/heads/main", "refs/heads/main", "upstream/", false) assert.Nil(t, err) state, err = policy.LoadCurrentState(testCtx, r.r, policy.PolicyStagingRef) // we haven't applied if err 
!= nil { t.Fatal(err) } rootMetadata, err = state.GetRootMetadata(false) if err != nil { t.Fatal(err) } directives = rootMetadata.GetPropagationDirectives() assert.Len(t, directives, 1) assert.Equal(t, tufv02.NewPropagationDirective("test", "https://example.com/git/repository", "refs/heads/main", "refs/heads/main", "upstream/"), directives[0]) }) } func TestRemovePropagationDirective(t *testing.T) { t.Setenv(dev.DevModeKey, "1") t.Run("with tuf v01 metadata", func(t *testing.T) { r := createTestRepositoryWithRoot(t, "") state, err := policy.LoadCurrentState(testCtx, r.r, policy.PolicyRef) if err != nil { t.Fatal(err) } rootMetadata, err := state.GetRootMetadata(false) if err != nil { t.Fatal(err) } directives := rootMetadata.GetPropagationDirectives() assert.Empty(t, directives) rootSigner := setupSSHKeysForSigning(t, rootKeyBytes, rootPubKeyBytes) err = r.AddPropagationDirective(testCtx, rootSigner, "test", "https://example.com/git/repository", "refs/heads/main", "refs/heads/main", "upstream/", false) require.Nil(t, err) state, err = policy.LoadCurrentState(testCtx, r.r, policy.PolicyStagingRef) // we haven't applied if err != nil { t.Fatal(err) } rootMetadata, err = state.GetRootMetadata(false) if err != nil { t.Fatal(err) } directives = rootMetadata.GetPropagationDirectives() require.Len(t, directives, 1) require.Equal(t, tufv01.NewPropagationDirective("test", "https://example.com/git/repository", "refs/heads/main", "refs/heads/main", "upstream/"), directives[0]) err = r.RemovePropagationDirective(testCtx, rootSigner, "test", false) assert.Nil(t, err) state, err = policy.LoadCurrentState(testCtx, r.r, policy.PolicyStagingRef) // we haven't applied if err != nil { t.Fatal(err) } rootMetadata, err = state.GetRootMetadata(false) if err != nil { t.Fatal(err) } directives = rootMetadata.GetPropagationDirectives() require.Empty(t, directives) err = r.RemovePropagationDirective(testCtx, rootSigner, "test", false) assert.ErrorIs(t, err, tuf.ErrPropagationDirectiveNotFound) }) t.Run("with tuf v02 metadata", func(t *testing.T) { t.Setenv(tufv02.AllowV02MetadataKey, "1") r := createTestRepositoryWithRoot(t, "") state, err := policy.LoadCurrentState(testCtx, r.r, policy.PolicyRef) if err != nil { t.Fatal(err) } rootMetadata, err := state.GetRootMetadata(false) if err != nil { t.Fatal(err) } directives := rootMetadata.GetPropagationDirectives() require.Empty(t, directives) rootSigner := setupSSHKeysForSigning(t, rootKeyBytes, rootPubKeyBytes) err = r.AddPropagationDirective(testCtx, rootSigner, "test", "https://example.com/git/repository", "refs/heads/main", "refs/heads/main", "upstream/", false) require.Nil(t, err) state, err = policy.LoadCurrentState(testCtx, r.r, policy.PolicyStagingRef) // we haven't applied if err != nil { t.Fatal(err) } rootMetadata, err = state.GetRootMetadata(false) if err != nil { t.Fatal(err) } directives = rootMetadata.GetPropagationDirectives() require.Len(t, directives, 1) require.Equal(t, tufv02.NewPropagationDirective("test", "https://example.com/git/repository", "refs/heads/main", "refs/heads/main", "upstream/"), directives[0]) err = r.RemovePropagationDirective(testCtx, rootSigner, "test", false) assert.Nil(t, err) state, err = policy.LoadCurrentState(testCtx, r.r, policy.PolicyStagingRef) // we haven't applied if err != nil { t.Fatal(err) } rootMetadata, err = state.GetRootMetadata(false) if err != nil { t.Fatal(err) } directives = rootMetadata.GetPropagationDirectives() require.Empty(t, directives) err = r.RemovePropagationDirective(testCtx, rootSigner, "test", 
false) assert.ErrorIs(t, err, tuf.ErrPropagationDirectiveNotFound) }) } func getRootPrincipalIDs(t *testing.T, rootMetadata tuf.RootMetadata) *set.Set[string] { t.Helper() principals, err := rootMetadata.GetRootPrincipals() if err != nil { t.Fatal(err) } principalIDs := set.NewSet[string]() for _, principal := range principals { principalIDs.Add(principal.ID()) } return principalIDs } func getPrimaryRuleFilePrincipalIDs(t *testing.T, rootMetadata tuf.RootMetadata) *set.Set[string] { t.Helper() principals, err := rootMetadata.GetPrimaryRuleFilePrincipals() if err != nil { t.Fatal(err) } principalIDs := set.NewSet[string]() for _, principal := range principals { principalIDs.Add(principal.ID()) } return principalIDs } gittuf-0.9.0/experimental/gittuf/rsl.go000066400000000000000000000466321475150141000201610ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package gittuf import ( "context" "errors" "fmt" "log/slog" "os" "strings" rslopts "github.com/gittuf/gittuf/experimental/gittuf/options/rsl" "github.com/gittuf/gittuf/internal/common/set" "github.com/gittuf/gittuf/internal/dev" "github.com/gittuf/gittuf/internal/gitinterface" "github.com/gittuf/gittuf/internal/policy" "github.com/gittuf/gittuf/internal/rsl" "github.com/gittuf/gittuf/internal/tuf" "github.com/go-git/go-git/v5/plumbing/transport" ) const gittufTransportPrefix = "gittuf::" var ( ErrCommitNotInRef = errors.New("specified commit is not in ref") ErrPushingRSL = errors.New("unable to push RSL") ErrPullingRSL = errors.New("unable to pull RSL") ) // RecordRSLEntryForReference is the interface for the user to add an RSL entry // for the specified Git reference. func (r *Repository) RecordRSLEntryForReference(ctx context.Context, refName string, signCommit bool, opts ...rslopts.Option) error { if signCommit { slog.Debug("Checking if Git signing is configured...") err := r.r.CanSign() if err != nil { return err } } options := &rslopts.Options{} for _, fn := range opts { fn(options) } if !options.SkipPropagation { if err := r.PropagateChangesFromUpstreamRepositories(ctx, signCommit); err != nil { return fmt.Errorf("unable to execute propagation directives: %w", err) } } slog.Debug("Identifying absolute reference path...") refName, err := r.r.AbsoluteReference(refName) if err != nil { return err } // Track localRefName to check the expected tip as we may override refName localRefName := refName if options.RefNameOverride != "" { // dst differs from src // Eg: git push : slog.Debug("Name of reference overridden to match remote reference name, identifying absolute reference path...") refNameOverride, err := r.r.AbsoluteReference(options.RefNameOverride) if err != nil { return err } refName = refNameOverride } // The tip of the ref is always from the localRefName slog.Debug(fmt.Sprintf("Loading current state of '%s'...", localRefName)) refTip, err := r.r.GetReference(localRefName) if err != nil { return err } if !options.SkipCheckForDuplicate { slog.Debug("Checking if latest entry for reference has same target...") isDuplicate, err := r.isDuplicateEntry(refName, refTip) if err != nil { return err } if isDuplicate { slog.Debug("The latest entry has the same target, skipping creation of new entry...") return nil } } else { slog.Debug("Not checking if latest entry for reference has same target") } // TODO: once policy verification is in place, the signing key used by // signCommit must be verified for the refName in the delegation tree. 
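	// The new reference entry records the (refName, refTip) pair resolved
	// above; Commit writes it to the RSL reference and, when signCommit is
	// true, signs it using the repository's Git signing configuration
	// checked at the start of this function.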
slog.Debug("Creating RSL reference entry...") return rsl.NewReferenceEntry(refName, refTip).Commit(r.r, signCommit) } // RecordRSLEntryForReferenceAtTarget is a special version of // RecordRSLEntryForReference used for evaluation. It is only invoked when // gittuf is explicitly set in developer mode. func (r *Repository) RecordRSLEntryForReferenceAtTarget(refName, targetID string, signingKeyBytes []byte, opts ...rslopts.Option) error { // Double check that gittuf is in developer mode if !dev.InDevMode() { return dev.ErrNotInDevMode } options := &rslopts.Options{} for _, fn := range opts { fn(options) } slog.Debug("Identifying absolute reference path...") refName, err := r.r.AbsoluteReference(refName) if err != nil { return err } targetIDHash, err := gitinterface.NewHash(targetID) if err != nil { return err } if options.RefNameOverride != "" { // dst differs from src // Eg: git push : slog.Debug("Name of reference overridden to match remote reference name, identifying absolute reference path...") refName, err = r.r.AbsoluteReference(options.RefNameOverride) if err != nil { return err } } // TODO: once policy verification is in place, the signing key used by // signCommit must be verified for the refName in the delegation tree. slog.Debug("Creating RSL reference entry...") return rsl.NewReferenceEntry(refName, targetIDHash).CommitUsingSpecificKey(r.r, signingKeyBytes) } func (r *Repository) SkipAllInvalidReferenceEntriesForRef(targetRef string, signCommit bool) error { return rsl.SkipAllInvalidReferenceEntriesForRef(r.r, targetRef, signCommit) } // RecordRSLAnnotation is the interface for the user to add an RSL annotation // for one or more prior RSL entries. func (r *Repository) RecordRSLAnnotation(ctx context.Context, rslEntryIDs []string, skip bool, message string, signCommit bool) error { if signCommit { slog.Debug("Checking if Git signing is configured...") err := r.r.CanSign() if err != nil { return err } } if err := r.PropagateChangesFromUpstreamRepositories(ctx, signCommit); err != nil { return fmt.Errorf("unable to execute propagation directives: %w", err) } rslEntryHashes := []gitinterface.Hash{} for _, id := range rslEntryIDs { hash, err := gitinterface.NewHash(id) if err != nil { return err } rslEntryHashes = append(rslEntryHashes, hash) } // TODO: once policy verification is in place, the signing key used by // signCommit must be verified for the refNames of the rslEntryIDs. slog.Debug("Creating RSL annotation entry...") return rsl.NewAnnotationEntry(rslEntryHashes, skip, message).Commit(r.r, signCommit) } // ReconcileLocalRSLWithRemote checks the local RSL against the specified remote // and reconciles the local RSL if needed. If the local RSL doesn't exist or is // strictly behind the remote RSL, then the local RSL is updated to match the // remote RSL. If the local RSL is ahead of the remote RSL, nothing is updated. // Finally, if the local and remote RSLs have diverged, then the local only RSL // entries are reapplied over the latest entries in the remote if the local only // RSL entries and remote only entries are for different Git references. 
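//
// A minimal usage sketch (illustrative only; "origin" is a hypothetical
// remote name and error handling is elided):
//
//	// Reconcile the local RSL with the remote, signing any reapplied
//	// local-only entries.
//	if err := repo.ReconcileLocalRSLWithRemote(ctx, "origin", true); err != nil {
//		// Both RSLs changed the same refs, or another error occurred;
//		// resolve manually before retrying.
//	}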
func (r *Repository) ReconcileLocalRSLWithRemote(ctx context.Context, remoteName string, sign bool) error { if sign { slog.Debug("Checking if Git signing is configured...") err := r.r.CanSign() if err != nil { return err } } remoteURL, err := r.r.GetRemoteURL(remoteName) if err != nil { return err } if strings.HasPrefix(remoteURL, gittufTransportPrefix) { slog.Debug("Creating new remote to avoid using gittuf transport...") remoteName = fmt.Sprintf("check-remote-%s", remoteName) if err := r.r.AddRemote(remoteName, strings.TrimPrefix(remoteURL, gittufTransportPrefix)); err != nil { return err } defer r.r.RemoveRemote(remoteName) //nolint:errcheck } // Fetch status of RSL on the remote trackerRef := rsl.RemoteTrackerRef(remoteName) rslRemoteRefSpec := []string{fmt.Sprintf("%s:%s", rsl.Ref, trackerRef)} slog.Debug(fmt.Sprintf("Updating remote RSL tracker (%s)...", rslRemoteRefSpec)) if err := r.r.FetchRefSpec(remoteName, rslRemoteRefSpec); err != nil { return err } remoteRefState, err := r.r.GetReference(trackerRef) if err != nil { return err } slog.Debug(fmt.Sprintf("Remote RSL is at '%s'", remoteRefState.String())) // Load status of the local RSL for comparison localRefState, err := r.r.GetReference(rsl.Ref) if err != nil { return err } slog.Debug(fmt.Sprintf("Local RSL is at '%s'", localRefState.String())) // Check if local is nil and exit appropriately if localRefState.IsZero() { // Local RSL has not been populated but remote is not zero // Fetch updates to the local RSL slog.Debug("Local RSL has not been initialized but remote RSL exists, fetching remote RSL...") if err := r.r.Fetch(remoteName, []string{rsl.Ref}, true); err != nil { return err } slog.Debug("Updated local RSL!") return nil } // Check if equal and exit early if true if remoteRefState.Equal(localRefState) { slog.Debug("Local and remote RSLs have same state, nothing to do") return nil } // Next, check if remote is ahead of local knows, err := r.r.KnowsCommit(remoteRefState, localRefState) if err != nil { return err } if knows { slog.Debug("Remote RSL is ahead of local RSL, fetching remote RSL...") if err := r.r.Fetch(remoteName, []string{rsl.Ref}, true); err != nil { return err } slog.Debug("Updated local RSL!") return nil } // If not ancestor, local may be ahead or they may have diverged // If remote is ancestor, only local is ahead, no updates // If remote is not ancestor, the two have diverged, local needs to pull updates knows, err = r.r.KnowsCommit(localRefState, remoteRefState) if err != nil { return err } if knows { // We don't push to the remote RSL, that's handled alongside // other pushes (eg. 
via the transport) or explicitly slog.Debug("Local RSL is ahead of remote RSL, nothing to do") return nil } // This is the tricky one // First, we find a common ancestor for the two // Second, we identify all the entries in the local that are not in the // remote // Third, we set local to the remote's tip // Fourth, we apply all the entries that we identified over the new tip slog.Debug("Local and remote RSLs have diverged, identifying common ancestor to reconcile local RSL...") commonAncestor, err := r.r.GetCommonAncestor(localRefState, remoteRefState) if err != nil { return err } slog.Debug(fmt.Sprintf("Found common ancestor entry '%s'", commonAncestor.String())) localOnlyEntries, err := getRSLEntriesUntil(r.r, localRefState, commonAncestor) if err != nil { return err } remoteOnlyEntries, err := getRSLEntriesUntil(r.r, remoteRefState, commonAncestor) if err != nil { return err } localUpdatedRefs := set.NewSet[string]() for _, entry := range localOnlyEntries { slog.Debug(fmt.Sprintf("Identified local only entry that must be reapplied '%s'", entry.GetID().String())) if entry, isRefEntry := entry.(*rsl.ReferenceEntry); isRefEntry { localUpdatedRefs.Add(entry.RefName) } } remoteUpdatedRefs := set.NewSet[string]() for _, entry := range remoteOnlyEntries { slog.Debug(fmt.Sprintf("Identified remote only entry '%s'", entry.GetID().String())) if entry, isRefEntry := entry.(*rsl.ReferenceEntry); isRefEntry { remoteUpdatedRefs.Add(entry.RefName) } } // Check if remote has entries for refs that are also updated locally // We don't want to do conflict resolution right now intersection := localUpdatedRefs.Intersection(remoteUpdatedRefs) if intersection.Len() != 0 { return fmt.Errorf("unable to reconcile local RSL with remote; both RSLs contain changes to the same refs [%s]", strings.Join(intersection.Contents(), ", ")) } // Set local RSL to match the remote state if err := r.r.SetReference(rsl.Ref, remoteRefState); err != nil { return fmt.Errorf("unable to update local RSL: %w", err) } // Apply local only entries on top of the new local RSL // localOnlyEntries is in reverse order for i := len(localOnlyEntries) - 1; i >= 0; i-- { slog.Debug(fmt.Sprintf("Reapplying entry '%s'...", localOnlyEntries[i].GetID().String())) // We create a new object so as to apply anything the // entry may contain that is inferred at commit time // For example, an incrementing number inferred from the // parent entry switch entry := localOnlyEntries[i].(type) { case *rsl.ReferenceEntry: if err := rsl.NewReferenceEntry(entry.RefName, entry.TargetID).Commit(r.r, sign); err != nil { return fmt.Errorf("unable to reapply reference entry '%s': %w", entry.ID.String(), err) } case *rsl.AnnotationEntry: if err := rsl.NewAnnotationEntry(entry.RSLEntryIDs, entry.Skip, entry.Message).Commit(r.r, sign); err != nil { return fmt.Errorf("unable to reapply annotation entry '%s': %w", entry.ID.String(), err) } } if slog.Default().Enabled(ctx, slog.LevelDebug) { currentTip, err := r.r.GetReference(rsl.Ref) if err != nil { return fmt.Errorf("unable to get current tip of the RSL: %w", err) } slog.Debug(fmt.Sprintf("New entry ID for '%s' is '%s'", localOnlyEntries[i].GetID().String(), currentTip.String())) } } slog.Debug("Updated local RSL!") return nil } func getRSLEntriesUntil(repo *gitinterface.Repository, start, until gitinterface.Hash) ([]rsl.Entry, error) { entries := []rsl.Entry{} iterator, err := rsl.GetEntry(repo, start) if err != nil { return nil, fmt.Errorf("unable to load entry '%s': %w", start.String(), err) } for { entries = append(entries, 
iterator) parent, err := rsl.GetParentForEntry(repo, iterator) if err != nil { return nil, fmt.Errorf("unable to load parent of entry '%s': %w", iterator.GetID().String(), err) } if parent.GetID().Equal(until) { break } iterator = parent } return entries, nil } // CheckRemoteRSLForUpdates checks if the RSL at the specified remote // repository has updated in comparison with the local repository's RSL. This is // done by fetching the remote RSL to the local repository's remote RSL tracker. // If the remote RSL has been updated, this method also checks if the local and // remote RSLs have diverged. In summary, the first return value indicates if // there is an update and the second return value indicates if the two RSLs have // diverged and need to be reconciled. // // Deprecated: this was a precursor to ReconcileLocalRSLWithRemote, we probably // don't need both of them. func (r *Repository) CheckRemoteRSLForUpdates(_ context.Context, remoteName string) (bool, bool, error) { remoteURL, err := r.r.GetRemoteURL(remoteName) if err != nil { return false, false, err } if strings.HasPrefix(remoteURL, gittufTransportPrefix) { slog.Debug("Creating new remote to avoid using gittuf transport...") remoteName = fmt.Sprintf("check-remote-%s", remoteName) if err := r.r.AddRemote(remoteName, strings.TrimPrefix(remoteURL, gittufTransportPrefix)); err != nil { return false, false, err } defer r.r.RemoveRemote(remoteName) //nolint:errcheck } trackerRef := rsl.RemoteTrackerRef(remoteName) rslRemoteRefSpec := []string{fmt.Sprintf("%s:%s", rsl.Ref, trackerRef)} slog.Debug(fmt.Sprintf("Updating remote RSL tracker (%s)...", rslRemoteRefSpec)) if err := r.r.FetchRefSpec(remoteName, rslRemoteRefSpec); err != nil { if errors.Is(err, transport.ErrEmptyRemoteRepository) { // Check if remote is empty and exit appropriately return false, false, nil } return false, false, err } remoteRefState, err := r.r.GetReference(trackerRef) if err != nil { return false, false, err } slog.Debug(fmt.Sprintf("Remote RSL is at '%s'", remoteRefState.String())) localRefState, err := r.r.GetReference(rsl.Ref) if err != nil { return false, false, err } slog.Debug(fmt.Sprintf("Local RSL is at '%s'", localRefState.String())) // Check if local is nil and exit appropriately if localRefState.IsZero() { // Local RSL has not been populated but remote is not zero // So there are updates the local can pull slog.Debug("Local RSL has not been initialized but remote RSL exists") return true, false, nil } // Check if equal and exit early if true if remoteRefState.Equal(localRefState) { slog.Debug("Local and remote RSLs have same state") return false, false, nil } // Next, check if remote is ahead of local knows, err := r.r.KnowsCommit(remoteRefState, localRefState) if err != nil { return false, false, err } if knows { slog.Debug("Remote RSL is ahead of local RSL") return true, false, nil } // If not ancestor, local may be ahead or they may have diverged // If remote is ancestor, only local is ahead, no updates // If remote is not ancestor, the two have diverged, local needs to pull updates knows, err = r.r.KnowsCommit(localRefState, remoteRefState) if err != nil { return false, false, err } if knows { slog.Debug("Local RSL is ahead of remote RSL") return false, false, nil } slog.Debug("Local and remote RSLs have diverged") return true, true, nil } // PushRSL pushes the local RSL to the specified remote. As this push defaults // to fast-forward only, divergent RSL states are detected. 
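//
// A minimal usage sketch (assumes a remote named "origin" is configured):
//
//	if err := repo.PushRSL("origin"); err != nil {
//		// errors.Is(err, ErrPushingRSL) holds here; a common cause is a
//		// divergent remote RSL that must be reconciled first.
//	}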
func (r *Repository) PushRSL(remoteName string) error { slog.Debug(fmt.Sprintf("Pushing RSL reference to '%s'...", remoteName)) if err := r.r.Push(remoteName, []string{rsl.Ref}); err != nil { return errors.Join(ErrPushingRSL, err) } return nil } // PullRSL pulls RSL contents from the specified remote to the local RSL. The // fetch is marked as fast forward only to detect RSL divergence. func (r *Repository) PullRSL(remoteName string) error { slog.Debug(fmt.Sprintf("Pulling RSL reference from '%s'...", remoteName)) if err := r.r.Fetch(remoteName, []string{rsl.Ref}, true); err != nil { return errors.Join(ErrPullingRSL, err) } return nil } // isDuplicateEntry checks if the latest unskipped entry for the ref has the // same target ID. Note that it's legal for the RSL to have target A, then B, // then A again, this is not considered a duplicate entry func (r *Repository) isDuplicateEntry(refName string, targetID gitinterface.Hash) (bool, error) { latestUnskippedEntry, _, err := rsl.GetLatestReferenceUpdaterEntry(r.r, rsl.ForReference(refName), rsl.IsUnskipped()) if err != nil { if errors.Is(err, rsl.ErrRSLEntryNotFound) { return false, nil } return false, err } return latestUnskippedEntry.GetTargetID().Equal(targetID), nil } // PropagateChangesFromUpstreamRepositories invokes gittuf's propagation // workflow. It inspects the latest policy metadata to find the applicable // propagation directives, and executes the workflow on each one. func (r *Repository) PropagateChangesFromUpstreamRepositories(ctx context.Context, sign bool) error { if !dev.InDevMode() { slog.Debug("Propagation is only supported in developer mode, skipping check...") return nil } slog.Debug("Checking if upstream changes must be propagated...") state, err := policy.LoadCurrentState(ctx, r.r, policy.PolicyRef) if err != nil { if errors.Is(err, rsl.ErrRSLEntryNotFound) { return nil } return err } rootMetadata, err := state.GetRootMetadata(false) if err != nil { return err } directives := rootMetadata.GetPropagationDirectives() if len(directives) == 0 { slog.Debug("No propagation directives found") return nil } upstreamRepositoryDirectivesMapping := map[string][]tuf.PropagationDirective{} for _, directive := range directives { // Group directives for the same repository together if _, has := upstreamRepositoryDirectivesMapping[directive.GetUpstreamRepository()]; !has { upstreamRepositoryDirectivesMapping[directive.GetUpstreamRepository()] = []tuf.PropagationDirective{} } upstreamRepositoryDirectivesMapping[directive.GetUpstreamRepository()] = append(upstreamRepositoryDirectivesMapping[directive.GetUpstreamRepository()], directive) } for upstreamRepositoryURL, directives := range upstreamRepositoryDirectivesMapping { slog.Debug(fmt.Sprintf("Propagating changes from repository '%s'...", upstreamRepositoryURL)) upstreamRepositoryLocation, err := os.MkdirTemp("", "gittuf-propagate-upstream") if err != nil { return err } defer os.RemoveAll(upstreamRepositoryLocation) //nolint:errcheck fetchReferences := set.NewSetFromItems(rsl.Ref) for _, directive := range directives { fetchReferences.Add(directive.GetUpstreamReference()) } upstreamRepository, err := gitinterface.CloneAndFetchRepository(upstreamRepositoryURL, upstreamRepositoryLocation, "", fetchReferences.Contents(), true) if err != nil { // TODO: we see this error when required upstream ref isn't found, handle gracefully? 
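			// For now, surface the fetch failure to the caller rather than
			// skipping the directive.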
return fmt.Errorf("unable to fetch upstream repository '%s': %w", upstreamRepositoryURL, err) } if err := rsl.PropagateChangesFromUpstreamRepository(r.r, upstreamRepository, directives, sign); err != nil { // TODO: atomic? abort? return err } } return nil } gittuf-0.9.0/experimental/gittuf/rsl_test.go000066400000000000000000001441561475150141000212200ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package gittuf import ( "fmt" "os" "path/filepath" "testing" rslopts "github.com/gittuf/gittuf/experimental/gittuf/options/rsl" "github.com/gittuf/gittuf/internal/common/set" "github.com/gittuf/gittuf/internal/dev" "github.com/gittuf/gittuf/internal/gitinterface" "github.com/gittuf/gittuf/internal/policy" "github.com/gittuf/gittuf/internal/rsl" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) func TestRecordRSLEntryForReference(t *testing.T) { tempDir := t.TempDir() r := gitinterface.CreateTestGitRepository(t, tempDir, false) repo := &Repository{r: r} treeBuilder := gitinterface.NewTreeBuilder(repo.r) emptyTreeHash, err := treeBuilder.WriteTreeFromEntries(nil) if err != nil { t.Fatal(err) } commitID, err := repo.r.Commit(emptyTreeHash, "refs/heads/main", "Initial commit\n", false) if err != nil { t.Fatal(err) } if err := repo.RecordRSLEntryForReference(testCtx, "refs/heads/main", false); err != nil { t.Fatal(err) } entryT, err := rsl.GetLatestEntry(repo.r) if err != nil { t.Fatal(err) } entry, ok := entryT.(*rsl.ReferenceEntry) if !ok { t.Fatal(fmt.Errorf("invalid entry type")) } assert.Equal(t, "refs/heads/main", entry.RefName) assert.Equal(t, commitID, entry.TargetID) newCommitID, err := repo.r.Commit(emptyTreeHash, "refs/heads/main", "Another commit\n", false) if err != nil { t.Fatal(err) } if err := repo.RecordRSLEntryForReference(testCtx, "main", false); err != nil { t.Fatal(err) } rslRef, err := repo.r.GetReference(rsl.Ref) if err != nil { t.Fatal(err) } entryT, err = rsl.GetEntry(repo.r, rslRef) if err != nil { t.Fatal(err) } entry, ok = entryT.(*rsl.ReferenceEntry) if !ok { t.Fatal(fmt.Errorf("invalid entry type")) } assert.Equal(t, "refs/heads/main", entry.RefName) assert.Equal(t, newCommitID, entry.TargetID) err = repo.RecordRSLEntryForReference(testCtx, "main", false) assert.Nil(t, err) entryT, err = rsl.GetLatestEntry(repo.r) if err != nil { t.Fatal(err) } // check that a duplicate entry has not been created assert.Equal(t, entry.GetID(), entryT.GetID()) // Record entry for a different dst ref err = repo.RecordRSLEntryForReference(testCtx, "refs/heads/main", false, rslopts.WithOverrideRefName("refs/heads/not-main")) assert.Nil(t, err) entryT, err = rsl.GetLatestEntry(repo.r) if err != nil { t.Fatal(err) } entry, ok = entryT.(*rsl.ReferenceEntry) if !ok { t.Fatal(fmt.Errorf("invalid entry type")) } assert.Equal(t, newCommitID, entry.TargetID) assert.Equal(t, "refs/heads/not-main", entry.RefName) // Record entry for a different dst ref and skip check for duplicate currentEntryID := entry.GetID() err = repo.RecordRSLEntryForReference(testCtx, "refs/heads/main", false, rslopts.WithOverrideRefName("refs/heads/not-main"), rslopts.WithSkipCheckForDuplicateEntry()) assert.Nil(t, err) entryT, err = rsl.GetLatestEntry(repo.r) if err != nil { t.Fatal(err) } entry, ok = entryT.(*rsl.ReferenceEntry) if !ok { t.Fatal(fmt.Errorf("invalid entry type")) } assert.NotEqual(t, currentEntryID, entry.GetID()) assert.Equal(t, newCommitID, entry.TargetID) assert.Equal(t, "refs/heads/not-main", entry.RefName) } func 
TestRecordRSLEntryForReferenceAtTarget(t *testing.T) { t.Setenv(dev.DevModeKey, "1") refName := "refs/heads/main" anotherRefName := "refs/heads/feature" tests := map[string]struct { keyBytes []byte }{ "using GPG key": {keyBytes: gpgKeyBytes}, "using RSA SSH key": {keyBytes: rsaKeyBytes}, "using ECDSA ssh key": {keyBytes: ecdsaKeyBytes}, } for name, test := range tests { t.Run(name, func(t *testing.T) { tmpDir := t.TempDir() r := gitinterface.CreateTestGitRepository(t, tmpDir, false) repo := &Repository{r: r} treeBuilder := gitinterface.NewTreeBuilder(repo.r) emptyTreeHash, err := treeBuilder.WriteTreeFromEntries(nil) if err != nil { t.Fatal(err) } commitID, err := repo.r.Commit(emptyTreeHash, refName, "Test commit", false) if err != nil { t.Fatal(err) } err = repo.RecordRSLEntryForReferenceAtTarget(refName, commitID.String(), test.keyBytes) assert.Nil(t, err) latestEntry, err := rsl.GetLatestEntry(repo.r) if err != nil { t.Fatal(err) } assert.Equal(t, refName, latestEntry.(*rsl.ReferenceEntry).RefName) assert.Equal(t, commitID, latestEntry.(*rsl.ReferenceEntry).TargetID) // Now checkout another branch, add another commit if err := repo.r.SetReference(anotherRefName, commitID); err != nil { t.Fatal(err) } newCommitID, err := repo.r.Commit(emptyTreeHash, anotherRefName, "Commit on feature branch", false) if err != nil { t.Fatal(err) } // We record an RSL entry for the commit in the new branch err = repo.RecordRSLEntryForReferenceAtTarget(anotherRefName, newCommitID.String(), test.keyBytes) assert.Nil(t, err) // Let's record a couple more commits and use the older of the two commitID, err = repo.r.Commit(emptyTreeHash, refName, "Another commit", false) if err != nil { t.Fatal(err) } _, err = repo.r.Commit(emptyTreeHash, refName, "Latest commit", false) if err != nil { t.Fatal(err) } err = repo.RecordRSLEntryForReferenceAtTarget(refName, commitID.String(), test.keyBytes) assert.Nil(t, err) // Let's record a couple more commits and add an entry with a // different dstRefName to the first rather than latest commit commitID, err = repo.r.Commit(emptyTreeHash, refName, "Another commit", false) if err != nil { t.Fatal(err) } _, err = repo.r.Commit(emptyTreeHash, refName, "Latest commit", false) if err != nil { t.Fatal(err) } err = repo.RecordRSLEntryForReferenceAtTarget(refName, commitID.String(), test.keyBytes, rslopts.WithOverrideRefName(anotherRefName)) assert.Nil(t, err) latestEntry, err = rsl.GetLatestEntry(repo.r) if err != nil { t.Fatal(err) } assert.Equal(t, anotherRefName, latestEntry.(*rsl.ReferenceEntry).RefName) assert.Equal(t, commitID, latestEntry.(*rsl.ReferenceEntry).TargetID) }) } } func TestRecordRSLAnnotation(t *testing.T) { tempDir := t.TempDir() r := gitinterface.CreateTestGitRepository(t, tempDir, false) repo := &Repository{r: r} err := repo.RecordRSLAnnotation(testCtx, []string{gitinterface.ZeroHash.String()}, false, "test annotation", false) assert.ErrorIs(t, err, rsl.ErrRSLEntryNotFound) treeBuilder := gitinterface.NewTreeBuilder(repo.r) emptyTreeHash, err := treeBuilder.WriteTreeFromEntries(nil) if err != nil { t.Fatal(err) } _, err = repo.r.Commit(emptyTreeHash, "refs/heads/main", "Initial commit\n", false) if err != nil { t.Fatal(err) } if err := repo.RecordRSLEntryForReference(testCtx, "refs/heads/main", false); err != nil { t.Fatal(err) } latestEntry, err := rsl.GetLatestEntry(repo.r) if err != nil { t.Fatal(err) } entryID := latestEntry.GetID() err = repo.RecordRSLAnnotation(testCtx, []string{entryID.String()}, false, "test annotation", false) assert.Nil(t, err) 
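	// The annotation should now be the latest RSL entry; verify its message,
	// referenced entry IDs, and skip flag below.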
latestEntry, err = rsl.GetLatestEntry(repo.r) if err != nil { t.Fatal(err) } assert.IsType(t, &rsl.AnnotationEntry{}, latestEntry) annotation := latestEntry.(*rsl.AnnotationEntry) assert.Equal(t, "test annotation", annotation.Message) assert.Equal(t, []gitinterface.Hash{entryID}, annotation.RSLEntryIDs) assert.False(t, annotation.Skip) err = repo.RecordRSLAnnotation(testCtx, []string{entryID.String()}, true, "skip annotation", false) assert.Nil(t, err) latestEntry, err = rsl.GetLatestEntry(repo.r) if err != nil { t.Fatal(err) } assert.IsType(t, &rsl.AnnotationEntry{}, latestEntry) annotation = latestEntry.(*rsl.AnnotationEntry) assert.Equal(t, "skip annotation", annotation.Message) assert.Equal(t, []gitinterface.Hash{entryID}, annotation.RSLEntryIDs) assert.True(t, annotation.Skip) } func TestReconcileLocalRSLWithRemote(t *testing.T) { remoteName := "origin" refName := "refs/heads/main" anotherRefName := "refs/heads/feature" t.Run("remote has updates for local", func(t *testing.T) { tmpDir := t.TempDir() remoteR := gitinterface.CreateTestGitRepository(t, tmpDir, false) remoteRepo := &Repository{r: remoteR} treeBuilder := gitinterface.NewTreeBuilder(remoteR) emptyTreeHash, err := treeBuilder.WriteTreeFromEntries(nil) if err != nil { t.Fatal(err) } // Simulate remote actions if _, err := remoteR.Commit(emptyTreeHash, refName, "Test commit", false); err != nil { t.Fatal(err) } if err := remoteRepo.RecordRSLEntryForReference(testCtx, refName, false); err != nil { t.Fatal(err) } // Clone remote repository // TODO: this should be handled by the Repository package localTmpDir := filepath.Join(os.TempDir(), fmt.Sprintf("local-%s", t.Name())) defer os.RemoveAll(localTmpDir) //nolint:errcheck localR, err := gitinterface.CloneAndFetchRepository(tmpDir, localTmpDir, refName, []string{rsl.Ref}, true) if err != nil { t.Fatal(err) } localRepo := &Repository{r: localR} assertLocalAndRemoteRefsMatch(t, localR, remoteR, rsl.Ref) // Simulate more remote actions if _, err := remoteRepo.r.Commit(emptyTreeHash, refName, "Test commit", false); err != nil { t.Fatal(err) } if err := remoteRepo.RecordRSLEntryForReference(testCtx, refName, false); err != nil { t.Fatal(err) } originalRSLTip, err := localRepo.r.GetReference(rsl.Ref) if err != nil { t.Fatal(err) } err = localRepo.ReconcileLocalRSLWithRemote(testCtx, remoteName, false) assert.Nil(t, err) currentRSLTip, err := localRepo.r.GetReference(rsl.Ref) if err != nil { t.Fatal(err) } // Local RSL must now be updated to match remote assertLocalAndRemoteRefsMatch(t, localR, remoteR, rsl.Ref) assert.NotEqual(t, originalRSLTip, currentRSLTip) }) t.Run("remote has no updates for local", func(t *testing.T) { tmpDir := t.TempDir() remoteR := gitinterface.CreateTestGitRepository(t, tmpDir, false) remoteRepo := &Repository{r: remoteR} treeBuilder := gitinterface.NewTreeBuilder(remoteR) emptyTreeHash, err := treeBuilder.WriteTreeFromEntries(nil) if err != nil { t.Fatal(err) } // Simulate remote actions if _, err := remoteR.Commit(emptyTreeHash, refName, "Test commit", false); err != nil { t.Fatal(err) } if err := remoteRepo.RecordRSLEntryForReference(testCtx, refName, false); err != nil { t.Fatal(err) } // Clone remote repository // TODO: this should be handled by the Repository package localTmpDir := filepath.Join(os.TempDir(), fmt.Sprintf("local-%s", t.Name())) defer os.RemoveAll(localTmpDir) //nolint:errcheck localR, err := gitinterface.CloneAndFetchRepository(tmpDir, localTmpDir, refName, []string{rsl.Ref}, true) if err != nil { t.Fatal(err) } localRepo := 
&Repository{r: localR} originalRSLTip, err := localRepo.r.GetReference(rsl.Ref) if err != nil { t.Fatal(err) } err = localRepo.ReconcileLocalRSLWithRemote(testCtx, remoteName, false) assert.Nil(t, err) currentRSLTip, err := localRepo.r.GetReference(rsl.Ref) if err != nil { t.Fatal(err) } assert.Equal(t, originalRSLTip, currentRSLTip) }) t.Run("local is ahead of remote", func(t *testing.T) { tmpDir := t.TempDir() remoteR := gitinterface.CreateTestGitRepository(t, tmpDir, false) remoteRepo := &Repository{r: remoteR} treeBuilder := gitinterface.NewTreeBuilder(remoteR) emptyTreeHash, err := treeBuilder.WriteTreeFromEntries(nil) if err != nil { t.Fatal(err) } // Simulate remote actions if _, err := remoteR.Commit(emptyTreeHash, refName, "Test commit", false); err != nil { t.Fatal(err) } if err := remoteRepo.RecordRSLEntryForReference(testCtx, refName, false); err != nil { t.Fatal(err) } // Clone remote repository // TODO: this should be handled by the Repository package localTmpDir := filepath.Join(os.TempDir(), fmt.Sprintf("local-%s", t.Name())) defer os.RemoveAll(localTmpDir) //nolint:errcheck localR, err := gitinterface.CloneAndFetchRepository(tmpDir, localTmpDir, refName, []string{rsl.Ref}, true) if err != nil { t.Fatal(err) } require.Nil(t, localR.SetGitConfig("user.name", "Jane Doe")) require.Nil(t, localR.SetGitConfig("user.email", "jane.doe@example.com")) localRepo := &Repository{r: localR} // Simulate local actions if _, err := localR.Commit(emptyTreeHash, refName, "Test commit", false); err != nil { t.Fatal(err) } if err := localRepo.RecordRSLEntryForReference(testCtx, refName, false); err != nil { t.Fatal(err) } originalLocalRSLTip, err := localRepo.r.GetReference(rsl.Ref) if err != nil { t.Fatal(err) } originalRemoteRSLTip, err := remoteRepo.r.GetReference(rsl.Ref) if err != nil { t.Fatal(err) } err = localRepo.ReconcileLocalRSLWithRemote(testCtx, remoteName, false) assert.Nil(t, err) currentLocalRSLTip, err := localRepo.r.GetReference(rsl.Ref) if err != nil { t.Fatal(err) } currentRemoteRSLTip, err := remoteRepo.r.GetReference(rsl.Ref) if err != nil { t.Fatal(err) } // No change to local AND no change to remote assert.Equal(t, originalLocalRSLTip, currentLocalRSLTip) assert.Equal(t, originalRemoteRSLTip, currentRemoteRSLTip) }) t.Run("remote and local have diverged", func(t *testing.T) { tmpDir := t.TempDir() remoteR := gitinterface.CreateTestGitRepository(t, tmpDir, false) remoteRepo := &Repository{r: remoteR} treeBuilder := gitinterface.NewTreeBuilder(remoteR) emptyTreeHash, err := treeBuilder.WriteTreeFromEntries(nil) if err != nil { t.Fatal(err) } // Simulate remote actions if _, err := remoteR.Commit(emptyTreeHash, refName, "Test commit", false); err != nil { t.Fatal(err) } if err := remoteRepo.RecordRSLEntryForReference(testCtx, refName, false); err != nil { t.Fatal(err) } // Clone remote repository // TODO: this should be handled by the Repository package localTmpDir := filepath.Join(os.TempDir(), fmt.Sprintf("local-%s", t.Name())) defer os.RemoveAll(localTmpDir) //nolint:errcheck localR, err := gitinterface.CloneAndFetchRepository(tmpDir, localTmpDir, refName, []string{rsl.Ref}, true) if err != nil { t.Fatal(err) } require.Nil(t, localR.SetGitConfig("user.name", "Jane Doe")) require.Nil(t, localR.SetGitConfig("user.email", "jane.doe@example.com")) localRepo := &Repository{r: localR} // Simulate remote actions if _, err := remoteRepo.r.Commit(emptyTreeHash, refName, "Test commit", false); err != nil { t.Fatal(err) } if err := remoteRepo.RecordRSLEntryForReference(testCtx, 
refName, false); err != nil { t.Fatal(err) } // Simulate local actions if _, err := localRepo.r.Commit(emptyTreeHash, anotherRefName, "Test commit", false); err != nil { t.Fatal(err) } if err := localRepo.RecordRSLEntryForReference(testCtx, anotherRefName, false); err != nil { t.Fatal(err) } originalLocalRSLTip, err := localRepo.r.GetReference(rsl.Ref) if err != nil { t.Fatal(err) } originalRemoteRSLTip, err := remoteRepo.r.GetReference(rsl.Ref) if err != nil { t.Fatal(err) } err = localRepo.ReconcileLocalRSLWithRemote(testCtx, remoteName, false) assert.Nil(t, err) currentLocalRSLTip, err := localRepo.r.GetReference(rsl.Ref) if err != nil { t.Fatal(err) } currentRemoteRSLTip, err := remoteRepo.r.GetReference(rsl.Ref) if err != nil { t.Fatal(err) } // Remote must not have changed assert.Equal(t, originalRemoteRSLTip, currentRemoteRSLTip) // The current remote tip must be the parent of the current // local tip parents, err := localRepo.r.GetCommitParentIDs(currentLocalRSLTip) if err != nil { t.Fatal(err) } assert.Equal(t, currentRemoteRSLTip, parents[0]) // The current local tip and original local tip must have same // entry ref and target ID originalEntry, err := rsl.GetEntry(localRepo.r, originalLocalRSLTip) if err != nil { t.Fatal(err) } currentEntry, err := rsl.GetEntry(localRepo.r, currentLocalRSLTip) if err != nil { t.Fatal(err) } assert.Equal(t, originalEntry.(*rsl.ReferenceEntry).RefName, currentEntry.(*rsl.ReferenceEntry).RefName) assert.Equal(t, originalEntry.(*rsl.ReferenceEntry).TargetID, currentEntry.(*rsl.ReferenceEntry).TargetID) }) t.Run("remote and local have diverged but modify same ref", func(t *testing.T) { tmpDir := t.TempDir() remoteR := gitinterface.CreateTestGitRepository(t, tmpDir, false) remoteRepo := &Repository{r: remoteR} treeBuilder := gitinterface.NewTreeBuilder(remoteR) emptyTreeHash, err := treeBuilder.WriteTreeFromEntries(nil) if err != nil { t.Fatal(err) } // Simulate remote actions if _, err := remoteR.Commit(emptyTreeHash, refName, "Test commit", false); err != nil { t.Fatal(err) } if err := remoteRepo.RecordRSLEntryForReference(testCtx, refName, false); err != nil { t.Fatal(err) } // Clone remote repository // TODO: this should be handled by the Repository package localTmpDir := filepath.Join(os.TempDir(), fmt.Sprintf("local-%s", t.Name())) defer os.RemoveAll(localTmpDir) //nolint:errcheck localR, err := gitinterface.CloneAndFetchRepository(tmpDir, localTmpDir, refName, []string{rsl.Ref}, true) if err != nil { t.Fatal(err) } require.Nil(t, localR.SetGitConfig("user.name", "Jane Doe")) require.Nil(t, localR.SetGitConfig("user.email", "jane.doe@example.com")) localRepo := &Repository{r: localR} // Simulate remote actions if _, err := remoteRepo.r.Commit(emptyTreeHash, refName, "Test commit", false); err != nil { t.Fatal(err) } if err := remoteRepo.RecordRSLEntryForReference(testCtx, refName, false); err != nil { t.Fatal(err) } // Simulate local actions -- NOT anotherRefname here if _, err := localRepo.r.Commit(emptyTreeHash, refName, "Test commit", false); err != nil { t.Fatal(err) } if err := localRepo.RecordRSLEntryForReference(testCtx, refName, false); err != nil { t.Fatal(err) } originalLocalRSLTip, err := localRepo.r.GetReference(rsl.Ref) if err != nil { t.Fatal(err) } originalRemoteRSLTip, err := remoteRepo.r.GetReference(rsl.Ref) if err != nil { t.Fatal(err) } err = localRepo.ReconcileLocalRSLWithRemote(testCtx, remoteName, false) assert.ErrorContains(t, err, "changes to the same ref") currentLocalRSLTip, err := localRepo.r.GetReference(rsl.Ref) if 
err != nil { t.Fatal(err) } currentRemoteRSLTip, err := remoteRepo.r.GetReference(rsl.Ref) if err != nil { t.Fatal(err) } // Neither RSL should have changed assert.Equal(t, originalRemoteRSLTip, currentRemoteRSLTip) assert.Equal(t, originalLocalRSLTip, currentLocalRSLTip) }) } func TestCheckRemoteRSLForUpdates(t *testing.T) { remoteName := "origin" refName := "refs/heads/main" anotherRefName := "refs/heads/feature" t.Run("remote has updates for local", func(t *testing.T) { tmpDir := t.TempDir() remoteR := gitinterface.CreateTestGitRepository(t, tmpDir, false) remoteRepo := &Repository{r: remoteR} treeBuilder := gitinterface.NewTreeBuilder(remoteR) emptyTreeHash, err := treeBuilder.WriteTreeFromEntries(nil) if err != nil { t.Fatal(err) } // Simulate remote actions if _, err := remoteR.Commit(emptyTreeHash, refName, "Test commit", false); err != nil { t.Fatal(err) } if err := remoteRepo.RecordRSLEntryForReference(testCtx, refName, false); err != nil { t.Fatal(err) } // Clone remote repository // TODO: this should be handled by the Repository package localTmpDir := filepath.Join(os.TempDir(), fmt.Sprintf("local-%s", t.Name())) defer os.RemoveAll(localTmpDir) //nolint:errcheck localR, err := gitinterface.CloneAndFetchRepository(tmpDir, localTmpDir, refName, []string{rsl.Ref}, true) if err != nil { t.Fatal(err) } localRepo := &Repository{r: localR} // Simulate more remote actions if _, err := remoteRepo.r.Commit(emptyTreeHash, refName, "Test commit", false); err != nil { t.Fatal(err) } if err := remoteRepo.RecordRSLEntryForReference(testCtx, refName, false); err != nil { t.Fatal(err) } // Local should be notified that remote has updates hasUpdates, hasDiverged, err := localRepo.CheckRemoteRSLForUpdates(testCtx, remoteName) assert.Nil(t, err) assert.True(t, hasUpdates) assert.False(t, hasDiverged) }) t.Run("remote has no updates for local", func(t *testing.T) { tmpDir := t.TempDir() remoteR := gitinterface.CreateTestGitRepository(t, tmpDir, false) remoteRepo := &Repository{r: remoteR} treeBuilder := gitinterface.NewTreeBuilder(remoteR) emptyTreeHash, err := treeBuilder.WriteTreeFromEntries(nil) if err != nil { t.Fatal(err) } // Simulate remote actions if _, err := remoteR.Commit(emptyTreeHash, refName, "Test commit", false); err != nil { t.Fatal(err) } if err := remoteRepo.RecordRSLEntryForReference(testCtx, refName, false); err != nil { t.Fatal(err) } // Clone remote repository // TODO: this should be handled by the Repository package localTmpDir := filepath.Join(os.TempDir(), fmt.Sprintf("local-%s", t.Name())) defer os.RemoveAll(localTmpDir) //nolint:errcheck localR, err := gitinterface.CloneAndFetchRepository(tmpDir, localTmpDir, refName, []string{rsl.Ref}, true) if err != nil { t.Fatal(err) } localRepo := &Repository{r: localR} // Local should be notified that remote has no updates hasUpdates, hasDiverged, err := localRepo.CheckRemoteRSLForUpdates(testCtx, remoteName) assert.Nil(t, err) assert.False(t, hasUpdates) assert.False(t, hasDiverged) }) t.Run("local is ahead of remote", func(t *testing.T) { tmpDir := t.TempDir() remoteR := gitinterface.CreateTestGitRepository(t, tmpDir, false) remoteRepo := &Repository{r: remoteR} treeBuilder := gitinterface.NewTreeBuilder(remoteR) emptyTreeHash, err := treeBuilder.WriteTreeFromEntries(nil) if err != nil { t.Fatal(err) } // Simulate remote actions if _, err := remoteR.Commit(emptyTreeHash, refName, "Test commit", false); err != nil { t.Fatal(err) } if err := remoteRepo.RecordRSLEntryForReference(testCtx, refName, false); err != nil { t.Fatal(err) } 
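		// The remote now has its initial RSL entry; clone it locally and then
		// advance only the local RSL to model the local-ahead case.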
// Clone remote repository // TODO: this should be handled by the Repository package localTmpDir := filepath.Join(os.TempDir(), fmt.Sprintf("local-%s", t.Name())) defer os.RemoveAll(localTmpDir) //nolint:errcheck localR, err := gitinterface.CloneAndFetchRepository(tmpDir, localTmpDir, refName, []string{rsl.Ref}, true) if err != nil { t.Fatal(err) } require.Nil(t, localR.SetGitConfig("user.name", "Jane Doe")) require.Nil(t, localR.SetGitConfig("user.email", "jane.doe@example.com")) localRepo := &Repository{r: localR} // Simulate local actions if _, err := localR.Commit(emptyTreeHash, refName, "Test commit", false); err != nil { t.Fatal(err) } if err := localRepo.RecordRSLEntryForReference(testCtx, refName, false); err != nil { t.Fatal(err) } // Local should be notified that remote has no updates hasUpdates, hasDiverged, err := localRepo.CheckRemoteRSLForUpdates(testCtx, remoteName) assert.Nil(t, err) assert.False(t, hasUpdates) assert.False(t, hasDiverged) }) t.Run("remote and local have diverged", func(t *testing.T) { tmpDir := t.TempDir() remoteR := gitinterface.CreateTestGitRepository(t, tmpDir, false) remoteRepo := &Repository{r: remoteR} treeBuilder := gitinterface.NewTreeBuilder(remoteR) emptyTreeHash, err := treeBuilder.WriteTreeFromEntries(nil) if err != nil { t.Fatal(err) } // Simulate remote actions if _, err := remoteR.Commit(emptyTreeHash, refName, "Test commit", false); err != nil { t.Fatal(err) } if err := remoteRepo.RecordRSLEntryForReference(testCtx, refName, false); err != nil { t.Fatal(err) } // Clone remote repository // TODO: this should be handled by the Repository package localTmpDir := filepath.Join(os.TempDir(), fmt.Sprintf("local-%s", t.Name())) defer os.RemoveAll(localTmpDir) //nolint:errcheck localR, err := gitinterface.CloneAndFetchRepository(tmpDir, localTmpDir, refName, []string{rsl.Ref}, true) if err != nil { t.Fatal(err) } require.Nil(t, localR.SetGitConfig("user.name", "Jane Doe")) require.Nil(t, localR.SetGitConfig("user.email", "jane.doe@example.com")) localRepo := &Repository{r: localR} // Simulate remote actions if _, err := remoteRepo.r.Commit(emptyTreeHash, refName, "Test commit", false); err != nil { t.Fatal(err) } if err := remoteRepo.RecordRSLEntryForReference(testCtx, refName, false); err != nil { t.Fatal(err) } // Simulate local actions if _, err := localRepo.r.Commit(emptyTreeHash, anotherRefName, "Test commit", false); err != nil { t.Fatal(err) } if err := localRepo.RecordRSLEntryForReference(testCtx, anotherRefName, false); err != nil { t.Fatal(err) } // Local should be notified that remote has updates that needs to be // reconciled hasUpdates, hasDiverged, err := localRepo.CheckRemoteRSLForUpdates(testCtx, remoteName) assert.Nil(t, err) assert.True(t, hasUpdates) assert.True(t, hasDiverged) }) } func TestPushRSL(t *testing.T) { remoteName := "origin" t.Run("successful push", func(t *testing.T) { remoteTmpDir := t.TempDir() remoteRepoR := gitinterface.CreateTestGitRepository(t, remoteTmpDir, false) localRepo := createTestRepositoryWithPolicy(t, "") if err := localRepo.r.CreateRemote(remoteName, remoteTmpDir); err != nil { t.Fatal(err) } err := localRepo.PushRSL(remoteName) assert.Nil(t, err) assertLocalAndRemoteRefsMatch(t, localRepo.r, remoteRepoR, rsl.Ref) // No updates, successful push err = localRepo.PushRSL(remoteName) assert.Nil(t, err) }) t.Run("divergent RSLs, unsuccessful push", func(t *testing.T) { remoteTmpDir := t.TempDir() remoteRepoR := gitinterface.CreateTestGitRepository(t, remoteTmpDir, false) if err := 
rsl.NewReferenceEntry(policy.PolicyRef, gitinterface.ZeroHash).Commit(remoteRepoR, false); err != nil { t.Fatal(err) } localRepo := createTestRepositoryWithPolicy(t, "") if err := localRepo.r.CreateRemote(remoteName, remoteTmpDir); err != nil { t.Fatal(err) } err := localRepo.PushRSL(remoteName) assert.ErrorIs(t, err, ErrPushingRSL) }) } func TestPullRSL(t *testing.T) { remoteName := "origin" t.Run("successful pull", func(t *testing.T) { remoteTmpDir := t.TempDir() remoteRepo := createTestRepositoryWithPolicy(t, remoteTmpDir) localTmpDir := t.TempDir() localRepoR := gitinterface.CreateTestGitRepository(t, localTmpDir, false) localRepo := &Repository{r: localRepoR} if err := localRepo.r.CreateRemote(remoteName, remoteTmpDir); err != nil { t.Fatal(err) } err := localRepo.PullRSL(remoteName) assert.Nil(t, err) assertLocalAndRemoteRefsMatch(t, localRepo.r, remoteRepo.r, rsl.Ref) // No updates, successful pull err = localRepo.PullRSL(remoteName) assert.Nil(t, err) }) t.Run("divergent RSLs, unsuccessful pull", func(t *testing.T) { remoteTmpDir := t.TempDir() createTestRepositoryWithPolicy(t, remoteTmpDir) localTmpDir := t.TempDir() localRepoR := gitinterface.CreateTestGitRepository(t, localTmpDir, false) localRepo := &Repository{r: localRepoR} if err := localRepo.r.CreateRemote(remoteName, remoteTmpDir); err != nil { t.Fatal(err) } if err := rsl.NewReferenceEntry(policy.PolicyRef, gitinterface.ZeroHash).Commit(localRepo.r, false); err != nil { t.Fatal(err) } err := localRepo.PullRSL(remoteName) assert.ErrorIs(t, err, ErrPullingRSL) }) } func TestPropagateChangesFromUpstreamRepositories(t *testing.T) { t.Setenv(dev.DevModeKey, "1") t.Run("single upstream repo", func(t *testing.T) { // Create upstreamRepo upstreamRepoLocation := t.TempDir() upstreamRepo := createTestRepositoryWithRoot(t, upstreamRepoLocation) downstreamRepoLocation := t.TempDir() downstreamRepo := createTestRepositoryWithRoot(t, downstreamRepoLocation) signer := setupSSHKeysForSigning(t, rootKeyBytes, rootPubKeyBytes) refName := "refs/heads/main" localPath := "upstream" if err := downstreamRepo.AddPropagationDirective(testCtx, signer, "test", upstreamRepoLocation, refName, refName, localPath, false); err != nil { t.Fatal(err) } if err := downstreamRepo.ApplyPolicy(testCtx, false); err != nil { t.Fatal(err) } err := downstreamRepo.PropagateChangesFromUpstreamRepositories(testCtx, false) assert.NotNil(t, err) // TODO: upstream doesn't have main at all // Add things to upstreamRepo blobAID, err := upstreamRepo.r.WriteBlob([]byte("a")) if err != nil { t.Fatal(err) } blobBID, err := upstreamRepo.r.WriteBlob([]byte("b")) if err != nil { t.Fatal(err) } upstreamTreeBuilder := gitinterface.NewTreeBuilder(upstreamRepo.r) upstreamRootTreeID, err := upstreamTreeBuilder.WriteTreeFromEntries([]gitinterface.TreeEntry{ gitinterface.NewEntryBlob("a", blobAID), gitinterface.NewEntryBlob("b", blobBID), }) if err != nil { t.Fatal(err) } if _, err := upstreamRepo.r.Commit(upstreamRootTreeID, refName, "Initial commit\n", false); err != nil { t.Fatal(err) } if err := upstreamRepo.RecordRSLEntryForReference(testCtx, refName, false); err != nil { t.Fatal(err) } upstreamEntry, err := rsl.GetLatestEntry(upstreamRepo.r) if err != nil { t.Fatal(err) } err = downstreamRepo.PropagateChangesFromUpstreamRepositories(testCtx, false) // TODO: should propagation result in a new local ref? 
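		// The downstream ref has not been created yet, so propagation cannot
		// update it and surfaces a reference-not-found error (see the TODO
		// above).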
assert.ErrorIs(t, err, gitinterface.ErrReferenceNotFound) // Add things to downstreamRepo blobAID, err = downstreamRepo.r.WriteBlob([]byte("a")) if err != nil { t.Fatal(err) } blobBID, err = downstreamRepo.r.WriteBlob([]byte("b")) if err != nil { t.Fatal(err) } downstreamTreeBuilder := gitinterface.NewTreeBuilder(downstreamRepo.r) downstreamRootTreeID, err := downstreamTreeBuilder.WriteTreeFromEntries([]gitinterface.TreeEntry{ gitinterface.NewEntryBlob("a", blobAID), gitinterface.NewEntryBlob("foo/b", blobBID), }) if err != nil { t.Fatal(err) } if _, err := downstreamRepo.r.Commit(downstreamRootTreeID, refName, "Initial commit\n", false); err != nil { t.Fatal(err) } if err := downstreamRepo.RecordRSLEntryForReference(testCtx, refName, false, rslopts.WithSkipPropagation()); err != nil { t.Fatal(err) } err = downstreamRepo.PropagateChangesFromUpstreamRepositories(testCtx, false) assert.Nil(t, err) latestEntry, err := rsl.GetLatestEntry(downstreamRepo.r) if err != nil { t.Fatal(err) } propagationEntry, isPropagationEntry := latestEntry.(*rsl.PropagationEntry) if !isPropagationEntry { t.Fatal("unexpected entry type in downstream repo") } assert.Equal(t, upstreamRepoLocation, propagationEntry.UpstreamRepository) assert.Equal(t, upstreamEntry.GetID(), propagationEntry.UpstreamEntryID) downstreamRootTreeID, err = downstreamRepo.r.GetCommitTreeID(propagationEntry.TargetID) if err != nil { t.Fatal(err) } pathTreeID, err := downstreamRepo.r.GetPathIDInTree(localPath, downstreamRootTreeID) if err != nil { t.Fatal(err) } // Check the subtree ID in downstream repo matches upstream root tree ID assert.Equal(t, upstreamRootTreeID, pathTreeID) // Check the downstream tree still contains other items expectedRootTreeID, err := downstreamTreeBuilder.WriteTreeFromEntries([]gitinterface.TreeEntry{ gitinterface.NewEntryBlob("a", blobAID), gitinterface.NewEntryBlob("foo/b", blobBID), gitinterface.NewEntryBlob("upstream/a", blobAID), gitinterface.NewEntryBlob("upstream/b", blobBID), }) if err != nil { t.Fatal(err) } assert.Equal(t, expectedRootTreeID, downstreamRootTreeID) // Nothing to propagate, check that a new entry has not been added in the downstreamRepo err = downstreamRepo.PropagateChangesFromUpstreamRepositories(testCtx, false) assert.Nil(t, err) latestEntry, err = rsl.GetLatestEntry(downstreamRepo.r) if err != nil { t.Fatal(err) } assert.Equal(t, propagationEntry.GetID(), latestEntry.GetID()) }) t.Run("single upstream repo, multiple upstream refs into same downstream ref", func(t *testing.T) { // Create upstreamRepo upstreamRepoLocation := t.TempDir() upstreamRepo := createTestRepositoryWithRoot(t, upstreamRepoLocation) downstreamRepoLocation := t.TempDir() downstreamRepo := createTestRepositoryWithRoot(t, downstreamRepoLocation) signer := setupSSHKeysForSigning(t, rootKeyBytes, rootPubKeyBytes) refName1 := "refs/heads/main" refName2 := "refs/heads/feature" localPath1 := "main" localPath2 := "feature" if err := downstreamRepo.AddPropagationDirective(testCtx, signer, "test", upstreamRepoLocation, refName1, refName1, localPath1, false); err != nil { t.Fatal(err) } if err := downstreamRepo.AddPropagationDirective(testCtx, signer, "test", upstreamRepoLocation, refName2, refName1, localPath2, false); err != nil { t.Fatal(err) } if err := downstreamRepo.ApplyPolicy(testCtx, false); err != nil { t.Fatal(err) } err := downstreamRepo.PropagateChangesFromUpstreamRepositories(testCtx, false) assert.NotNil(t, err) // TODO: upstream doesn't have main at all // Add things to upstreamRepo blobAID, err := 
upstreamRepo.r.WriteBlob([]byte("a")) if err != nil { t.Fatal(err) } blobBID, err := upstreamRepo.r.WriteBlob([]byte("b")) if err != nil { t.Fatal(err) } upstreamTreeBuilder := gitinterface.NewTreeBuilder(upstreamRepo.r) upstreamRootTreeID, err := upstreamTreeBuilder.WriteTreeFromEntries([]gitinterface.TreeEntry{ gitinterface.NewEntryBlob("a", blobAID), gitinterface.NewEntryBlob("b", blobBID), }) if err != nil { t.Fatal(err) } if _, err := upstreamRepo.r.Commit(upstreamRootTreeID, refName1, "Initial commit\n", false); err != nil { t.Fatal(err) } if err := upstreamRepo.RecordRSLEntryForReference(testCtx, refName1, false); err != nil { t.Fatal(err) } if _, err := upstreamRepo.r.Commit(upstreamRootTreeID, refName2, "Initial commit\n", false); err != nil { t.Fatal(err) } if err := upstreamRepo.RecordRSLEntryForReference(testCtx, refName2, false); err != nil { t.Fatal(err) } upstreamEntry2, err := rsl.GetLatestEntry(upstreamRepo.r) if err != nil { t.Fatal(err) } upstreamEntry1, err := rsl.GetParentForEntry(upstreamRepo.r, upstreamEntry2) if err != nil { t.Fatal(err) } err = downstreamRepo.PropagateChangesFromUpstreamRepositories(testCtx, false) // TODO: should propagation result in a new local ref? assert.ErrorIs(t, err, gitinterface.ErrReferenceNotFound) // Add things to downstreamRepo blobAID, err = downstreamRepo.r.WriteBlob([]byte("a")) if err != nil { t.Fatal(err) } blobBID, err = downstreamRepo.r.WriteBlob([]byte("b")) if err != nil { t.Fatal(err) } downstreamTreeBuilder := gitinterface.NewTreeBuilder(downstreamRepo.r) downstreamRootTreeID, err := downstreamTreeBuilder.WriteTreeFromEntries([]gitinterface.TreeEntry{ gitinterface.NewEntryBlob("a", blobAID), gitinterface.NewEntryBlob("foo/b", blobBID), }) if err != nil { t.Fatal(err) } if _, err := downstreamRepo.r.Commit(downstreamRootTreeID, refName1, "Initial commit\n", false); err != nil { t.Fatal(err) } if err := downstreamRepo.RecordRSLEntryForReference(testCtx, refName1, false, rslopts.WithSkipPropagation()); err != nil { t.Fatal(err) } err = downstreamRepo.PropagateChangesFromUpstreamRepositories(testCtx, false) assert.Nil(t, err) latestEntry, err := rsl.GetLatestEntry(downstreamRepo.r) if err != nil { t.Fatal(err) } priorEntry, err := rsl.GetParentForEntry(downstreamRepo.r, latestEntry) if err != nil { t.Fatal(err) } propagationEntry2, isPropagationEntry := latestEntry.(*rsl.PropagationEntry) if !isPropagationEntry { t.Fatal("unexpected entry type in downstream repo") } propagationEntry1, isPropagationEntry := priorEntry.(*rsl.PropagationEntry) if !isPropagationEntry { t.Fatal("unexpected entry type in downstream repo") } assert.Equal(t, upstreamRepoLocation, propagationEntry1.UpstreamRepository) assert.Equal(t, upstreamRepoLocation, propagationEntry2.UpstreamRepository) assert.Equal(t, upstreamEntry1.GetID(), propagationEntry1.UpstreamEntryID) assert.Equal(t, upstreamEntry2.GetID(), propagationEntry2.UpstreamEntryID) downstreamRootTreeID, err = downstreamRepo.r.GetCommitTreeID(propagationEntry2.TargetID) if err != nil { t.Fatal(err) } pathTree1ID, err := downstreamRepo.r.GetPathIDInTree(localPath1, downstreamRootTreeID) if err != nil { t.Fatal(err) } pathTree2ID, err := downstreamRepo.r.GetPathIDInTree(localPath2, downstreamRootTreeID) if err != nil { t.Fatal(err) } // Check the subtree IDs in downstream repo matches upstream root tree IDs assert.Equal(t, upstreamRootTreeID, pathTree1ID) assert.Equal(t, upstreamRootTreeID, pathTree2ID) // Check the downstream tree still contains other items expectedRootTreeID, err := 
downstreamTreeBuilder.WriteTreeFromEntries([]gitinterface.TreeEntry{ gitinterface.NewEntryBlob("a", blobAID), gitinterface.NewEntryBlob("foo/b", blobBID), gitinterface.NewEntryBlob("main/a", blobAID), gitinterface.NewEntryBlob("main/b", blobBID), gitinterface.NewEntryBlob("feature/a", blobAID), gitinterface.NewEntryBlob("feature/b", blobBID), }) if err != nil { t.Fatal(err) } assert.Equal(t, expectedRootTreeID, downstreamRootTreeID) // Nothing to propagate, check that a new entry has not been added in the downstreamRepo err = downstreamRepo.PropagateChangesFromUpstreamRepositories(testCtx, false) assert.Nil(t, err) latestEntry, err = rsl.GetLatestEntry(downstreamRepo.r) if err != nil { t.Fatal(err) } assert.Equal(t, propagationEntry2.GetID(), latestEntry.GetID()) }) t.Run("single upstream repo, multiple upstream refs into different downstream refs", func(t *testing.T) { // Create upstreamRepo upstreamRepoLocation := t.TempDir() upstreamRepo := createTestRepositoryWithRoot(t, upstreamRepoLocation) downstreamRepoLocation := t.TempDir() downstreamRepo := createTestRepositoryWithRoot(t, downstreamRepoLocation) signer := setupSSHKeysForSigning(t, rootKeyBytes, rootPubKeyBytes) refName1 := "refs/heads/main" refName2 := "refs/heads/feature" localPath := "upstream" if err := downstreamRepo.AddPropagationDirective(testCtx, signer, "test", upstreamRepoLocation, refName1, refName1, localPath, false); err != nil { t.Fatal(err) } if err := downstreamRepo.AddPropagationDirective(testCtx, signer, "test", upstreamRepoLocation, refName2, refName2, localPath, false); err != nil { t.Fatal(err) } if err := downstreamRepo.ApplyPolicy(testCtx, false); err != nil { t.Fatal(err) } err := downstreamRepo.PropagateChangesFromUpstreamRepositories(testCtx, false) assert.NotNil(t, err) // TODO: upstream doesn't have main at all // Add things to upstreamRepo blobAID, err := upstreamRepo.r.WriteBlob([]byte("a")) if err != nil { t.Fatal(err) } blobBID, err := upstreamRepo.r.WriteBlob([]byte("b")) if err != nil { t.Fatal(err) } upstreamTreeBuilder := gitinterface.NewTreeBuilder(upstreamRepo.r) upstreamRootTreeID, err := upstreamTreeBuilder.WriteTreeFromEntries([]gitinterface.TreeEntry{ gitinterface.NewEntryBlob("a", blobAID), gitinterface.NewEntryBlob("b", blobBID), }) if err != nil { t.Fatal(err) } if _, err := upstreamRepo.r.Commit(upstreamRootTreeID, refName1, "Initial commit\n", false); err != nil { t.Fatal(err) } if err := upstreamRepo.RecordRSLEntryForReference(testCtx, refName1, false); err != nil { t.Fatal(err) } if _, err := upstreamRepo.r.Commit(upstreamRootTreeID, refName2, "Initial commit\n", false); err != nil { t.Fatal(err) } if err := upstreamRepo.RecordRSLEntryForReference(testCtx, refName2, false); err != nil { t.Fatal(err) } upstreamEntry2, err := rsl.GetLatestEntry(upstreamRepo.r) if err != nil { t.Fatal(err) } upstreamEntry1, err := rsl.GetParentForEntry(upstreamRepo.r, upstreamEntry2) if err != nil { t.Fatal(err) } err = downstreamRepo.PropagateChangesFromUpstreamRepositories(testCtx, false) // TODO: should propagation result in a new local ref? 
assert.ErrorIs(t, err, gitinterface.ErrReferenceNotFound) // Add things to downstreamRepo blobAID, err = downstreamRepo.r.WriteBlob([]byte("a")) if err != nil { t.Fatal(err) } blobBID, err = downstreamRepo.r.WriteBlob([]byte("b")) if err != nil { t.Fatal(err) } downstreamTreeBuilder := gitinterface.NewTreeBuilder(downstreamRepo.r) downstreamRootTreeID, err := downstreamTreeBuilder.WriteTreeFromEntries([]gitinterface.TreeEntry{ gitinterface.NewEntryBlob("a", blobAID), gitinterface.NewEntryBlob("foo/b", blobBID), }) if err != nil { t.Fatal(err) } if _, err := downstreamRepo.r.Commit(downstreamRootTreeID, refName1, "Initial commit\n", false); err != nil { t.Fatal(err) } if err := downstreamRepo.RecordRSLEntryForReference(testCtx, refName1, false, rslopts.WithSkipPropagation()); err != nil { t.Fatal(err) } if _, err := downstreamRepo.r.Commit(downstreamRootTreeID, refName2, "Initial commit\n", false); err != nil { t.Fatal(err) } if err := downstreamRepo.RecordRSLEntryForReference(testCtx, refName2, false, rslopts.WithSkipPropagation()); err != nil { t.Fatal(err) } err = downstreamRepo.PropagateChangesFromUpstreamRepositories(testCtx, false) assert.Nil(t, err) latestEntry, err := rsl.GetLatestEntry(downstreamRepo.r) if err != nil { t.Fatal(err) } priorEntry, err := rsl.GetParentForEntry(downstreamRepo.r, latestEntry) if err != nil { t.Fatal(err) } propagationEntry2, isPropagationEntry := latestEntry.(*rsl.PropagationEntry) if !isPropagationEntry { t.Fatal("unexpected entry type in downstream repo") } propagationEntry1, isPropagationEntry := priorEntry.(*rsl.PropagationEntry) if !isPropagationEntry { t.Fatal("unexpected entry type in downstream repo") } assert.Equal(t, upstreamRepoLocation, propagationEntry1.UpstreamRepository) assert.Equal(t, upstreamRepoLocation, propagationEntry2.UpstreamRepository) assert.Equal(t, upstreamEntry1.GetID(), propagationEntry1.UpstreamEntryID) assert.Equal(t, upstreamEntry2.GetID(), propagationEntry2.UpstreamEntryID) assert.Equal(t, refName1, propagationEntry1.RefName) assert.Equal(t, refName2, propagationEntry2.RefName) // Check the downstream tree still contains other items expectedRootTreeID, err := downstreamTreeBuilder.WriteTreeFromEntries([]gitinterface.TreeEntry{ gitinterface.NewEntryBlob("a", blobAID), gitinterface.NewEntryBlob("foo/b", blobBID), gitinterface.NewEntryBlob("upstream/a", blobAID), gitinterface.NewEntryBlob("upstream/b", blobBID), }) if err != nil { t.Fatal(err) } downstreamRootTreeID, err = downstreamRepo.r.GetCommitTreeID(propagationEntry2.TargetID) if err != nil { t.Fatal(err) } pathTreeID, err := downstreamRepo.r.GetPathIDInTree(localPath, downstreamRootTreeID) if err != nil { t.Fatal(err) } // Check the subtree ID in downstream repo matches upstream root tree ID assert.Equal(t, upstreamRootTreeID, pathTreeID) // Check the tree as a whole is as expected assert.Equal(t, expectedRootTreeID, downstreamRootTreeID) // Do the same thing for the other propagation entry's tree (this is a different ref!) 
downstreamRootTreeID, err = downstreamRepo.r.GetCommitTreeID(propagationEntry1.TargetID) if err != nil { t.Fatal(err) } pathTreeID, err = downstreamRepo.r.GetPathIDInTree(localPath, downstreamRootTreeID) if err != nil { t.Fatal(err) } // Check the subtree ID in downstream repo matches upstream root tree ID assert.Equal(t, upstreamRootTreeID, pathTreeID) // Check the tree as a whole is as expected assert.Equal(t, expectedRootTreeID, downstreamRootTreeID) // Nothing to propagate, check that a new entry has not been added in the downstreamRepo err = downstreamRepo.PropagateChangesFromUpstreamRepositories(testCtx, false) assert.Nil(t, err) latestEntry, err = rsl.GetLatestEntry(downstreamRepo.r) if err != nil { t.Fatal(err) } assert.Equal(t, propagationEntry2.GetID(), latestEntry.GetID()) }) t.Run("multiple upstream repos", func(t *testing.T) { // Create upstreamRepos upstreamRepo1Location := t.TempDir() upstreamRepo1 := createTestRepositoryWithRoot(t, upstreamRepo1Location) upstreamRepo2Location := t.TempDir() upstreamRepo2 := createTestRepositoryWithRoot(t, upstreamRepo2Location) downstreamRepoLocation := t.TempDir() downstreamRepo := createTestRepositoryWithRoot(t, downstreamRepoLocation) signer := setupSSHKeysForSigning(t, rootKeyBytes, rootPubKeyBytes) refName := "refs/heads/main" localPath1 := "upstream1" localPath2 := "upstream2" if err := downstreamRepo.AddPropagationDirective(testCtx, signer, "test-1", upstreamRepo1Location, refName, refName, localPath1, false); err != nil { t.Fatal(err) } if err := downstreamRepo.AddPropagationDirective(testCtx, signer, "test-2", upstreamRepo2Location, refName, refName, localPath2, false); err != nil { t.Fatal(err) } if err := downstreamRepo.ApplyPolicy(testCtx, false); err != nil { t.Fatal(err) } err := downstreamRepo.PropagateChangesFromUpstreamRepositories(testCtx, false) assert.NotNil(t, err) // TODO: upstream repos don't have main at all // Add things to upstreamRepos blobAID, err := upstreamRepo1.r.WriteBlob([]byte("a")) if err != nil { t.Fatal(err) } blobBID, err := upstreamRepo1.r.WriteBlob([]byte("b")) if err != nil { t.Fatal(err) } upstreamTreeBuilder1 := gitinterface.NewTreeBuilder(upstreamRepo1.r) upstreamRootTree1ID, err := upstreamTreeBuilder1.WriteTreeFromEntries([]gitinterface.TreeEntry{ gitinterface.NewEntryBlob("a", blobAID), gitinterface.NewEntryBlob("b", blobBID), }) if err != nil { t.Fatal(err) } if _, err := upstreamRepo1.r.Commit(upstreamRootTree1ID, refName, "Initial commit\n", false); err != nil { t.Fatal(err) } if err := upstreamRepo1.RecordRSLEntryForReference(testCtx, refName, false); err != nil { t.Fatal(err) } upstreamEntry1, err := rsl.GetLatestEntry(upstreamRepo1.r) if err != nil { t.Fatal(err) } blobCID, err := upstreamRepo2.r.WriteBlob([]byte("c")) if err != nil { t.Fatal(err) } blobDID, err := upstreamRepo2.r.WriteBlob([]byte("d")) if err != nil { t.Fatal(err) } upstreamTreeBuilder2 := gitinterface.NewTreeBuilder(upstreamRepo2.r) upstreamRootTree2ID, err := upstreamTreeBuilder2.WriteTreeFromEntries([]gitinterface.TreeEntry{ gitinterface.NewEntryBlob("c", blobCID), gitinterface.NewEntryBlob("d", blobDID), }) if err != nil { t.Fatal(err) } if _, err := upstreamRepo2.r.Commit(upstreamRootTree2ID, refName, "Initial commit\n", false); err != nil { t.Fatal(err) } if err := upstreamRepo2.RecordRSLEntryForReference(testCtx, refName, false); err != nil { t.Fatal(err) } upstreamEntry2, err := rsl.GetLatestEntry(upstreamRepo2.r) if err != nil { t.Fatal(err) } err = 
downstreamRepo.PropagateChangesFromUpstreamRepositories(testCtx, false) // TODO: should propagation result in a new local ref? assert.ErrorIs(t, err, gitinterface.ErrReferenceNotFound) // Add things to downstreamRepo blobAID, err = downstreamRepo.r.WriteBlob([]byte("a")) if err != nil { t.Fatal(err) } blobBID, err = downstreamRepo.r.WriteBlob([]byte("b")) if err != nil { t.Fatal(err) } downstreamTreeBuilder := gitinterface.NewTreeBuilder(downstreamRepo.r) downstreamRootTreeID, err := downstreamTreeBuilder.WriteTreeFromEntries([]gitinterface.TreeEntry{ gitinterface.NewEntryBlob("a", blobAID), gitinterface.NewEntryBlob("foo/b", blobBID), }) if err != nil { t.Fatal(err) } if _, err := downstreamRepo.r.Commit(downstreamRootTreeID, refName, "Initial commit\n", false); err != nil { t.Fatal(err) } if err := downstreamRepo.RecordRSLEntryForReference(testCtx, refName, false, rslopts.WithSkipPropagation()); err != nil { t.Fatal(err) } err = downstreamRepo.PropagateChangesFromUpstreamRepositories(testCtx, false) assert.Nil(t, err) latestEntry, err := rsl.GetLatestEntry(downstreamRepo.r) if err != nil { t.Fatal(err) } propagationEntry2, isPropagationEntry := latestEntry.(*rsl.PropagationEntry) if !isPropagationEntry { t.Fatal("unexpected entry type in downstream repo") } priorEntry, err := rsl.GetParentForEntry(downstreamRepo.r, latestEntry) if err != nil { t.Fatal(err) } propagationEntry1, isPropagationEntry := priorEntry.(*rsl.PropagationEntry) if !isPropagationEntry { t.Fatal("unexpected entry type in downstream repo") } // Check the two propagation entries are right. // The order in which the upstream repositories are propagated may vary // (sometimes repo A is propagated before repo B, and vice versa), so we // cannot compare against a fixed order. Instead, we build sets of the // expected items, remove what we observe in the propagation entries, and // assert each set ends up empty, ensuring there is a propagation entry for // each expected item.
expectedLocations := set.NewSetFromItems(upstreamRepo1Location, upstreamRepo2Location) expectedLocations.Remove(propagationEntry1.UpstreamRepository) expectedLocations.Remove(propagationEntry2.UpstreamRepository) assert.Equal(t, 0, expectedLocations.Len()) expectedUpstreamIDs := set.NewSetFromItems(upstreamEntry1.GetID().String(), upstreamEntry2.GetID().String()) expectedUpstreamIDs.Remove(propagationEntry1.UpstreamEntryID.String()) expectedUpstreamIDs.Remove(propagationEntry2.UpstreamEntryID.String()) assert.Equal(t, 0, expectedUpstreamIDs.Len()) downstreamRootTreeID, err = downstreamRepo.r.GetCommitTreeID(propagationEntry2.TargetID) if err != nil { t.Fatal(err) } pathTree1ID, err := downstreamRepo.r.GetPathIDInTree(localPath1, downstreamRootTreeID) if err != nil { t.Fatal(err) } pathTree2ID, err := downstreamRepo.r.GetPathIDInTree(localPath2, downstreamRootTreeID) if err != nil { t.Fatal(err) } // Check the subtree IDs in downstream repo matches upstream root tree IDs assert.Equal(t, upstreamRootTree1ID, pathTree1ID) assert.Equal(t, upstreamRootTree2ID, pathTree2ID) // Check the downstream tree still contains other items expectedRootTreeID, err := downstreamTreeBuilder.WriteTreeFromEntries([]gitinterface.TreeEntry{ gitinterface.NewEntryBlob("a", blobAID), gitinterface.NewEntryBlob("foo/b", blobBID), gitinterface.NewEntryBlob("upstream1/a", blobAID), gitinterface.NewEntryBlob("upstream1/b", blobBID), gitinterface.NewEntryBlob("upstream2/c", blobCID), gitinterface.NewEntryBlob("upstream2/d", blobDID), }) if err != nil { t.Fatal(err) } assert.Equal(t, expectedRootTreeID, downstreamRootTreeID) // Nothing to propagate, check that a new entry has not been added in the downstreamRepo err = downstreamRepo.PropagateChangesFromUpstreamRepositories(testCtx, false) assert.Nil(t, err) latestEntry, err = rsl.GetLatestEntry(downstreamRepo.r) if err != nil { t.Fatal(err) } assert.Equal(t, propagationEntry2.GetID(), latestEntry.GetID()) }) } gittuf-0.9.0/experimental/gittuf/sync.go000066400000000000000000000063341475150141000203300ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package gittuf import ( "context" "errors" "fmt" "log/slog" "os" "reflect" "sort" "strings" "github.com/gittuf/gittuf/internal/gitinterface" "github.com/gittuf/gittuf/internal/policy" "github.com/gittuf/gittuf/internal/tuf" ) var ( ErrCloningRepository = errors.New("unable to clone repository") ErrDirExists = errors.New("directory exists") ErrExpectedRootKeysDoNotMatch = errors.Join(ErrCloningRepository, errors.New("cloned root keys do not match the expected keys")) ) // Clone wraps a typical git clone invocation, fetching gittuf refs in addition // to the standard refs. It performs a verification of the RSL against the // specified HEAD after cloning the repository. // TODO: resolve how root keys are trusted / bootstrapped. 
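//
// A minimal calling sketch from a consumer's perspective (hedged: the remote
// URL and target directory below are illustrative placeholders, not values
// defined by this package):
//
//	repo, err := gittuf.Clone(ctx, "https://example.com/org/repo", "repo", "", nil, false)
//	if err != nil {
//		// handle clone or verification failure
//	}
//	_ = repo // on success, HEAD has already been verified via VerifyRef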
func Clone(ctx context.Context, remoteURL, dir, initialBranch string, expectedRootKeys []tuf.Principal, bare bool) (*Repository, error) { slog.Debug(fmt.Sprintf("Cloning from '%s'...", remoteURL)) if dir == "" { // FIXME: my understanding is backslashes are not used in URLs but I haven't dived into the RFCs to check yet modifiedURL := strings.ReplaceAll(remoteURL, "\\", "/") modifiedURL = strings.TrimRight(strings.TrimSpace(modifiedURL), "/") // Trim spaces and trailing slashes if any split := strings.Split(modifiedURL, "/") dir = strings.TrimSuffix(split[len(split)-1], ".git") } slog.Debug("Checking if local directory exists for repository...") _, err := os.Stat(dir) if err == nil { return nil, errors.Join(ErrCloningRepository, ErrDirExists) } else if !os.IsNotExist(err) { return nil, errors.Join(ErrCloningRepository, err) } if err := os.Mkdir(dir, 0755); err != nil { return nil, errors.Join(ErrCloningRepository, err) } refs := []string{"refs/gittuf/*"} slog.Debug("Cloning repository...") r, err := gitinterface.CloneAndFetchRepository(remoteURL, dir, initialBranch, refs, bare) if err != nil { if e := os.RemoveAll(dir); e != nil { return nil, errors.Join(ErrCloningRepository, err, e) } return nil, errors.Join(ErrCloningRepository, err) } head, err := r.GetSymbolicReferenceTarget("HEAD") if err != nil { return nil, errors.Join(ErrCloningRepository, err) } repository := &Repository{r: r} if len(expectedRootKeys) > 0 { slog.Debug("Verifying if root keys are expected root keys...") sort.Slice(expectedRootKeys, func(i, j int) bool { return expectedRootKeys[i].ID() < expectedRootKeys[j].ID() }) state, err := policy.LoadFirstState(ctx, r) if err != nil { return repository, errors.Join(ErrCloningRepository, err) } rootKeys, err := state.GetRootKeys() if err != nil { return repository, errors.Join(ErrCloningRepository, err) } // We sort the root keys so that we can check if the root keys array matches the expected root key array sort.Slice(rootKeys, func(i, j int) bool { return rootKeys[i].ID() < rootKeys[j].ID() }) if len(rootKeys) != len(expectedRootKeys) { return repository, ErrExpectedRootKeysDoNotMatch } if !reflect.DeepEqual(rootKeys, expectedRootKeys) { return repository, ErrExpectedRootKeysDoNotMatch } } slog.Debug("Verifying HEAD...") return repository, repository.VerifyRef(ctx, head) } gittuf-0.9.0/experimental/gittuf/sync_test.go000066400000000000000000000310331475150141000213610ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package gittuf import ( "os" "testing" "github.com/gittuf/gittuf/internal/gitinterface" "github.com/gittuf/gittuf/internal/policy" "github.com/gittuf/gittuf/internal/rsl" "github.com/gittuf/gittuf/internal/signerverifier/gpg" "github.com/gittuf/gittuf/internal/signerverifier/ssh" "github.com/gittuf/gittuf/internal/tuf" tufv01 "github.com/gittuf/gittuf/internal/tuf/v01" "github.com/stretchr/testify/assert" ) func TestClone(t *testing.T) { remoteTmpDir := t.TempDir() rootSigner := setupSSHKeysForSigning(t, rootKeyBytes, rootPubKeyBytes) targetsSigner := setupSSHKeysForSigning(t, targetsKeyBytes, targetsPubKeyBytes) targetsPubKey := tufv01.NewKeyFromSSLibKey(targetsSigner.MetadataKey()) remoteR := gitinterface.CreateTestGitRepository(t, remoteTmpDir, true) remoteRepo := &Repository{r: remoteR} treeBuilder := gitinterface.NewTreeBuilder(remoteR) emptyTreeHash, err := treeBuilder.WriteTreeFromEntries(nil) if err != nil { t.Fatal(err) } if err := remoteRepo.InitializeRoot(testCtx, rootSigner, false); err != nil {
t.Fatal(err) } if err := remoteRepo.AddRootKey(testCtx, rootSigner, targetsPubKey, false); err != nil { t.Fatal(err) } if err := remoteRepo.AddTopLevelTargetsKey(testCtx, rootSigner, targetsPubKey, false); err != nil { t.Fatal(err) } if err := remoteRepo.SignRoot(testCtx, targetsSigner, false); err != nil { t.Fatal(err) } if err := remoteRepo.InitializeTargets(testCtx, targetsSigner, policy.TargetsRoleName, false); err != nil { t.Fatal(err) } if err := policy.Apply(testCtx, remoteRepo.r, false); err != nil { t.Fatal(err) } refName := "refs/heads/main" anotherRefName := "refs/heads/feature" commitID, err := remoteRepo.r.Commit(emptyTreeHash, refName, "Initial commit", false) if err != nil { t.Fatal(err) } if err := remoteRepo.RecordRSLEntryForReference(testCtx, refName, false); err != nil { t.Fatal(err) } if err := remoteRepo.r.SetReference(anotherRefName, commitID); err != nil { t.Fatal(err) } if err := remoteRepo.RecordRSLEntryForReference(testCtx, anotherRefName, false); err != nil { t.Fatal(err) } currentDir, err := os.Getwd() if err != nil { t.Fatal(err) } t.Run("successful clone without specifying dir, bare", func(t *testing.T) { localTmpDir := t.TempDir() if err := os.Chdir(localTmpDir); err != nil { t.Fatal(err) } defer os.Chdir(currentDir) //nolint:errcheck repo, err := Clone(testCtx, remoteTmpDir, "", "", nil, true) assert.Nil(t, err) head, err := repo.r.GetSymbolicReferenceTarget("HEAD") if err != nil { t.Fatal(err) } headID, err := repo.r.GetReference(head) if err != nil { t.Fatal(err) } assert.Equal(t, commitID, headID) assertLocalAndRemoteRefsMatch(t, repo.r, remoteRepo.r, rsl.Ref) assertLocalAndRemoteRefsMatch(t, repo.r, remoteRepo.r, policy.PolicyRef) }) t.Run("successful clone with dir, bare", func(t *testing.T) { localTmpDir := t.TempDir() if err := os.Chdir(localTmpDir); err != nil { t.Fatal(err) } defer os.Chdir(currentDir) //nolint:errcheck dirName := "myRepo" repo, err := Clone(testCtx, remoteTmpDir, dirName, "", nil, true) assert.Nil(t, err) head, err := repo.r.GetSymbolicReferenceTarget("HEAD") if err != nil { t.Fatal(err) } headID, err := repo.r.GetReference(head) if err != nil { t.Fatal(err) } assert.Equal(t, commitID, headID) dirInfo, err := os.Stat(dirName) assert.Nil(t, err) assert.True(t, dirInfo.IsDir()) assertLocalAndRemoteRefsMatch(t, repo.r, remoteRepo.r, rsl.Ref) assertLocalAndRemoteRefsMatch(t, repo.r, remoteRepo.r, policy.PolicyRef) }) t.Run("successful clone without specifying dir, with non-HEAD initial branch, bare", func(t *testing.T) { localTmpDir := t.TempDir() if err := os.Chdir(localTmpDir); err != nil { t.Fatal(err) } defer os.Chdir(currentDir) //nolint:errcheck repo, err := Clone(testCtx, remoteTmpDir, "", anotherRefName, nil, true) assert.Nil(t, err) head, err := repo.r.GetSymbolicReferenceTarget("HEAD") if err != nil { t.Fatal(err) } headID, err := repo.r.GetReference(head) if err != nil { t.Fatal(err) } assert.Equal(t, commitID, headID) assert.Equal(t, anotherRefName, head) assertLocalAndRemoteRefsMatch(t, repo.r, remoteRepo.r, rsl.Ref) assertLocalAndRemoteRefsMatch(t, repo.r, remoteRepo.r, policy.PolicyRef) }) t.Run("unsuccessful clone when unspecified dir already exists, bare", func(t *testing.T) { localTmpDir := t.TempDir() if err := os.Chdir(localTmpDir); err != nil { t.Fatal(err) } defer os.Chdir(currentDir) //nolint:errcheck _, err = Clone(testCtx, remoteTmpDir, "", "", nil, true) assert.Nil(t, err) _, err = Clone(testCtx, remoteTmpDir, "", "", nil, true) assert.ErrorIs(t, err, ErrDirExists) }) t.Run("unsuccessful clone when specified 
dir already exists, bare", func(t *testing.T) { localTmpDir := t.TempDir() if err := os.Chdir(localTmpDir); err != nil { t.Fatal(err) } defer os.Chdir(currentDir) //nolint:errcheck dirName := "myRepo" if err := os.Mkdir(dirName, 0o755); err != nil { t.Fatal(err) } _, err = Clone(testCtx, remoteTmpDir, dirName, "", nil, true) assert.ErrorIs(t, err, ErrDirExists) }) t.Run("successful clone without specifying dir, with trailing slashes in repository path, bare", func(t *testing.T) { localTmpDir := t.TempDir() if err := os.Chdir(localTmpDir); err != nil { t.Fatal(err) } defer os.Chdir(currentDir) //nolint:errcheck repo, err := Clone(testCtx, remoteTmpDir+"//", "", "", nil, true) assert.Nil(t, err) head, err := repo.r.GetSymbolicReferenceTarget("HEAD") if err != nil { t.Fatal(err) } headID, err := repo.r.GetReference(head) if err != nil { t.Fatal(err) } assert.Equal(t, commitID, headID) assertLocalAndRemoteRefsMatch(t, repo.r, remoteRepo.r, rsl.Ref) assertLocalAndRemoteRefsMatch(t, repo.r, remoteRepo.r, policy.PolicyRef) }) t.Run("successful clone without specifying dir, with multiple expected root keys, bare", func(t *testing.T) { localTmpDir := t.TempDir() if err := os.Chdir(localTmpDir); err != nil { t.Fatal(err) } defer os.Chdir(currentDir) //nolint:errcheck rootPublicKey := tufv01.NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, rootPubKeyBytes)) targetsPublicKey := tufv01.NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, targetsPubKeyBytes)) repo, err := Clone(testCtx, remoteTmpDir, "", "", []tuf.Principal{targetsPublicKey, rootPublicKey}, true) assert.Nil(t, err) head, err := repo.r.GetReference("HEAD") if err != nil { t.Fatal(err) } assert.Equal(t, commitID, head) assertLocalAndRemoteRefsMatch(t, repo.r, remoteRepo.r, rsl.Ref) assertLocalAndRemoteRefsMatch(t, repo.r, remoteRepo.r, policy.PolicyRef) }) t.Run("unsuccessful clone without specifying dir, with expected root keys not equaling root keys, bare", func(t *testing.T) { localTmpDir := t.TempDir() if err := os.Chdir(localTmpDir); err != nil { t.Fatal(err) } defer os.Chdir(currentDir) //nolint:errcheck badPublicKeyR, err := gpg.LoadGPGKeyFromBytes(gpgPubKeyBytes) if err != nil { t.Fatal(err) } badPublicKey := tufv01.NewKeyFromSSLibKey(badPublicKeyR) rootPublicKey := tufv01.NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, rootPubKeyBytes)) _, err = Clone(testCtx, remoteTmpDir, "", "", []tuf.Principal{rootPublicKey, badPublicKey}, true) assert.ErrorIs(t, ErrExpectedRootKeysDoNotMatch, err) }) t.Run("successful clone without specifying dir, not bare", func(t *testing.T) { localTmpDir := t.TempDir() if err := os.Chdir(localTmpDir); err != nil { t.Fatal(err) } defer os.Chdir(currentDir) //nolint:errcheck repo, err := Clone(testCtx, remoteTmpDir, "", "", nil, false) assert.Nil(t, err) head, err := repo.r.GetSymbolicReferenceTarget("HEAD") if err != nil { t.Fatal(err) } headID, err := repo.r.GetReference(head) if err != nil { t.Fatal(err) } assert.Equal(t, commitID, headID) assertLocalAndRemoteRefsMatch(t, repo.r, remoteRepo.r, rsl.Ref) assertLocalAndRemoteRefsMatch(t, repo.r, remoteRepo.r, policy.PolicyRef) }) t.Run("successful clone with dir, not bare", func(t *testing.T) { localTmpDir := t.TempDir() if err := os.Chdir(localTmpDir); err != nil { t.Fatal(err) } defer os.Chdir(currentDir) //nolint:errcheck dirName := "myRepo" repo, err := Clone(testCtx, remoteTmpDir, dirName, "", nil, false) assert.Nil(t, err) head, err := repo.r.GetSymbolicReferenceTarget("HEAD") if err != nil { t.Fatal(err) } headID, err := repo.r.GetReference(head) if err != nil { 
t.Fatal(err) } assert.Equal(t, commitID, headID) dirInfo, err := os.Stat(dirName) assert.Nil(t, err) assert.True(t, dirInfo.IsDir()) assertLocalAndRemoteRefsMatch(t, repo.r, remoteRepo.r, rsl.Ref) assertLocalAndRemoteRefsMatch(t, repo.r, remoteRepo.r, policy.PolicyRef) }) t.Run("successful clone without specifying dir, with non-HEAD initial branch, not bare", func(t *testing.T) { localTmpDir := t.TempDir() if err := os.Chdir(localTmpDir); err != nil { t.Fatal(err) } defer os.Chdir(currentDir) //nolint:errcheck repo, err := Clone(testCtx, remoteTmpDir, "", anotherRefName, nil, false) assert.Nil(t, err) head, err := repo.r.GetSymbolicReferenceTarget("HEAD") if err != nil { t.Fatal(err) } headID, err := repo.r.GetReference(head) if err != nil { t.Fatal(err) } assert.Equal(t, commitID, headID) assert.Equal(t, anotherRefName, head) assertLocalAndRemoteRefsMatch(t, repo.r, remoteRepo.r, rsl.Ref) assertLocalAndRemoteRefsMatch(t, repo.r, remoteRepo.r, policy.PolicyRef) }) t.Run("unsuccessful clone when unspecified dir already exists, not bare", func(t *testing.T) { localTmpDir := t.TempDir() if err := os.Chdir(localTmpDir); err != nil { t.Fatal(err) } defer os.Chdir(currentDir) //nolint:errcheck _, err = Clone(testCtx, remoteTmpDir, "", "", nil, false) assert.Nil(t, err) _, err = Clone(testCtx, remoteTmpDir, "", "", nil, false) assert.ErrorIs(t, err, ErrDirExists) }) t.Run("unsuccessful clone when specified dir already exists, not bare", func(t *testing.T) { localTmpDir := t.TempDir() if err := os.Chdir(localTmpDir); err != nil { t.Fatal(err) } defer os.Chdir(currentDir) //nolint:errcheck dirName := "myRepo" if err := os.Mkdir(dirName, 0o755); err != nil { t.Fatal(err) } _, err = Clone(testCtx, remoteTmpDir, dirName, "", nil, false) assert.ErrorIs(t, err, ErrDirExists) }) t.Run("successful clone without specifying dir, with trailing slashes in repository path, not bare", func(t *testing.T) { localTmpDir := t.TempDir() if err := os.Chdir(localTmpDir); err != nil { t.Fatal(err) } defer os.Chdir(currentDir) //nolint:errcheck repo, err := Clone(testCtx, remoteTmpDir+"//", "", "", nil, false) assert.Nil(t, err) head, err := repo.r.GetSymbolicReferenceTarget("HEAD") if err != nil { t.Fatal(err) } headID, err := repo.r.GetReference(head) if err != nil { t.Fatal(err) } assert.Equal(t, commitID, headID) assertLocalAndRemoteRefsMatch(t, repo.r, remoteRepo.r, rsl.Ref) assertLocalAndRemoteRefsMatch(t, repo.r, remoteRepo.r, policy.PolicyRef) }) t.Run("successful clone without specifying dir, with multiple expected root keys, not bare", func(t *testing.T) { localTmpDir := t.TempDir() if err := os.Chdir(localTmpDir); err != nil { t.Fatal(err) } defer os.Chdir(currentDir) //nolint:errcheck rootPublicKey := tufv01.NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, rootPubKeyBytes)) targetsPublicKey := tufv01.NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, targetsPubKeyBytes)) repo, err := Clone(testCtx, remoteTmpDir, "", "", []tuf.Principal{targetsPublicKey, rootPublicKey}, false) assert.Nil(t, err) head, err := repo.r.GetReference("HEAD") if err != nil { t.Fatal(err) } assert.Equal(t, commitID, head) assertLocalAndRemoteRefsMatch(t, repo.r, remoteRepo.r, rsl.Ref) assertLocalAndRemoteRefsMatch(t, repo.r, remoteRepo.r, policy.PolicyRef) }) t.Run("unsuccessful clone without specifying dir, with expected root keys not equaling root keys, not bare", func(t *testing.T) { localTmpDir := t.TempDir() if err := os.Chdir(localTmpDir); err != nil { t.Fatal(err) } defer os.Chdir(currentDir) //nolint:errcheck badPublicKeyR, err := 
gpg.LoadGPGKeyFromBytes(gpgPubKeyBytes) if err != nil { t.Fatal(err) } badPublicKey := tufv01.NewKeyFromSSLibKey(badPublicKeyR) rootPublicKey := tufv01.NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, rootPubKeyBytes)) _, err = Clone(testCtx, remoteTmpDir, "", "", []tuf.Principal{rootPublicKey, badPublicKey}, false) assert.ErrorIs(t, ErrExpectedRootKeysDoNotMatch, err) }) } gittuf-0.9.0/experimental/gittuf/targets.go000066400000000000000000000347511475150141000210300ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package gittuf import ( "context" "errors" "fmt" "log/slog" "strings" "github.com/gittuf/gittuf/internal/policy" "github.com/gittuf/gittuf/internal/signerverifier/dsse" sslibdsse "github.com/gittuf/gittuf/internal/third_party/go-securesystemslib/dsse" "github.com/gittuf/gittuf/internal/tuf" ) var ErrInvalidPolicyName = errors.New("invalid rule or policy file name, cannot be 'root'") // InitializeTargets is the interface for the user to create the specified // policy file. func (r *Repository) InitializeTargets(ctx context.Context, signer sslibdsse.SignerVerifier, targetsRoleName string, signCommit bool) error { if targetsRoleName == policy.RootRoleName { return ErrInvalidPolicyName } if signCommit { slog.Debug("Checking if Git signing is configured...") err := r.r.CanSign() if err != nil { return err } } keyID, err := signer.KeyID() if err != nil { return err } slog.Debug("Loading current policy...") state, err := policy.LoadCurrentState(ctx, r.r, policy.PolicyStagingRef) if err != nil { return err } if state.HasTargetsRole(targetsRoleName) { return ErrCannotReinitialize } // TODO: verify if role can be signed using the presented key. This requires // the user to pass in the delegating role as well as we do not want to // assume which role is the delegating role (diamond delegations are legal). // See: https://github.com/gittuf/gittuf/issues/246. slog.Debug("Creating initial rule file...") targetsMetadata := policy.InitializeTargetsMetadata() env, err := dsse.CreateEnvelope(targetsMetadata) if err != nil { return err } slog.Debug(fmt.Sprintf("Signing initial rule file using '%s'...", keyID)) env, err = dsse.SignEnvelope(ctx, env, signer) if err != nil { return err } if targetsRoleName == policy.TargetsRoleName { state.TargetsEnvelope = env } else { if state.DelegationEnvelopes == nil { state.DelegationEnvelopes = map[string]*sslibdsse.Envelope{} } state.DelegationEnvelopes[targetsRoleName] = env } commitMessage := fmt.Sprintf("Initialize policy '%s'", targetsRoleName) slog.Debug("Committing policy...") return state.Commit(r.r, commitMessage, signCommit) } // AddDelegation is the interface for the user to add a new rule to gittuf // policy.
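//
// A hedged usage sketch (the repo, signer, and key ID values below are
// illustrative placeholders; the rule pattern mirrors those used in the tests):
//
//	err := repo.AddDelegation(ctx, signer, policy.TargetsRoleName, "protect-main",
//		[]string{keyID}, []string{"git:refs/heads/main"}, 1, true)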
func (r *Repository) AddDelegation(ctx context.Context, signer sslibdsse.SignerVerifier, targetsRoleName string, ruleName string, authorizedPrincipalIDs, rulePatterns []string, threshold int, signCommit bool) error { if ruleName == policy.RootRoleName { return ErrInvalidPolicyName } if signCommit { slog.Debug("Checking if Git signing is configured...") err := r.r.CanSign() if err != nil { return err } } keyID, err := signer.KeyID() if err != nil { return err } slog.Debug("Loading current policy...") state, err := policy.LoadCurrentState(ctx, r.r, policy.PolicyStagingRef) if err != nil { return err } slog.Debug("Checking if rule with same name exists...") if state.HasRuleName(ruleName) { return tuf.ErrDuplicatedRuleName } slog.Debug("Loading current rule file...") if !state.HasTargetsRole(targetsRoleName) { return policy.ErrMetadataNotFound } // TODO: verify if role can be signed using the presented key. This requires // the user to pass in the delegating role as well as we do not want to // assume which role is the delegating role (diamond delegations are legal). // See: https://github.com/gittuf/gittuf/issues/246. targetsMetadata, err := state.GetTargetsMetadata(targetsRoleName, false) if err != nil { return err } slog.Debug("Adding rule to rule file...") if err := targetsMetadata.AddRule(ruleName, authorizedPrincipalIDs, rulePatterns, threshold); err != nil { return err } env, err := dsse.CreateEnvelope(targetsMetadata) if err != nil { return err } slog.Debug(fmt.Sprintf("Signing updated rule file using '%s'...", keyID)) env, err = dsse.SignEnvelope(ctx, env, signer) if err != nil { return err } if targetsRoleName == policy.TargetsRoleName { state.TargetsEnvelope = env } else { state.DelegationEnvelopes[targetsRoleName] = env } commitMessage := fmt.Sprintf("Add rule '%s' to policy '%s'", ruleName, targetsRoleName) slog.Debug("Committing policy...") return state.Commit(r.r, commitMessage, signCommit) } // UpdateDelegation is the interface for the user to update a rule to gittuf // policy. func (r *Repository) UpdateDelegation(ctx context.Context, signer sslibdsse.SignerVerifier, targetsRoleName string, ruleName string, authorizedPrincipalIDs, rulePatterns []string, threshold int, signCommit bool) error { if ruleName == policy.RootRoleName { return ErrInvalidPolicyName } if signCommit { slog.Debug("Checking if Git signing is configured...") err := r.r.CanSign() if err != nil { return err } } keyID, err := signer.KeyID() if err != nil { return err } slog.Debug("Loading current policy...") state, err := policy.LoadCurrentState(ctx, r.r, policy.PolicyStagingRef) if err != nil { return err } slog.Debug("Loading current rule file...") if !state.HasTargetsRole(targetsRoleName) { return policy.ErrMetadataNotFound } // TODO: verify if role can be signed using the presented key. This requires // the user to pass in the delegating role as well as we do not want to // assume which role is the delegating role (diamond delegations are legal). // See: https://github.com/gittuf/gittuf/issues/246. 
targetsMetadata, err := state.GetTargetsMetadata(targetsRoleName, false) if err != nil { return err } slog.Debug("Updating rule in rule file...") if err := targetsMetadata.UpdateRule(ruleName, authorizedPrincipalIDs, rulePatterns, threshold); err != nil { return err } env, err := dsse.CreateEnvelope(targetsMetadata) if err != nil { return err } slog.Debug(fmt.Sprintf("Signing updated rule file using '%s'...", keyID)) env, err = dsse.SignEnvelope(ctx, env, signer) if err != nil { return err } if targetsRoleName == policy.TargetsRoleName { state.TargetsEnvelope = env } else { state.DelegationEnvelopes[targetsRoleName] = env } commitMessage := fmt.Sprintf("Update rule '%s' in policy '%s'", ruleName, targetsRoleName) slog.Debug("Committing policy...") return state.Commit(r.r, commitMessage, signCommit) } // ReorderDelegations is the interface for the user to reorder rules in gittuf // policy. func (r *Repository) ReorderDelegations(ctx context.Context, signer sslibdsse.SignerVerifier, targetsRoleName string, ruleNames []string, signCommit bool) error { if signCommit { slog.Debug("Checking if Git signing is configured...") err := r.r.CanSign() if err != nil { return err } } keyID, err := signer.KeyID() if err != nil { return err } slog.Debug("Loading current policy...") state, err := policy.LoadCurrentState(ctx, r.r, policy.PolicyStagingRef) if err != nil { return err } slog.Debug("Loading current rule file...") if !state.HasTargetsRole(targetsRoleName) { return policy.ErrMetadataNotFound } targetsMetadata, err := state.GetTargetsMetadata(targetsRoleName, false) if err != nil { return err } slog.Debug("Reordering rules in rule file...") if err := targetsMetadata.ReorderRules(ruleNames); err != nil { return err } env, err := dsse.CreateEnvelope(targetsMetadata) if err != nil { return err } slog.Debug(fmt.Sprintf("Signing updated rule file using '%s'...", keyID)) env, err = dsse.SignEnvelope(ctx, env, signer) if err != nil { return err } if targetsRoleName == policy.TargetsRoleName { state.TargetsEnvelope = env } else { state.DelegationEnvelopes[targetsRoleName] = env } commitMessage := fmt.Sprintf("Reorder rules in policy '%s'", targetsRoleName) slog.Debug("Committing policy...") return state.Commit(r.r, commitMessage, signCommit) } // RemoveDelegation is the interface for a user to remove a rule from gittuf // policy. func (r *Repository) RemoveDelegation(ctx context.Context, signer sslibdsse.SignerVerifier, targetsRoleName string, ruleName string, signCommit bool) error { if signCommit { slog.Debug("Checking if Git signing is configured...") err := r.r.CanSign() if err != nil { return err } } keyID, err := signer.KeyID() if err != nil { return err } slog.Debug("Loading current policy...") state, err := policy.LoadCurrentState(ctx, r.r, policy.PolicyStagingRef) if err != nil { return err } slog.Debug("Loading current rule file...") if !state.HasTargetsRole(targetsRoleName) { return policy.ErrMetadataNotFound } // TODO: verify if role can be signed using the presented key. This requires // the user to pass in the delegating role as well as we do not want to // assume which role is the delegating role (diamond delegations are legal). // See: https://github.com/gittuf/gittuf/issues/246.
targetsMetadata, err := state.GetTargetsMetadata(targetsRoleName, false) if err != nil { return err } slog.Debug("Removing rule from rule file...") if err := targetsMetadata.RemoveRule(ruleName); err != nil { return err } env, err := dsse.CreateEnvelope(targetsMetadata) if err != nil { return err } slog.Debug(fmt.Sprintf("Signing updated rule file using '%s'...", keyID)) env, err = dsse.SignEnvelope(ctx, env, signer) if err != nil { return err } if targetsRoleName == policy.TargetsRoleName { state.TargetsEnvelope = env } else { state.DelegationEnvelopes[targetsRoleName] = env } commitMessage := fmt.Sprintf("Remove rule '%s' from policy '%s'", ruleName, targetsRoleName) slog.Debug("Committing policy...") return state.Commit(r.r, commitMessage, signCommit) } // AddPrincipalToTargets is the interface for a user to add a trusted principal // to gittuf rule file metadata. func (r *Repository) AddPrincipalToTargets(ctx context.Context, signer sslibdsse.SignerVerifier, targetsRoleName string, authorizedPrincipals []tuf.Principal, signCommit bool) error { if signCommit { slog.Debug("Checking if Git signing is configured...") err := r.r.CanSign() if err != nil { return err } } keyID, err := signer.KeyID() if err != nil { return err } slog.Debug("Loading current policy...") state, err := policy.LoadCurrentState(ctx, r.r, policy.PolicyStagingRef) if err != nil { return err } if !state.HasTargetsRole(targetsRoleName) { return policy.ErrMetadataNotFound } // TODO: verify if role can be signed using the presented key. This requires // the user to pass in the delegating role as well as we do not want to // assume which role is the delegating role (diamond delegations are legal). // See: https://github.com/gittuf/gittuf/issues/246. principalIDs := "" for _, principal := range authorizedPrincipals { principalIDs += fmt.Sprintf("\n%s", principal.ID()) } slog.Debug("Loading current rule file...") targetsMetadata, err := state.GetTargetsMetadata(targetsRoleName, false) if err != nil { return err } for _, principal := range authorizedPrincipals { slog.Debug(fmt.Sprintf("Adding principal '%s' to rule file...", strings.TrimSpace(principal.ID()))) if err := targetsMetadata.AddPrincipal(principal); err != nil { return err } } env, err := dsse.CreateEnvelope(targetsMetadata) if err != nil { return err } slog.Debug(fmt.Sprintf("Signing updated rule file using '%s'...", keyID)) env, err = dsse.SignEnvelope(ctx, env, signer) if err != nil { return err } if targetsRoleName == policy.TargetsRoleName { state.TargetsEnvelope = env } else { state.DelegationEnvelopes[targetsRoleName] = env } commitMessage := fmt.Sprintf("Add principals to policy '%s'\n%s", targetsRoleName, principalIDs) slog.Debug("Committing policy...") return state.Commit(r.r, commitMessage, signCommit) } // RemovePrincipalFromTargets is the interface for a user to remove a principal // from gittuf rule file metadata.
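//
// A hedged calling sketch (the repo, signer, and principal ID below are
// illustrative placeholders):
//
//	err := repo.RemovePrincipalFromTargets(ctx, signer, policy.TargetsRoleName, principalID, true)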
func (r *Repository) RemovePrincipalFromTargets(ctx context.Context, signer sslibdsse.SignerVerifier, targetsRoleName string, principalID string, signCommit bool) error { if signCommit { slog.Debug("Checking if Git signing is configured...") err := r.r.CanSign() if err != nil { return err } } keyID, err := signer.KeyID() if err != nil { return err } slog.Debug("Loading current policy...") state, err := policy.LoadCurrentState(ctx, r.r, policy.PolicyStagingRef) if err != nil { return err } if !state.HasTargetsRole(targetsRoleName) { return policy.ErrMetadataNotFound } slog.Debug("Loading current rule file...") targetsMetadata, err := state.GetTargetsMetadata(targetsRoleName, false) if err != nil { return err } slog.Debug(fmt.Sprintf("Removing principal '%s' from rule file...", strings.TrimSpace(principalID))) if err := targetsMetadata.RemovePrincipal(principalID); err != nil { return err } env, err := dsse.CreateEnvelope(targetsMetadata) if err != nil { return err } slog.Debug(fmt.Sprintf("Signing updated rule file using '%s'...", keyID)) env, err = dsse.SignEnvelope(ctx, env, signer) if err != nil { return err } if targetsRoleName == policy.TargetsRoleName { state.TargetsEnvelope = env } else { state.DelegationEnvelopes[targetsRoleName] = env } commitMessage := fmt.Sprintf("Remove principal from policy '%s'\n%s", targetsRoleName, principalID) slog.Debug("Committing policy...") return state.Commit(r.r, commitMessage, signCommit) } // SignTargets adds a signature to specified Targets role's envelope. Note that // the metadata itself is not modified, so its version remains the same. func (r *Repository) SignTargets(ctx context.Context, signer sslibdsse.SignerVerifier, targetsRoleName string, signCommit bool) error { if signCommit { slog.Debug("Checking if Git signing is configured...") err := r.r.CanSign() if err != nil { return err } } keyID, err := signer.KeyID() if err != nil { return err } slog.Debug("Loading current policy...") state, err := policy.LoadCurrentState(ctx, r.r, policy.PolicyStagingRef) if err != nil { return err } if !state.HasTargetsRole(targetsRoleName) { return policy.ErrMetadataNotFound } var env *sslibdsse.Envelope if targetsRoleName == policy.TargetsRoleName { env = state.TargetsEnvelope } else { env = state.DelegationEnvelopes[targetsRoleName] } slog.Debug(fmt.Sprintf("Signing rule file using '%s'...", keyID)) env, err = dsse.SignEnvelope(ctx, env, signer) if err != nil { return err } if targetsRoleName == policy.TargetsRoleName { state.TargetsEnvelope = env } else { state.DelegationEnvelopes[targetsRoleName] = env } commitMessage := fmt.Sprintf("Add signature from key '%s' to policy '%s'", keyID, targetsRoleName) slog.Debug("Committing policy...") return state.Commit(r.r, commitMessage, signCommit) } gittuf-0.9.0/experimental/gittuf/targets_test.go000066400000000000000000000266551475150141000220740ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package gittuf import ( "testing" "github.com/gittuf/gittuf/internal/common/set" "github.com/gittuf/gittuf/internal/policy" "github.com/gittuf/gittuf/internal/signerverifier/gpg" "github.com/gittuf/gittuf/internal/tuf" tufv01 "github.com/gittuf/gittuf/internal/tuf/v01" "github.com/stretchr/testify/assert" ) func TestInitializeTargets(t *testing.T) { rootSigner := setupSSHKeysForSigning(t, rootKeyBytes, rootPubKeyBytes) targetsSigner := setupSSHKeysForSigning(t, targetsKeyBytes, targetsPubKeyBytes) targetsKey := tufv01.NewKeyFromSSLibKey(targetsSigner.MetadataKey()) 
t.Run("successful initialization", func(t *testing.T) { // The helper also runs InitializeTargets for this test r := createTestRepositoryWithRoot(t, "") if err := r.AddTopLevelTargetsKey(testCtx, rootSigner, targetsKey, false); err != nil { t.Fatal(err) } if err := r.InitializeTargets(testCtx, targetsSigner, policy.TargetsRoleName, false); err != nil { t.Fatal(err) } state, err := policy.LoadCurrentState(testCtx, r.r, policy.PolicyStagingRef) if err != nil { t.Fatal(err) } targetsMetadata, err := state.GetTargetsMetadata(policy.TargetsRoleName, false) assert.Nil(t, err) assert.Contains(t, targetsMetadata.GetRules(), tufv01.AllowRule()) }) t.Run("invalid role name", func(t *testing.T) { r := createTestRepositoryWithRoot(t, "") if err := r.AddTopLevelTargetsKey(testCtx, rootSigner, targetsKey, false); err != nil { t.Fatal(err) } err := r.InitializeTargets(testCtx, targetsSigner, policy.RootRoleName, false) assert.ErrorIs(t, err, ErrInvalidPolicyName) }) } func TestAddDelegation(t *testing.T) { targetsSigner := setupSSHKeysForSigning(t, targetsKeyBytes, targetsPubKeyBytes) t.Run("valid rule / delegation name", func(t *testing.T) { r := createTestRepositoryWithPolicy(t, "") targetsPubKey := tufv01.NewKeyFromSSLibKey(targetsSigner.MetadataKey()) ruleName := "test-rule" authorizedKeys := []tuf.Principal{targetsPubKey} rulePatterns := []string{"git:branch=main"} state, err := policy.LoadCurrentState(testCtx, r.r, policy.PolicyStagingRef) if err != nil { t.Fatal(err) } gpgKey, err := gpg.LoadGPGKeyFromBytes(gpgPubKeyBytes) if err != nil { t.Fatal(err) } targetsMetadata, err := state.GetTargetsMetadata(policy.TargetsRoleName, false) assert.Nil(t, err) assert.Equal(t, 1, len(targetsMetadata.GetPrincipals())) assert.Equal(t, 2, len(targetsMetadata.GetRules())) assert.Contains(t, targetsMetadata.GetRules(), tufv01.AllowRule()) if err := r.AddPrincipalToTargets(testCtx, targetsSigner, policy.TargetsRoleName, authorizedKeys, false); err != nil { t.Fatal(err) } err = r.AddDelegation(testCtx, targetsSigner, policy.TargetsRoleName, ruleName, []string{targetsPubKey.KeyID}, rulePatterns, 1, false) assert.Nil(t, err) state, err = policy.LoadCurrentState(testCtx, r.r, policy.PolicyStagingRef) if err != nil { t.Fatal(err) } targetsMetadata, err = state.GetTargetsMetadata(policy.TargetsRoleName, false) assert.Nil(t, err) assert.Contains(t, targetsMetadata.GetPrincipals(), targetsPubKey.ID()) assert.Contains(t, targetsMetadata.GetPrincipals(), gpgKey.KeyID) assert.Equal(t, 2, len(targetsMetadata.GetPrincipals())) assert.Equal(t, 3, len(targetsMetadata.GetRules())) assert.Contains(t, targetsMetadata.GetRules(), &tufv01.Delegation{ Name: ruleName, Paths: rulePatterns, Terminating: false, Role: tufv01.Role{KeyIDs: set.NewSetFromItems(targetsPubKey.KeyID), Threshold: 1}, }) assert.Contains(t, targetsMetadata.GetRules(), tufv01.AllowRule()) }) t.Run("invalid rule name", func(t *testing.T) { r := createTestRepositoryWithPolicy(t, "") err := r.AddDelegation(testCtx, targetsSigner, policy.TargetsRoleName, policy.RootRoleName, nil, nil, 1, false) assert.ErrorIs(t, err, ErrInvalidPolicyName) }) } func TestUpdateDelegation(t *testing.T) { r := createTestRepositoryWithPolicy(t, "") targetsSigner := setupSSHKeysForSigning(t, targetsKeyBytes, targetsPubKeyBytes) gpgKeyR, err := gpg.LoadGPGKeyFromBytes(gpgKeyBytes) if err != nil { t.Fatal(err) } gpgKey := tufv01.NewKeyFromSSLibKey(gpgKeyR) targetsKey := tufv01.NewKeyFromSSLibKey(targetsSigner.MetadataKey()) if err := r.AddPrincipalToTargets(testCtx, targetsSigner, 
policy.TargetsRoleName, []tuf.Principal{gpgKey, targetsKey}, false); err != nil { t.Fatal(err) } err = r.UpdateDelegation(testCtx, targetsSigner, policy.TargetsRoleName, "protect-main", []string{gpgKey.KeyID, targetsKey.KeyID}, []string{"git:refs/heads/main"}, 1, false) assert.Nil(t, err) state, err := policy.LoadCurrentState(testCtx, r.r, policy.PolicyStagingRef) if err != nil { t.Fatal(err) } targetsMetadata, err := state.GetTargetsMetadata(policy.TargetsRoleName, false) if err != nil { t.Fatal(err) } assert.Equal(t, 2, len(targetsMetadata.GetRules())) assert.Contains(t, targetsMetadata.GetRules(), &tufv01.Delegation{ Name: "protect-main", Paths: []string{"git:refs/heads/main"}, Terminating: false, Role: tufv01.Role{KeyIDs: set.NewSetFromItems(gpgKey.KeyID, targetsKey.KeyID), Threshold: 1}, }) } func TestReorderDelegations(t *testing.T) { targetsSigner := setupSSHKeysForSigning(t, targetsKeyBytes, targetsPubKeyBytes) targetsKey := tufv01.NewKeyFromSSLibKey(targetsSigner.MetadataKey()) r := createTestRepositoryWithPolicy(t, "") if err := r.AddPrincipalToTargets(testCtx, targetsSigner, policy.TargetsRoleName, []tuf.Principal{targetsKey}, false); err != nil { t.Fatal(err) } ruleNames := []string{"rule-1", "rule-2", "rule-3"} for _, ruleName := range ruleNames { err := r.AddDelegation(testCtx, targetsSigner, policy.TargetsRoleName, ruleName, []string{targetsKey.KeyID}, []string{ruleName}, 1, false) if err != nil { t.Fatal(err) } } // Valid Input newOrder := []string{"rule-3", "rule-1", "rule-2", "protect-main"} err := r.ReorderDelegations(testCtx, targetsSigner, policy.TargetsRoleName, newOrder, false) if err != nil { t.Fatal(err) } state, err := policy.LoadCurrentState(testCtx, r.r, policy.PolicyStagingRef) if err != nil { t.Fatal(err) } targetsMetadata, err := state.GetTargetsMetadata(policy.TargetsRoleName, false) if err != nil { t.Fatal(err) } finalOrder := []string{} for _, role := range targetsMetadata.GetRules() { finalOrder = append(finalOrder, role.ID()) } expectedFinalOrder := append([]string{}, newOrder...) 
expectedFinalOrder = append(expectedFinalOrder, tuf.AllowRuleName) assert.Equal(t, expectedFinalOrder, finalOrder) } func TestRemoveDelegation(t *testing.T) { r := createTestRepositoryWithPolicy(t, "") targetsSigner := setupSSHKeysForSigning(t, targetsKeyBytes, targetsPubKeyBytes) targetsPubKey := tufv01.NewKeyFromSSLibKey(targetsSigner.MetadataKey()) ruleName := "test-rule" authorizedKeys := []tuf.Principal{targetsPubKey} rulePatterns := []string{"git:branch=main"} if err := r.AddPrincipalToTargets(testCtx, targetsSigner, policy.TargetsRoleName, authorizedKeys, false); err != nil { t.Fatal(err) } err := r.AddDelegation(testCtx, targetsSigner, policy.TargetsRoleName, ruleName, []string{targetsPubKey.KeyID}, rulePatterns, 1, false) assert.Nil(t, err) state, err := policy.LoadCurrentState(testCtx, r.r, policy.PolicyStagingRef) if err != nil { t.Fatal(err) } targetsMetadata, err := state.GetTargetsMetadata(policy.TargetsRoleName, false) assert.Nil(t, err) assert.Contains(t, targetsMetadata.GetPrincipals(), targetsPubKey.ID()) assert.Equal(t, 3, len(targetsMetadata.GetRules())) assert.Contains(t, targetsMetadata.GetRules(), &tufv01.Delegation{ Name: ruleName, Paths: rulePatterns, Terminating: false, Role: tufv01.Role{KeyIDs: set.NewSetFromItems(targetsPubKey.KeyID), Threshold: 1}, }) assert.Contains(t, targetsMetadata.GetRules(), tufv01.AllowRule()) err = r.RemoveDelegation(testCtx, targetsSigner, policy.TargetsRoleName, ruleName, false) assert.Nil(t, err) state, err = policy.LoadCurrentState(testCtx, r.r, policy.PolicyStagingRef) if err != nil { t.Fatal(err) } targetsMetadata, err = state.GetTargetsMetadata(policy.TargetsRoleName, false) assert.Nil(t, err) assert.Contains(t, targetsMetadata.GetPrincipals(), targetsPubKey.ID()) assert.Equal(t, 2, len(targetsMetadata.GetRules())) assert.Contains(t, targetsMetadata.GetRules(), tufv01.AllowRule()) } func TestAddPrincipalToTargets(t *testing.T) { r := createTestRepositoryWithPolicy(t, "") targetsSigner := setupSSHKeysForSigning(t, targetsKeyBytes, targetsPubKeyBytes) targetsPubKey := tufv01.NewKeyFromSSLibKey(targetsSigner.MetadataKey()) gpgKeyR, err := gpg.LoadGPGKeyFromBytes(gpgPubKeyBytes) if err != nil { t.Fatal(err) } gpgKey := tufv01.NewKeyFromSSLibKey(gpgKeyR) authorizedKeysBytes := []tuf.Principal{targetsPubKey, gpgKey} state, err := policy.LoadCurrentState(testCtx, r.r, policy.PolicyStagingRef) if err != nil { t.Fatal(err) } targetsMetadata, err := state.GetTargetsMetadata(policy.TargetsRoleName, false) assert.Nil(t, err) assert.Contains(t, targetsMetadata.GetPrincipals(), gpgKey.KeyID) assert.Equal(t, 1, len(targetsMetadata.GetPrincipals())) err = r.AddPrincipalToTargets(testCtx, targetsSigner, policy.TargetsRoleName, authorizedKeysBytes, false) assert.Nil(t, err) state, err = policy.LoadCurrentState(testCtx, r.r, policy.PolicyStagingRef) if err != nil { t.Fatal(err) } targetsMetadata, err = state.GetTargetsMetadata(policy.TargetsRoleName, false) assert.Nil(t, err) assert.Equal(t, 2, len(targetsMetadata.GetPrincipals())) } func TestRemovePrincipalFromTargets(t *testing.T) { r := createTestRepositoryWithPolicy(t, "") targetsSigner := setupSSHKeysForSigning(t, targetsKeyBytes, targetsPubKeyBytes) targetsPubKey := tufv01.NewKeyFromSSLibKey(targetsSigner.MetadataKey()) gpgKeyR, err := gpg.LoadGPGKeyFromBytes(gpgPubKeyBytes) if err != nil { t.Fatal(err) } gpgKey := tufv01.NewKeyFromSSLibKey(gpgKeyR) authorizedKeysBytes := []tuf.Principal{targetsPubKey, gpgKey} err = r.AddPrincipalToTargets(testCtx, targetsSigner, policy.TargetsRoleName,
authorizedKeysBytes, false) assert.Nil(t, err) state, err := policy.LoadCurrentState(testCtx, r.r, policy.PolicyStagingRef) if err != nil { t.Fatal(err) } targetsMetadata, err := state.GetTargetsMetadata(policy.TargetsRoleName, false) assert.Nil(t, err) assert.Equal(t, 2, len(targetsMetadata.GetPrincipals())) err = r.RemovePrincipalFromTargets(testCtx, targetsSigner, policy.TargetsRoleName, targetsPubKey.ID(), false) assert.Nil(t, err) state, err = policy.LoadCurrentState(testCtx, r.r, policy.PolicyStagingRef) if err != nil { t.Fatal(err) } targetsMetadata, err = state.GetTargetsMetadata(policy.TargetsRoleName, false) assert.Nil(t, err) assert.Equal(t, 1, len(targetsMetadata.GetPrincipals())) assert.Contains(t, targetsMetadata.GetPrincipals(), gpgKey.KeyID) } func TestSignTargets(t *testing.T) { r := createTestRepositoryWithPolicy(t, "") // Add root key as a targets key rootSigner := setupSSHKeysForSigning(t, rootKeyBytes, rootPubKeyBytes) rootPubKey := tufv01.NewKeyFromSSLibKey(rootSigner.MetadataKey()) if err := r.AddTopLevelTargetsKey(testCtx, rootSigner, rootPubKey, false); err != nil { t.Fatal(err) } // Add signature to targets err := r.SignTargets(testCtx, rootSigner, policy.TargetsRoleName, false) assert.Nil(t, err) state, err := policy.LoadCurrentState(testCtx, r.r, policy.PolicyStagingRef) if err != nil { t.Fatal(err) } assert.Equal(t, 2, len(state.TargetsEnvelope.Signatures)) } gittuf-0.9.0/experimental/gittuf/verify.go000066400000000000000000000146051475150141000206600ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package gittuf import ( "context" "errors" "fmt" "log/slog" verifyopts "github.com/gittuf/gittuf/experimental/gittuf/options/verify" verifymergeableopts "github.com/gittuf/gittuf/experimental/gittuf/options/verifymergeable" "github.com/gittuf/gittuf/internal/dev" "github.com/gittuf/gittuf/internal/gitinterface" "github.com/gittuf/gittuf/internal/policy" ) // ErrRefStateDoesNotMatchRSL is returned when a Git reference being verified // does not have the same tip as identified in the latest RSL entry for the // reference. This can happen for a number of reasons such as incorrectly // modifying reference state away from what's recorded in the RSL to not // creating an RSL entry for some new changes. Depending on the context, one // resolution is to update the reference state to match the RSL entry, while // another is to create a new RSL entry for the current state. 
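//
// A hedged sketch of how a caller might detect this condition (names shown
// from a caller's perspective; the ref name is a placeholder):
//
//	if err := repo.VerifyRef(ctx, "refs/heads/main"); errors.Is(err, gittuf.ErrRefStateDoesNotMatchRSL) {
//		// either reset the ref to the RSL-recorded tip or record a new RSL entry
//	}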
var ErrRefStateDoesNotMatchRSL = errors.New("current state of Git reference does not match latest RSL entry") func (r *Repository) VerifyRef(ctx context.Context, refName string, opts ...verifyopts.Option) error { var ( expectedTip gitinterface.Hash err error ) options := &verifyopts.Options{} for _, fn := range opts { fn(options) } slog.Debug("Identifying absolute reference path...") refName, err = r.r.AbsoluteReference(refName) if err != nil { return err } // Track localRefName to check the expected tip as we may override refName localRefName := refName if options.RefNameOverride != "" { // remote ref name is different // We must consider RSL entries that have refNameOverride rather than // refName slog.Debug("Name of reference overridden to match remote reference name, identifying absolute reference path...") refNameOverride, err := r.r.AbsoluteReference(options.RefNameOverride) if err != nil { return err } refName = refNameOverride } slog.Debug(fmt.Sprintf("Verifying gittuf policies for '%s'", refName)) verifier := policy.NewPolicyVerifier(r.r) if options.LatestOnly { expectedTip, err = verifier.VerifyRef(ctx, refName) } else { expectedTip, err = verifier.VerifyRefFull(ctx, refName) } if err != nil { return err } // To verify the tip, we _must_ use the localRefName slog.Debug("Verifying if tip of reference matches expected value from RSL...") if err := r.verifyRefTip(localRefName, expectedTip); err != nil { return err } slog.Debug("Verification successful!") return nil } func (r *Repository) VerifyRefFromEntry(ctx context.Context, refName, entryID string, opts ...verifyopts.Option) error { if !dev.InDevMode() { return dev.ErrNotInDevMode } options := &verifyopts.Options{} for _, fn := range opts { fn(options) } var err error slog.Debug("Identifying absolute reference path...") refName, err = r.r.AbsoluteReference(refName) if err != nil { return err } entryIDHash, err := gitinterface.NewHash(entryID) if err != nil { return err } // Track localRefName to check the expected tip as we may override refName localRefName := refName if options.RefNameOverride != "" { // remote ref name is different // We must consider RSL entries that have refNameOverride rather than // refName slog.Debug("Name of reference overridden to match remote reference name, identifying absolute reference path...") refNameOverride, err := r.r.AbsoluteReference(options.RefNameOverride) if err != nil { return err } refName = refNameOverride } slog.Debug(fmt.Sprintf("Verifying gittuf policies for '%s' from entry '%s'", refName, entryID)) verifier := policy.NewPolicyVerifier(r.r) expectedTip, err := verifier.VerifyRefFromEntry(ctx, refName, entryIDHash) if err != nil { return err } // To verify the tip, we _must_ use the localRefName slog.Debug("Verifying if tip of reference matches expected value from RSL...") if err := r.verifyRefTip(localRefName, expectedTip); err != nil { return err } slog.Debug("Verification successful!") return nil } // VerifyMergeable checks if the targetRef can be updated to reflect the changes // in featureRef. It checks if sufficient authorizations / approvals exist for // the merge to happen, indicated by the error being nil. Additionally, a // boolean value is also returned that indicates whether a final authorized // signature is still necessary via the RSL entry for the merge. 
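//
// A rough caller-side sketch (the *Repository value "repo", the context "ctx",
// and the ref names are illustrative assumptions, not part of this file):
//
//	mustSign, err := repo.VerifyMergeable(ctx, "refs/heads/main", "refs/heads/feature")
//	switch {
//	case err != nil:
//		// Merge is not possible under current policy.
//	case mustSign:
//		// Merge is possible, but the merge's RSL entry must be signed
//		// by an authorized person.
//	default:
//		// Merge is possible and can be recorded by anyone.
//	}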
// // Summary of return combinations: // (false, err) -> merge is not possible // (false, nil) -> merge is possible and can be performed by anyone // (true, nil) -> merge is possible but it MUST be performed by an authorized // person for the rule, i.e., an authorized person must sign the merge's RSL // entry func (r *Repository) VerifyMergeable(ctx context.Context, targetRef, featureRef string, opts ...verifymergeableopts.Option) (bool, error) { var err error options := &verifymergeableopts.Options{} for _, fn := range opts { fn(options) } slog.Debug("Identifying absolute reference paths...") targetRef, err = r.r.AbsoluteReference(targetRef) if err != nil { return false, err } featureRef, err = r.r.AbsoluteReference(featureRef) if err != nil { return false, err } slog.Debug(fmt.Sprintf("Inspecting gittuf policies to identify if '%s' can be merged into '%s' with current approvals...", featureRef, targetRef)) verifier := policy.NewPolicyVerifier(r.r) var needRSLSignature bool if options.BypassRSLForFeatureRef { slog.Debug("Not using RSL for feature ref...") featureID, err := r.r.GetReference(featureRef) if err != nil { return false, err } needRSLSignature, err = verifier.VerifyMergeableForCommit(ctx, targetRef, featureID) if err != nil { return false, err } } else { needRSLSignature, err = verifier.VerifyMergeable(ctx, targetRef, featureRef) if err != nil { return false, err } } if needRSLSignature { slog.Debug("Merge is allowed but must be performed by authorized user who has not already issued an approval!") } else { slog.Debug("Merge is allowed and can be performed by any user!") } return needRSLSignature, nil } // verifyRefTip inspects the specified reference in the local repository to // check if it points to the expected Git object. func (r *Repository) verifyRefTip(target string, expectedTip gitinterface.Hash) error { refTip, err := r.r.GetReference(target) if err != nil { return err } if !refTip.Equal(expectedTip) { return ErrRefStateDoesNotMatchRSL } return nil } gittuf-0.9.0/experimental/gittuf/verify_test.go000066400000000000000000000146511475150141000217200ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package gittuf import ( "fmt" "testing" verifyopts "github.com/gittuf/gittuf/experimental/gittuf/options/verify" "github.com/gittuf/gittuf/internal/common" "github.com/gittuf/gittuf/internal/dev" "github.com/gittuf/gittuf/internal/gitinterface" "github.com/gittuf/gittuf/internal/policy" "github.com/gittuf/gittuf/internal/rsl" "github.com/stretchr/testify/assert" ) func TestVerifyRef(t *testing.T) { repo := createTestRepositoryWithPolicy(t, "") refName := "refs/heads/main" remoteRefName := "refs/heads/not-main" commitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo.r, refName, 1, gpgKeyBytes) entry := rsl.NewReferenceEntry(refName, commitIDs[0]) entryID := common.CreateTestRSLReferenceEntryCommit(t, repo.r, entry, gpgKeyBytes) entry.ID = entryID // Add one entry for a different remote ref name entry = rsl.NewReferenceEntry(remoteRefName, commitIDs[0]) entryID = common.CreateTestRSLReferenceEntryCommit(t, repo.r, entry, gpgKeyBytes) entry.ID = entryID tests := map[string]struct { localRefName string remoteRefName string latestOnly bool err error }{ "absolute ref, not full": { localRefName: refName, latestOnly: true, }, "absolute ref, full": { localRefName: refName, latestOnly: false, }, "relative ref, not full": { localRefName: "main", latestOnly: true, }, "relative ref, full": { localRefName: "main", latestOnly: false, }, 
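// Note: the "unknown ref" cases below are expected to fail with
// rsl.ErrRSLEntryNotFound because the setup above records RSL entries only
// for refName and remoteRefName.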
"unknown ref, full": { localRefName: "refs/heads/unknown", latestOnly: false, err: rsl.ErrRSLEntryNotFound, }, "different local and remote ref names, not full": { localRefName: refName, remoteRefName: remoteRefName, latestOnly: true, }, "different local and remote ref names, full": { localRefName: refName, remoteRefName: remoteRefName, latestOnly: false, }, "unknown remote ref, full": { localRefName: refName, remoteRefName: "refs/heads/unknown", latestOnly: false, err: rsl.ErrRSLEntryNotFound, }, } for name, test := range tests { options := []verifyopts.Option{verifyopts.WithOverrideRefName(test.remoteRefName)} if test.latestOnly { options = append(options, verifyopts.WithLatestOnly()) } err := repo.VerifyRef(testCtx, test.localRefName, options...) if test.err != nil { assert.ErrorIs(t, err, test.err, fmt.Sprintf("unexpected error in test '%s'", name)) } else { assert.Nil(t, err, fmt.Sprintf("unexpected error in test '%s'", name)) } } // Add another commit common.AddNTestCommitsToSpecifiedRef(t, repo.r, refName, 1, gpgKeyBytes) err := repo.VerifyRef(testCtx, refName, verifyopts.WithLatestOnly()) assert.ErrorIs(t, err, ErrRefStateDoesNotMatchRSL) err = repo.VerifyRef(testCtx, refName, verifyopts.WithLatestOnly()) assert.ErrorIs(t, err, ErrRefStateDoesNotMatchRSL) } func TestVerifyRefFromEntry(t *testing.T) { t.Setenv(dev.DevModeKey, "1") repo := createTestRepositoryWithPolicy(t, "") refName := "refs/heads/main" remoteRefName := "refs/heads/not-main" // Policy violation commitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo.r, refName, 1, gpgUnauthorizedKeyBytes) // Violation for refName entry := rsl.NewReferenceEntry(refName, commitIDs[0]) violatingEntryID := common.CreateTestRSLReferenceEntryCommit(t, repo.r, entry, gpgUnauthorizedKeyBytes) // Violation for remoteRefName entry = rsl.NewReferenceEntry(remoteRefName, commitIDs[0]) violatingRemoteRefNameEntryID := common.CreateTestRSLReferenceEntryCommit(t, repo.r, entry, gpgUnauthorizedKeyBytes) // No policy violation for refName commitIDs = common.AddNTestCommitsToSpecifiedRef(t, repo.r, refName, 1, gpgKeyBytes) // refName entry = rsl.NewReferenceEntry(refName, commitIDs[0]) goodEntryID := common.CreateTestRSLReferenceEntryCommit(t, repo.r, entry, gpgKeyBytes) // remoteRefName entry = rsl.NewReferenceEntry(remoteRefName, commitIDs[0]) goodRemoteRefNameEntryID := common.CreateTestRSLReferenceEntryCommit(t, repo.r, entry, gpgKeyBytes) // No policy violation for refName (what we verify) commitIDs = common.AddNTestCommitsToSpecifiedRef(t, repo.r, refName, 1, gpgKeyBytes) entry = rsl.NewReferenceEntry(refName, commitIDs[0]) common.CreateTestRSLReferenceEntryCommit(t, repo.r, entry, gpgKeyBytes) // No policy violation for remoteRefName (what we verify) entry = rsl.NewReferenceEntry(remoteRefName, commitIDs[0]) common.CreateTestRSLReferenceEntryCommit(t, repo.r, entry, gpgKeyBytes) tests := map[string]struct { localRefName string remoteRefName string fromEntryID gitinterface.Hash err error }{ "absolute ref, from non-violating": { localRefName: "refs/heads/main", fromEntryID: goodEntryID, }, "absolute ref, from violating": { localRefName: "refs/heads/main", fromEntryID: violatingEntryID, err: policy.ErrVerificationFailed, }, "relative ref, from non-violating": { localRefName: "main", fromEntryID: goodEntryID, }, "relative ref, from violating": { localRefName: "main", fromEntryID: violatingEntryID, err: policy.ErrVerificationFailed, }, "unknown ref": { localRefName: "refs/heads/unknown", fromEntryID: gitinterface.ZeroHash, err: 
rsl.ErrRSLEntryNotFound, }, "different local and remote ref names, from non-violating": { localRefName: refName, remoteRefName: remoteRefName, fromEntryID: goodRemoteRefNameEntryID, }, "different local and remote ref names, from violating": { localRefName: refName, remoteRefName: remoteRefName, fromEntryID: violatingRemoteRefNameEntryID, }, } for name, test := range tests { err := repo.VerifyRefFromEntry(testCtx, test.localRefName, test.fromEntryID.String(), verifyopts.WithOverrideRefName(test.remoteRefName)) if test.err != nil { assert.ErrorIs(t, err, test.err, fmt.Sprintf("unexpected error in test '%s'", name)) } else { assert.Nil(t, err, fmt.Sprintf("unexpected error in test '%s'", name)) } } // Add another commit common.AddNTestCommitsToSpecifiedRef(t, repo.r, refName, 1, gpgKeyBytes) // Verifying from only good entry tells us ref does not match RSL err := repo.VerifyRefFromEntry(testCtx, refName, goodEntryID.String()) assert.ErrorIs(t, err, ErrRefStateDoesNotMatchRSL) // Verifying from violating entry tells us unauthorized signature err = repo.VerifyRefFromEntry(testCtx, refName, violatingEntryID.String()) assert.ErrorIs(t, err, policy.ErrVerificationFailed) } gittuf-0.9.0/go.mod000066400000000000000000000175271475150141000141420ustar00rootroot00000000000000module github.com/gittuf/gittuf go 1.23.4 require ( github.com/ProtonMail/go-crypto v1.1.5 github.com/charmbracelet/bubbletea v1.3.0 github.com/danwakefield/fnmatch v0.0.0-20160403171240-cbb64ac3d964 github.com/go-git/go-git/v5 v5.13.2 github.com/google/go-github/v61 v61.0.0 github.com/hiddeco/sshsig v0.1.0 github.com/in-toto/attestation v1.1.1 github.com/jonboulle/clockwork v0.5.0 github.com/secure-systems-lab/go-securesystemslib v0.9.0 github.com/sigstore/cosign/v2 v2.4.1 github.com/sigstore/gitsign v0.12.0 github.com/sigstore/protobuf-specs v0.4.0 github.com/sigstore/sigstore v1.8.12 github.com/sigstore/sigstore-go v0.6.2 github.com/spf13/cobra v1.8.1 github.com/stretchr/testify v1.10.0 golang.org/x/crypto v0.32.0 google.golang.org/protobuf v1.36.5 ) require ( github.com/atotto/clipboard v0.1.4 // indirect github.com/sahilm/fuzzy v0.1.1 // indirect ) require ( dario.cat/mergo v1.0.1 // indirect github.com/Microsoft/go-winio v0.6.2 // indirect github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 // indirect github.com/aymanbagabas/go-osc52/v2 v2.0.1 // indirect github.com/blang/semver v3.5.1+incompatible // indirect github.com/charmbracelet/bubbles v0.20.0 github.com/charmbracelet/lipgloss v1.0.0 github.com/charmbracelet/x/ansi v0.8.0 // indirect github.com/charmbracelet/x/term v0.2.1 // indirect github.com/cloudflare/circl v1.5.0 // indirect github.com/containerd/stargz-snapshotter/estargz v0.16.3 // indirect github.com/coreos/go-oidc/v3 v3.12.0 // indirect github.com/cpuguy83/go-md2man/v2 v2.0.5 // indirect github.com/cyberphone/json-canonicalization v0.0.0-20241213102144-19d51d7fe467 // indirect github.com/cyphar/filepath-securejoin v0.3.6 // indirect github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect github.com/digitorus/pkcs7 v0.0.0-20230818184609-3a137a874352 // indirect github.com/digitorus/timestamp v0.0.0-20231217203849-220c5c2851b7 // indirect github.com/docker/cli v27.4.1+incompatible // indirect github.com/docker/distribution v2.8.3+incompatible // indirect github.com/docker/docker-credential-helpers v0.8.2 // indirect github.com/dustin/go-humanize v1.0.1 // indirect github.com/emirpasic/gods v1.18.1 // indirect github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f 
// indirect github.com/fsnotify/fsnotify v1.8.0 // indirect github.com/github/smimesign v0.2.0 // indirect github.com/go-chi/chi v4.1.2+incompatible // indirect github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 // indirect github.com/go-git/go-billy/v5 v5.6.2 // indirect github.com/go-jose/go-jose/v3 v3.0.3 // indirect github.com/go-jose/go-jose/v4 v4.0.4 // indirect github.com/go-logr/logr v1.4.2 // indirect github.com/go-logr/stdr v1.2.2 // indirect github.com/go-openapi/analysis v0.23.0 // indirect github.com/go-openapi/errors v0.22.0 // indirect github.com/go-openapi/jsonpointer v0.21.0 // indirect github.com/go-openapi/jsonreference v0.21.0 // indirect github.com/go-openapi/loads v0.22.0 // indirect github.com/go-openapi/runtime v0.28.0 // indirect github.com/go-openapi/spec v0.21.0 // indirect github.com/go-openapi/strfmt v0.23.0 // indirect github.com/go-openapi/swag v0.23.0 // indirect github.com/go-openapi/validate v0.24.0 // indirect github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 // indirect github.com/golang/snappy v0.0.4 // indirect github.com/google/certificate-transparency-go v1.3.0 // indirect github.com/google/go-containerregistry v0.20.2 // indirect github.com/google/go-querystring v1.1.0 // indirect github.com/google/trillian v1.7.0 // indirect github.com/google/uuid v1.6.0 // indirect github.com/hashicorp/go-cleanhttp v0.5.2 // indirect github.com/hashicorp/go-retryablehttp v0.7.7 // indirect github.com/hashicorp/hcl v1.0.1-vault-7 // indirect github.com/in-toto/in-toto-golang v0.9.0 // indirect github.com/inconshreveable/mousetrap v1.1.0 // indirect github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 // indirect github.com/jedisct1/go-minisign v0.0.0-20241212093149-d2f9f49435c7 // indirect github.com/josharian/intern v1.0.0 // indirect github.com/kevinburke/ssh_config v1.2.0 // indirect github.com/klauspost/compress v1.17.11 // indirect github.com/letsencrypt/boulder v0.0.0-20241220190419-d6e163c15d44 // indirect github.com/lucasb-eyer/go-colorful v1.2.0 // indirect github.com/magiconair/properties v1.8.9 // indirect github.com/mailru/easyjson v0.9.0 // indirect github.com/mattn/go-isatty v0.0.20 github.com/mattn/go-localereader v0.0.1 // indirect github.com/mattn/go-runewidth v0.0.16 // indirect github.com/mitchellh/go-homedir v1.1.0 // indirect github.com/mitchellh/mapstructure v1.5.0 // indirect github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6 // indirect github.com/muesli/cancelreader v0.2.2 // indirect github.com/muesli/termenv v0.15.2 // indirect github.com/nozzle/throttler v0.0.0-20180817012639-2ea982251481 // indirect github.com/oklog/ulid v1.3.1 // indirect github.com/opencontainers/go-digest v1.0.0 // indirect github.com/opencontainers/image-spec v1.1.0 // indirect github.com/opentracing/opentracing-go v1.2.0 // indirect github.com/pelletier/go-toml/v2 v2.2.3 // indirect github.com/pjbgf/sha1cd v0.3.2 // indirect github.com/pkg/errors v0.9.1 // indirect github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect github.com/prometheus/common v0.60.0 // indirect github.com/rivo/uniseg v0.4.7 // indirect github.com/russross/blackfriday/v2 v2.1.0 // indirect github.com/sagikazarmark/locafero v0.6.0 // indirect github.com/sagikazarmark/slog-shim v0.1.0 // indirect github.com/sassoftware/relic v7.2.1+incompatible // indirect github.com/segmentio/ksuid v1.0.4 // indirect github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 // indirect github.com/shibumi/go-pathspec v1.3.0 // indirect 
github.com/sigstore/rekor v1.3.7 // indirect github.com/sigstore/timestamp-authority v1.2.3 // indirect github.com/sirupsen/logrus v1.9.3 // indirect github.com/skeema/knownhosts v1.3.0 // indirect github.com/skratchdot/open-golang v0.0.0-20200116055534-eef842397966 // indirect github.com/sourcegraph/conc v0.3.0 // indirect github.com/spf13/afero v1.11.0 // indirect github.com/spf13/cast v1.7.1 // indirect github.com/spf13/pflag v1.0.5 // indirect github.com/spf13/viper v1.19.0 // indirect github.com/subosito/gotenv v1.6.0 // indirect github.com/syndtr/goleveldb v1.0.1-0.20220721030215-126854af5e6d // indirect github.com/theupdateframework/go-tuf v0.7.0 // indirect github.com/theupdateframework/go-tuf/v2 v2.0.2 // indirect github.com/titanous/rocacheck v0.0.0-20171023193734-afe73141d399 // indirect github.com/transparency-dev/merkle v0.0.2 // indirect github.com/vbatts/tar-split v0.11.6 // indirect github.com/xanzy/ssh-agent v0.3.3 // indirect go.mongodb.org/mongo-driver v1.17.1 // indirect go.opentelemetry.io/auto/sdk v1.1.0 // indirect go.opentelemetry.io/otel v1.33.0 // indirect go.opentelemetry.io/otel/metric v1.33.0 // indirect go.opentelemetry.io/otel/trace v1.33.0 // indirect go.uber.org/multierr v1.11.0 // indirect go.uber.org/zap v1.27.0 // indirect golang.org/x/exp v0.0.0-20241217172543-b2144cdd0a67 // indirect golang.org/x/mod v0.22.0 // indirect golang.org/x/net v0.34.0 // indirect golang.org/x/oauth2 v0.25.0 // indirect golang.org/x/sync v0.10.0 // indirect golang.org/x/sys v0.29.0 // indirect golang.org/x/term v0.28.0 // indirect golang.org/x/text v0.21.0 // indirect google.golang.org/genproto/googleapis/api v0.0.0-20241223144023-3abc09e42ca8 // indirect google.golang.org/genproto/googleapis/rpc v0.0.0-20241223144023-3abc09e42ca8 // indirect google.golang.org/grpc v1.69.2 // indirect gopkg.in/ini.v1 v1.67.0 // indirect gopkg.in/warnings.v0 v0.1.2 // indirect gopkg.in/yaml.v3 v3.0.1 // indirect gotest.tools/v3 v3.5.1 // indirect k8s.io/klog/v2 v2.130.1 // indirect sigs.k8s.io/yaml v1.4.0 // indirect ) gittuf-0.9.0/go.sum000066400000000000000000002354071475150141000141660ustar00rootroot00000000000000cloud.google.com/go v0.116.0 h1:B3fRrSDkLRt5qSHWe40ERJvhvnQwdZiHu0bJOpldweE= cloud.google.com/go v0.116.0/go.mod h1:cEPSRWPzZEswwdr9BxE6ChEn01dWlTaF05LiC2Xs70U= cloud.google.com/go/auth v0.10.2 h1:oKF7rgBfSHdp/kuhXtqU/tNDr0mZqhYbEh+6SiqzkKo= cloud.google.com/go/auth v0.10.2/go.mod h1:xxA5AqpDrvS+Gkmo9RqrGGRh6WSNKKOXhY3zNOr38tI= cloud.google.com/go/auth/oauth2adapt v0.2.5 h1:2p29+dePqsCHPP1bqDJcKj4qxRyYCcbzKpFyKGt3MTk= cloud.google.com/go/auth/oauth2adapt v0.2.5/go.mod h1:AlmsELtlEBnaNTL7jCj8VQFLy6mbZv0s4Q7NGBeQ5E8= cloud.google.com/go/compute v1.24.0 h1:phWcR2eWzRJaL/kOiJwfFsPs4BaKq1j6vnpZrc1YlVg= cloud.google.com/go/compute/metadata v0.5.2 h1:UxK4uu/Tn+I3p2dYWTfiX4wva7aYlKixAHn3fyqngqo= cloud.google.com/go/compute/metadata v0.5.2/go.mod h1:C66sj2AluDcIqakBq/M8lw8/ybHgOZqin2obFxa/E5k= cloud.google.com/go/iam v1.2.2 h1:ozUSofHUGf/F4tCNy/mu9tHLTaxZFLOUiKzjcgWHGIA= cloud.google.com/go/iam v1.2.2/go.mod h1:0Ys8ccaZHdI1dEUilwzqng/6ps2YB6vRsjIe00/+6JY= cloud.google.com/go/kms v1.20.1 h1:og29Wv59uf2FVaZlesaiDAqHFzHaoUyHI3HYp9VUHVg= cloud.google.com/go/kms v1.20.1/go.mod h1:LywpNiVCvzYNJWS9JUcGJSVTNSwPwi0vBAotzDqn2nc= cloud.google.com/go/longrunning v0.6.2 h1:xjDfh1pQcWPEvnfjZmwjKQEcHnpz6lHjfy7Fo0MK+hc= cloud.google.com/go/longrunning v0.6.2/go.mod h1:k/vIs83RN4bE3YCswdXC5PFfWVILjm3hpEUlSko4PiI= cuelabs.dev/go/oci/ociregistry v0.0.0-20240404174027-a39bec0462d2 
h1:BnG6pr9TTr6CYlrJznYUDj6V7xldD1W+1iXPum0wT/w= cuelabs.dev/go/oci/ociregistry v0.0.0-20240404174027-a39bec0462d2/go.mod h1:pK23AUVXuNzzTpfMCA06sxZGeVQ/75FdVtW249de9Uo= cuelang.org/go v0.9.2 h1:pfNiry2PdRBr02G/aKm5k2vhzmqbAOoaB4WurmEbWvs= cuelang.org/go v0.9.2/go.mod h1:qpAYsLOf7gTM1YdEg6cxh553uZ4q9ZDWlPbtZr9q1Wk= dario.cat/mergo v1.0.1 h1:Ra4+bf83h2ztPIQYNP99R6m+Y7KfnARDfID+a+vLl4s= dario.cat/mergo v1.0.1/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk= filippo.io/edwards25519 v1.1.0 h1:FNf4tywRC1HmFuKW5xopWpigGjJKiJSV0Cqo0cJWDaA= filippo.io/edwards25519 v1.1.0/go.mod h1:BxyFTGdWcka3PhytdK4V28tE5sGfRvvvRV7EaN4VDT4= github.com/AdamKorcz/go-fuzz-headers-1 v0.0.0-20230919221257-8b5d3ce2d11d h1:zjqpY4C7H15HjRPEenkS4SAn3Jy2eRRjkjZbGR30TOg= github.com/AdamKorcz/go-fuzz-headers-1 v0.0.0-20230919221257-8b5d3ce2d11d/go.mod h1:XNqJ7hv2kY++g8XEHREpi+JqZo3+0l+CH2egBVN4yqM= github.com/AliyunContainerService/ack-ram-tool/pkg/credentials/provider v0.14.0 h1:kcnfY4vljxXliXDBrA9K9lwF8IoEZ4Up6Eg9kWTIm28= github.com/AliyunContainerService/ack-ram-tool/pkg/credentials/provider v0.14.0/go.mod h1:tlqp9mUGbsP+0z3Q+c0Q5MgSdq/OMwQhm5bffR3Q3ss= github.com/Azure/azure-sdk-for-go v68.0.0+incompatible h1:fcYLmCpyNYRnvJbPerq7U0hS+6+I79yEDJBqVNcqUzU= github.com/Azure/azure-sdk-for-go v68.0.0+incompatible/go.mod h1:9XXNKU+eRnpl9moKnB4QOLf1HestfXbmab5FXxiDBjc= github.com/Azure/azure-sdk-for-go/sdk/azcore v1.16.0 h1:JZg6HRh6W6U4OLl6lk7BZ7BLisIzM9dG1R50zUk9C/M= github.com/Azure/azure-sdk-for-go/sdk/azcore v1.16.0/go.mod h1:YL1xnZ6QejvQHWJrX/AvhFl4WW4rqHVoKspWNVwFk0M= github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.8.0 h1:B/dfvscEQtew9dVuoxqxrUKKv8Ih2f55PydknDamU+g= github.com/Azure/azure-sdk-for-go/sdk/azidentity v1.8.0/go.mod h1:fiPSssYvltE08HJchL04dOy+RD4hgrjph0cwGGMntdI= github.com/Azure/azure-sdk-for-go/sdk/internal v1.10.0 h1:ywEEhmNahHBihViHepv3xPBn1663uRv2t2q/ESv9seY= github.com/Azure/azure-sdk-for-go/sdk/internal v1.10.0/go.mod h1:iZDifYGJTIgIIkYRNWPENUnqx6bJ2xnSDFI2tjwZNuY= github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/azkeys v1.1.0 h1:DRiANoJTiW6obBQe3SqZizkuV1PEgfiiGivmVocDy64= github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/azkeys v1.1.0/go.mod h1:qLIye2hwb/ZouqhpSD9Zn3SJipvpEnz1Ywl3VUk9Y0s= github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/internal v1.0.0 h1:D3occbWoio4EBLkbkevetNMAVX197GkzbUMtqjGWn80= github.com/Azure/azure-sdk-for-go/sdk/security/keyvault/internal v1.0.0/go.mod h1:bTSOgj05NGRuHHhQwAdPnYr9TOdNmKlZTgGLL6nyAdI= github.com/Azure/go-autorest v14.2.0+incompatible h1:V5VMDjClD3GiElqLWO7mz2MxNAK/vTfRHdAubSIPRgs= github.com/Azure/go-autorest v14.2.0+incompatible/go.mod h1:r+4oMnoxhatjLLJ6zxSWATqVooLgysK6ZNox3g/xq24= github.com/Azure/go-autorest/autorest v0.11.29 h1:I4+HL/JDvErx2LjyzaVxllw2lRDB5/BT2Bm4g20iqYw= github.com/Azure/go-autorest/autorest v0.11.29/go.mod h1:ZtEzC4Jy2JDrZLxvWs8LrBWEBycl1hbT1eknI8MtfAs= github.com/Azure/go-autorest/autorest/adal v0.9.24 h1:BHZfgGsGwdkHDyZdtQRQk1WeUdW0m2WPAwuHZwUi5i4= github.com/Azure/go-autorest/autorest/adal v0.9.24/go.mod h1:7T1+g0PYFmACYW5LlG2fcoPiPlFHjClyRGL7dRlP5c8= github.com/Azure/go-autorest/autorest/azure/auth v0.5.13 h1:Ov8avRZi2vmrE2JcXw+tu5K/yB41r7xK9GZDiBF7NdM= github.com/Azure/go-autorest/autorest/azure/auth v0.5.13/go.mod h1:5BAVfWLWXihP47vYrPuBKKf4cS0bXI+KM9Qx6ETDJYo= github.com/Azure/go-autorest/autorest/azure/cli v0.4.6 h1:w77/uPk80ZET2F+AfQExZyEWtn+0Rk/uw17m9fv5Ajc= github.com/Azure/go-autorest/autorest/azure/cli v0.4.6/go.mod h1:piCfgPho7BiIDdEQ1+g4VmKyD5y+p/XtSNqE6Hc4QD0= 
github.com/Azure/go-autorest/autorest/date v0.3.0 h1:7gUk1U5M/CQbp9WoqinNzJar+8KY+LPI6wiWrP/myHw= github.com/Azure/go-autorest/autorest/date v0.3.0/go.mod h1:BI0uouVdmngYNUzGWeSYnokU+TrmwEsOqdt8Y6sso74= github.com/Azure/go-autorest/logger v0.2.1 h1:IG7i4p/mDa2Ce4TRyAO8IHnVhAVF3RFU+ZtXWSmf4Tg= github.com/Azure/go-autorest/logger v0.2.1/go.mod h1:T9E3cAhj2VqvPOtCYAvby9aBXkZmbF5NWuPV8+WeEW8= github.com/Azure/go-autorest/tracing v0.6.0 h1:TYi4+3m5t6K48TGI9AUdb+IzbnSxvnvUMfuitfgcfuo= github.com/Azure/go-autorest/tracing v0.6.0/go.mod h1:+vhtPC754Xsa23ID7GlGsrdKBpUA79WCAKPPZVC2DeU= github.com/AzureAD/microsoft-authentication-library-for-go v1.2.2 h1:XHOnouVk1mxXfQidrMEnLlPk9UMeRtyBTnEFtxkV0kU= github.com/AzureAD/microsoft-authentication-library-for-go v1.2.2/go.mod h1:wP83P5OoQ5p6ip3ScPr0BAq0BvuPAvacpEuSzyouqAI= github.com/Microsoft/go-winio v0.5.2/go.mod h1:WpS1mjBmmwHBEWmogvA2mj8546UReBk4v8QkMxJ6pZY= github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY= github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU= github.com/OneOfOne/xxhash v1.2.8 h1:31czK/TI9sNkxIKfaUfGlU47BAxQ0ztGgd9vPyqimf8= github.com/OneOfOne/xxhash v1.2.8/go.mod h1:eZbhyaAYD41SGSSsnmcpxVoRiQ/MPUTjUdIIOT9Um7Q= github.com/ProtonMail/go-crypto v1.1.5 h1:eoAQfK2dwL+tFSFpr7TbOaPNUbPiJj4fLYwwGE1FQO4= github.com/ProtonMail/go-crypto v1.1.5/go.mod h1:rA3QumHc/FZ8pAHreoekgiAbzpNsfQAosU5td4SnOrE= github.com/ThalesIgnite/crypto11 v1.2.5 h1:1IiIIEqYmBvUYFeMnHqRft4bwf/O36jryEUpY+9ef8E= github.com/ThalesIgnite/crypto11 v1.2.5/go.mod h1:ILDKtnCKiQ7zRoNxcp36Y1ZR8LBPmR2E23+wTQe/MlE= github.com/agnivade/levenshtein v1.1.1 h1:QY8M92nrzkmr798gCo3kmMyqXFzdQVpxLlGPRBij0P8= github.com/agnivade/levenshtein v1.1.1/go.mod h1:veldBMzWxcCG2ZvUTKD2kJNRdCk5hVbJomOvKkmgYbo= github.com/alessio/shellescape v1.4.1 h1:V7yhSDDn8LP4lc4jS8pFkt0zCnzVJlG5JXy9BVKJUX0= github.com/alessio/shellescape v1.4.1/go.mod h1:PZAiSCk0LJaZkiCSkPv8qIobYglO3FPpyFjDCtHLS30= github.com/alibabacloud-go/alibabacloud-gateway-spi v0.0.4 h1:iC9YFYKDGEy3n/FtqJnOkZsene9olVspKmkX5A2YBEo= github.com/alibabacloud-go/alibabacloud-gateway-spi v0.0.4/go.mod h1:sCavSAvdzOjul4cEqeVtvlSaSScfNsTQ+46HwlTL1hc= github.com/alibabacloud-go/cr-20160607 v1.0.1 h1:WEnP1iPFKJU74ryUKh/YDPHoxMZawqlPajOymyNAkts= github.com/alibabacloud-go/cr-20160607 v1.0.1/go.mod h1:QHeKZtZ3F3FOE+/uIXCBAp8POwnUYekpLwr1dtQa5r0= github.com/alibabacloud-go/cr-20181201 v1.0.10 h1:B60f6S1imsgn2fgC6X6FrVNrONDrbCT0NwYhsJ0C9/c= github.com/alibabacloud-go/cr-20181201 v1.0.10/go.mod h1:VN9orB/w5G20FjytoSpZROqu9ZqxwycASmGqYUJSoDc= github.com/alibabacloud-go/darabonba-openapi v0.2.1 h1:WyzxxKvhdVDlwpAMOHgAiCJ+NXa6g5ZWPFEzaK/ewwY= github.com/alibabacloud-go/darabonba-openapi v0.2.1/go.mod h1:zXOqLbpIqq543oioL9IuuZYOQgHQ5B8/n5OPrnko8aY= github.com/alibabacloud-go/debug v1.0.0 h1:3eIEQWfay1fB24PQIEzXAswlVJtdQok8f3EVN5VrBnA= github.com/alibabacloud-go/debug v1.0.0/go.mod h1:8gfgZCCAC3+SCzjWtY053FrOcd4/qlH6IHTI4QyICOc= github.com/alibabacloud-go/endpoint-util v1.1.1 h1:ZkBv2/jnghxtU0p+upSU0GGzW1VL9GQdZO3mcSUTUy8= github.com/alibabacloud-go/endpoint-util v1.1.1/go.mod h1:O5FuCALmCKs2Ff7JFJMudHs0I5EBgecXXxZRyswlEjE= github.com/alibabacloud-go/openapi-util v0.1.0 h1:0z75cIULkDrdEhkLWgi9tnLe+KhAFE/r5Pb3312/eAY= github.com/alibabacloud-go/openapi-util v0.1.0/go.mod h1:sQuElr4ywwFRlCCberQwKRFhRzIyG4QTP/P4y1CJ6Ws= github.com/alibabacloud-go/tea v1.2.2 h1:aTsR6Rl3ANWPfqeQugPglfurloyBJY85eFy7Gc1+8oU= github.com/alibabacloud-go/tea v1.2.2/go.mod 
h1:CF3vOzEMAG+bR4WOql8gc2G9H3EkH3ZLAQdpmpXMgwk= github.com/alibabacloud-go/tea-utils v1.4.5 h1:h0/6Xd2f3bPE4XHTvkpjwxowIwRCJAJOqY6Eq8f3zfA= github.com/alibabacloud-go/tea-utils v1.4.5/go.mod h1:KNcT0oXlZZxOXINnZBs6YvgOd5aYp9U67G+E3R8fcQw= github.com/alibabacloud-go/tea-xml v1.1.3 h1:7LYnm+JbOq2B+T/B0fHC4Ies4/FofC4zHzYtqw7dgt0= github.com/alibabacloud-go/tea-xml v1.1.3/go.mod h1:Rq08vgCcCAjHyRi/M7xlHKUykZCEtyBy9+DPF6GgEu8= github.com/aliyun/credentials-go v1.3.4 h1:X5nse+8s7ft00ANpoG3+bFJIqZVpjHbOg7G9gWQshVY= github.com/aliyun/credentials-go v1.3.4/go.mod h1:1LxUuX7L5YrZUWzBrRyk0SwSdH4OmPrib8NVePL3fxM= github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be h1:9AeTilPcZAjCFIImctFaOjnTIavg87rW78vTPkQqLI8= github.com/anmitsu/go-shlex v0.0.0-20200514113438-38f4b401e2be/go.mod h1:ySMOLuWl6zY27l47sB3qLNK6tF2fkHG55UZxx8oIVo4= github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5 h1:0CwZNZbxp69SHPdPJAN/hZIm0C4OItdklCFmMRWYpio= github.com/armon/go-socks5 v0.0.0-20160902184237-e75332964ef5/go.mod h1:wHh0iHkYZB8zMSxRWpUBQtwG5a7fFgvEO+odwuTv2gs= github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 h1:DklsrG3dyBCFEj5IhUbnKptjxatkF07cF2ak3yi77so= github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2/go.mod h1:WaHUgvxTVq04UNunO+XhnAqY/wQc+bxr74GqbsZ/Jqw= github.com/atotto/clipboard v0.1.4 h1:EH0zSVneZPSuFR11BlR9YppQTVDbh5+16AmcJi4g1z4= github.com/atotto/clipboard v0.1.4/go.mod h1:ZY9tmq7sm5xIbd9bOK4onWV4S6X0u6GY7Vn0Yu86PYI= github.com/aws/aws-sdk-go v1.55.5 h1:KKUZBfBoyqy5d3swXyiC7Q76ic40rYcbqH7qjh59kzU= github.com/aws/aws-sdk-go v1.55.5/go.mod h1:eRwEWoyTWFMVYVQzKMNHWP5/RV4xIUGMQfXQHfHkpNU= github.com/aws/aws-sdk-go-v2 v1.32.4 h1:S13INUiTxgrPueTmrm5DZ+MiAo99zYzHEFh1UNkOxNE= github.com/aws/aws-sdk-go-v2 v1.32.4/go.mod h1:2SK5n0a2karNTv5tbP1SjsX0uhttou00v/HpXKM1ZUo= github.com/aws/aws-sdk-go-v2/config v1.28.3 h1:kL5uAptPcPKaJ4q0sDUjUIdueO18Q7JDzl64GpVwdOM= github.com/aws/aws-sdk-go-v2/config v1.28.3/go.mod h1:SPEn1KA8YbgQnwiJ/OISU4fz7+F6Fe309Jf0QTsRCl4= github.com/aws/aws-sdk-go-v2/credentials v1.17.44 h1:qqfs5kulLUHUEXlHEZXLJkgGoF3kkUeFUTVA585cFpU= github.com/aws/aws-sdk-go-v2/credentials v1.17.44/go.mod h1:0Lm2YJ8etJdEdw23s+q/9wTpOeo2HhNE97XcRa7T8MA= github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.19 h1:woXadbf0c7enQ2UGCi8gW/WuKmE0xIzxBF/eD94jMKQ= github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.16.19/go.mod h1:zminj5ucw7w0r65bP6nhyOd3xL6veAUMc3ElGMoLVb4= github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.23 h1:A2w6m6Tmr+BNXjDsr7M90zkWjsu4JXHwrzPg235STs4= github.com/aws/aws-sdk-go-v2/internal/configsources v1.3.23/go.mod h1:35EVp9wyeANdujZruvHiQUAo9E3vbhnIO1mTCAxMlY0= github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.23 h1:pgYW9FCabt2M25MoHYCfMrVY2ghiiBKYWUVXfwZs+sU= github.com/aws/aws-sdk-go-v2/internal/endpoints/v2 v2.6.23/go.mod h1:c48kLgzO19wAu3CPkDWC28JbaJ+hfQlsdl7I2+oqIbk= github.com/aws/aws-sdk-go-v2/internal/ini v1.8.1 h1:VaRN3TlFdd6KxX1x3ILT5ynH6HvKgqdiXoTxAF4HQcQ= github.com/aws/aws-sdk-go-v2/internal/ini v1.8.1/go.mod h1:FbtygfRFze9usAadmnGJNc8KsP346kEe+y2/oyhGAGc= github.com/aws/aws-sdk-go-v2/service/ecr v1.28.5 h1:dvvTFXpWSv9+8lTNPl1EPNZL6BCUV6MgVckEMvXaOgk= github.com/aws/aws-sdk-go-v2/service/ecr v1.28.5/go.mod h1:Ogt6AOZ/sPBlJZpVFJgOK+jGGREuo8DMjNg+O/7gpjI= github.com/aws/aws-sdk-go-v2/service/ecrpublic v1.23.10 h1:dNXYTooy/H6NSIJ/zZqAVk/Ri4G4mqEWoz3btXhqI7E= github.com/aws/aws-sdk-go-v2/service/ecrpublic v1.23.10/go.mod h1:6JWi6AO/j/YgTOdu+XM2fRfoZTmferahXDwmravqSwQ= 
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.12.0 h1:TToQNkvGguu209puTojY/ozlqy2d/SFNcoLIqTFi42g= github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.12.0/go.mod h1:0jp+ltwkf+SwG2fm/PKo8t4y8pJSgOCO4D8Lz3k0aHQ= github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.12.4 h1:tHxQi/XHPK0ctd/wdOw0t7Xrc2OxcRCnVzv8lwWPu0c= github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.12.4/go.mod h1:4GQbF1vJzG60poZqWatZlhP31y8PGCCVTvIGPdaaYJ0= github.com/aws/aws-sdk-go-v2/service/kms v1.37.5 h1:5dQJ6Q5QrQOqZxXjSbRXukBqU8Pgu6Ro6Qqtyd8yiz4= github.com/aws/aws-sdk-go-v2/service/kms v1.37.5/go.mod h1:A9vfQcNHVBCE7ZZN6H+UUJpXtbH26Vv6L7Zhk5nIJAY= github.com/aws/aws-sdk-go-v2/service/sso v1.24.5 h1:HJwZwRt2Z2Tdec+m+fPjvdmkq2s9Ra+VR0hjF7V2o40= github.com/aws/aws-sdk-go-v2/service/sso v1.24.5/go.mod h1:wrMCEwjFPms+V86TCQQeOxQF/If4vT44FGIOFiMC2ck= github.com/aws/aws-sdk-go-v2/service/ssooidc v1.28.4 h1:zcx9LiGWZ6i6pjdcoE9oXAB6mUdeyC36Ia/QEiIvYdg= github.com/aws/aws-sdk-go-v2/service/ssooidc v1.28.4/go.mod h1:Tp/ly1cTjRLGBBmNccFumbZ8oqpZlpdhFf80SrRh4is= github.com/aws/aws-sdk-go-v2/service/sts v1.32.4 h1:yDxvkz3/uOKfxnv8YhzOi9m+2OGIxF+on3KOISbK5IU= github.com/aws/aws-sdk-go-v2/service/sts v1.32.4/go.mod h1:9XEUty5v5UAsMiFOBJrNibZgwCeOma73jgGwwhgffa8= github.com/aws/smithy-go v1.22.0 h1:uunKnWlcoL3zO7q+gG2Pk53joueEOsnNB28QdMsmiMM= github.com/aws/smithy-go v1.22.0/go.mod h1:irrKGvNn1InZwb2d7fkIRNucdfwR8R+Ts3wxYa/cJHg= github.com/awslabs/amazon-ecr-credential-helper/ecr-login v0.0.0-20240613164637-021de410d8a7 h1:9uQK1ME/KnZAJ4zsqIM6YbT7Tx1+nMh4WAeyh7Fw6Oc= github.com/awslabs/amazon-ecr-credential-helper/ecr-login v0.0.0-20240613164637-021de410d8a7/go.mod h1:LujQZufeLkg0H+0p3pmSkLlDQjmkwd5W7/f9yF9uYdk= github.com/aymanbagabas/go-osc52/v2 v2.0.1 h1:HwpRHbFMcZLEVr42D4p7XBqjyuxQH5SMiErDT4WkJ2k= github.com/aymanbagabas/go-osc52/v2 v2.0.1/go.mod h1:uYgXzlJ7ZpABp8OJ+exZzJJhRNQ2ASbcXHWsFqH8hp8= github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= github.com/blang/semver v3.5.1+incompatible h1:cQNTCjp13qL8KC3Nbxr/y2Bqb63oX6wdnnjpJbkM4JQ= github.com/blang/semver v3.5.1+incompatible/go.mod h1:kRBLl5iJ+tD4TcOOxsy/0fnwebNt5EWlYSAyrTnjyyk= github.com/buildkite/agent/v3 v3.81.0 h1:JVfkng2XnsXesFXwiFwLJFkuzVu4zvoJCvedfoIXD6E= github.com/buildkite/agent/v3 v3.81.0/go.mod h1:edJeyycODRxaFvpT22rDGwaQ5oa4eB8GjtbjgX5VpFw= github.com/buildkite/go-pipeline v0.13.1 h1:Y9p8pQIwPtauVwNrcmTDH6+XK7jE1nLuvWVaK8oymA8= github.com/buildkite/go-pipeline v0.13.1/go.mod h1:2HHqlSFTYgHFhzedJu0LhLs9n5c9XkYnHiQFVN5HE4U= github.com/buildkite/interpolate v0.1.3 h1:OFEhqji1rNTRg0u9DsSodg63sjJQEb1uWbENq9fUOBM= github.com/buildkite/interpolate v0.1.3/go.mod h1:UNVe6A+UfiBNKbhAySrBbZFZFxQ+DXr9nWen6WVt/A8= github.com/buildkite/roko v1.2.0 h1:hbNURz//dQqNl6Eo9awjQOVOZwSDJ8VEbBDxSfT9rGQ= github.com/buildkite/roko v1.2.0/go.mod h1:23R9e6nHxgedznkwwfmqZ6+0VJZJZ2Sg/uVcp2cP46I= github.com/cenkalti/backoff/v4 v4.3.0 h1:MyRJ/UdXutAwSAT+s3wNd7MfTIcy71VQueUuFK343L8= github.com/cenkalti/backoff/v4 v4.3.0/go.mod h1:Y3VNntkOUPxTVeUxJ/G5vcM//AlwfmyYozVcomhLiZE= github.com/certifi/gocertifi v0.0.0-20180118203423-deb3ae2ef261/go.mod h1:GJKEexRPVJrBSOjoqN5VNOIKJ5Q3RViH6eu3puDRwx4= github.com/cespare/xxhash/v2 v2.3.0 h1:UL815xU9SqsFlibzuggzjXhog7bL6oX9BbNZnL2UFvs= github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs= github.com/charmbracelet/bubbles v0.20.0 
h1:jSZu6qD8cRQ6k9OMfR1WlM+ruM8fkPWkHvQWD9LIutE= github.com/charmbracelet/bubbles v0.20.0/go.mod h1:39slydyswPy+uVOHZ5x/GjwVAFkCsV8IIVy+4MhzwwU= github.com/charmbracelet/bubbletea v1.3.0 h1:fPMyirm0u3Fou+flch7hlJN9krlnVURrkUVDwqXjoAc= github.com/charmbracelet/bubbletea v1.3.0/go.mod h1:eTaHfqbIwvBhFQM/nlT1NsGc4kp8jhF8LfUK67XiTDM= github.com/charmbracelet/lipgloss v1.0.0 h1:O7VkGDvqEdGi93X+DeqsQ7PKHDgtQfF8j8/O2qFMQNg= github.com/charmbracelet/lipgloss v1.0.0/go.mod h1:U5fy9Z+C38obMs+T+tJqst9VGzlOYGj4ri9reL3qUlo= github.com/charmbracelet/x/ansi v0.8.0 h1:9GTq3xq9caJW8ZrBTe0LIe2fvfLR/bYXKTx2llXn7xE= github.com/charmbracelet/x/ansi v0.8.0/go.mod h1:wdYl/ONOLHLIVmQaxbIYEC/cRKOQyjTkowiI4blgS9Q= github.com/charmbracelet/x/term v0.2.1 h1:AQeHeLZ1OqSXhrAWpYUtZyX1T3zVxfpZuEQMIQaGIAQ= github.com/charmbracelet/x/term v0.2.1/go.mod h1:oQ4enTYFV7QN4m0i9mzHrViD7TQKvNEEkHUMCmsxdUg= github.com/chrismellard/docker-credential-acr-env v0.0.0-20230304212654-82a0ddb27589 h1:krfRl01rzPzxSxyLyrChD+U+MzsBXbm0OwYYB67uF+4= github.com/chrismellard/docker-credential-acr-env v0.0.0-20230304212654-82a0ddb27589/go.mod h1:OuDyvmLnMCwa2ep4Jkm6nyA0ocJuZlGyk2gGseVzERM= github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI= github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= github.com/clbanning/mxj/v2 v2.7.0 h1:WA/La7UGCanFe5NpHF0Q3DNtnCsVoxbPKuyBNHWRyME= github.com/clbanning/mxj/v2 v2.7.0/go.mod h1:hNiWqW14h+kc+MdF9C6/YoRfjEJoR3ou6tn/Qo+ve2s= github.com/cloudflare/circl v1.5.0 h1:hxIWksrX6XN5a1L2TI/h53AGPhNHoUBo+TD1ms9+pys= github.com/cloudflare/circl v1.5.0/go.mod h1:uddAzsPgqdMAYatqJ0lsjX1oECcQLIlRpzZh3pJrofs= github.com/cockroachdb/apd/v3 v3.2.1 h1:U+8j7t0axsIgvQUqthuNm82HIrYXodOV2iWLWtEaIwg= github.com/cockroachdb/apd/v3 v3.2.1/go.mod h1:klXJcjp+FffLTHlhIG69tezTDvdP065naDsHzKhYSqc= github.com/codahale/rfc6979 v0.0.0-20141003034818-6a90f24967eb h1:EDmT6Q9Zs+SbUoc7Ik9EfrFqcylYqgPZ9ANSbTAntnE= github.com/codahale/rfc6979 v0.0.0-20141003034818-6a90f24967eb/go.mod h1:ZjrT6AXHbDs86ZSdt/osfBi5qfexBrKUdONk989Wnk4= github.com/common-nighthawk/go-figure v0.0.0-20210622060536-734e95fb86be h1:J5BL2kskAlV9ckgEsNQXscjIaLiOYiZ75d4e94E6dcQ= github.com/common-nighthawk/go-figure v0.0.0-20210622060536-734e95fb86be/go.mod h1:mk5IQ+Y0ZeO87b858TlA645sVcEcbiX6YqP98kt+7+w= github.com/containerd/stargz-snapshotter/estargz v0.16.3 h1:7evrXtoh1mSbGj/pfRccTampEyKpjpOnS3CyiV1Ebr8= github.com/containerd/stargz-snapshotter/estargz v0.16.3/go.mod h1:uyr4BfYfOj3G9WBVE8cOlQmXAbPN9VEQpBBeJIuOipU= github.com/coreos/go-oidc/v3 v3.12.0 h1:sJk+8G2qq94rDI6ehZ71Bol3oUHy63qNYmkiSjrc/Jo= github.com/coreos/go-oidc/v3 v3.12.0/go.mod h1:gE3LgjOgFoHi9a4ce4/tJczr0Ai2/BoDhf0r5lltWI0= github.com/cpuguy83/go-md2man/v2 v2.0.4/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= github.com/cpuguy83/go-md2man/v2 v2.0.5 h1:ZtcqGrnekaHpVLArFSe4HK5DoKx1T0rq2DwVB0alcyc= github.com/cpuguy83/go-md2man/v2 v2.0.5/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= github.com/cyberphone/json-canonicalization v0.0.0-20241213102144-19d51d7fe467 h1:uX1JmpONuD549D73r6cgnxyUu18Zb7yHAy5AYU0Pm4Q= github.com/cyberphone/json-canonicalization v0.0.0-20241213102144-19d51d7fe467/go.mod h1:uzvlm1mxhHkdfqitSA92i7Se+S9ksOn3a3qmv/kyOCw= github.com/cyphar/filepath-securejoin v0.3.6 h1:4d9N5ykBnSp5Xn2JkhocYDkOpURL/18CYMpo6xB9uWM= github.com/cyphar/filepath-securejoin v0.3.6/go.mod 
h1:Sdj7gXlvMcPZsbhwhQ33GguGLDGQL7h7bg04C/+u9jI= github.com/danieljoos/wincred v1.2.1 h1:dl9cBrupW8+r5250DYkYxocLeZ1Y4vB1kxgtjxw8GQs= github.com/danieljoos/wincred v1.2.1/go.mod h1:uGaFL9fDn3OLTvzCGulzE+SzjEe5NGlh5FdCcyfPwps= github.com/danwakefield/fnmatch v0.0.0-20160403171240-cbb64ac3d964 h1:y5HC9v93H5EPKqaS1UYVg1uYah5Xf51mBfIoWehClUQ= github.com/danwakefield/fnmatch v0.0.0-20160403171240-cbb64ac3d964/go.mod h1:Xd9hchkHSWYkEqJwUGisez3G1QY8Ryz0sdWrLPMGjLk= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM= github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/digitorus/pkcs7 v0.0.0-20230713084857-e76b763bdc49/go.mod h1:SKVExuS+vpu2l9IoOc0RwqE7NYnb0JlcFHFnEJkVDzc= github.com/digitorus/pkcs7 v0.0.0-20230818184609-3a137a874352 h1:ge14PCmCvPjpMQMIAH7uKg0lrtNSOdpYsRXlwk3QbaE= github.com/digitorus/pkcs7 v0.0.0-20230818184609-3a137a874352/go.mod h1:SKVExuS+vpu2l9IoOc0RwqE7NYnb0JlcFHFnEJkVDzc= github.com/digitorus/timestamp v0.0.0-20231217203849-220c5c2851b7 h1:lxmTCgmHE1GUYL7P0MlNa00M67axePTq+9nBSGddR8I= github.com/digitorus/timestamp v0.0.0-20231217203849-220c5c2851b7/go.mod h1:GvWntX9qiTlOud0WkQ6ewFm0LPy5JUR1Xo0Ngbd1w6Y= github.com/dimchansky/utfbom v1.1.1 h1:vV6w1AhK4VMnhBno/TPVCoK9U/LP0PkLCS9tbxHdi/U= github.com/dimchansky/utfbom v1.1.1/go.mod h1:SxdoEBH5qIqFocHMyGOXVAybYJdr71b1Q/j0mACtrfE= github.com/docker/cli v27.4.1+incompatible h1:VzPiUlRJ/xh+otB75gva3r05isHMo5wXDfPRi5/b4hI= github.com/docker/cli v27.4.1+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8= github.com/docker/distribution v2.8.3+incompatible h1:AtKxIZ36LoNK51+Z6RpzLpddBirtxJnzDrHLEKxTAYk= github.com/docker/distribution v2.8.3+incompatible/go.mod h1:J2gT2udsDAN96Uj4KfcMRqY0/ypR+oyYUYmja8H+y+w= github.com/docker/docker-credential-helpers v0.8.2 h1:bX3YxiGzFP5sOXWc3bTPEXdEaZSeVMrFgOr3T+zrFAo= github.com/docker/docker-credential-helpers v0.8.2/go.mod h1:P3ci7E3lwkZg6XiHdRKft1KckHiO9a2rNtyFbZ/ry9M= github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY= github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto= github.com/elazarl/goproxy v1.4.0 h1:4GyuSbFa+s26+3rmYNSuUVsx+HgPrV1bk1jXI0l9wjM= github.com/elazarl/goproxy v1.4.0/go.mod h1:X/5W/t+gzDyLfHW4DrMdpjqYjpXsURlBt9lpBDxZZZQ= github.com/emicklei/go-restful/v3 v3.12.1 h1:PJMDIM/ak7btuL8Ex0iYET9hxM3CI2sjZtzpL63nKAU= github.com/emicklei/go-restful/v3 v3.12.1/go.mod h1:6n3XBCmQQb25CM2LCACGz8ukIrRry+4bhvbpWn3mrbc= github.com/emicklei/proto v1.12.1 h1:6n/Z2pZAnBwuhU66Gs8160B8rrrYKo7h2F2sCOnNceE= github.com/emicklei/proto v1.12.1/go.mod h1:rn1FgRS/FANiZdD2djyH7TMA9jdRDcYQ9IEN9yvjX0A= github.com/emirpasic/gods v1.18.1 h1:FXtiHYKDGKCW2KzwZKx0iC0PQmdlorYgdFG9jPXJ1Bc= github.com/emirpasic/gods v1.18.1/go.mod h1:8tpGGwCnJ5H4r6BWwaV6OrWmMoPhUl5jm/FMNAnJvWQ= github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f h1:Y/CXytFA4m6baUTXGLOoWe4PQhGxaX0KpnayAqC48p4= github.com/erikgeiser/coninput v0.0.0-20211004153227-1c3628e74d0f/go.mod h1:vw97MGsxSvLiUE2X8qFplwetxpGLQrlU1Q9AUEIzCaM= github.com/fatih/color v1.16.0 h1:zmkK9Ngbjj+K0yRhTVONQh1p/HknKYSlNT+vZCzyokM= github.com/fatih/color v1.16.0/go.mod h1:fL2Sau1YI5c0pdGEVCbKQbLXB6edEj1ZgiY4NijnWvE= github.com/felixge/httpsnoop v1.0.4 
h1:NFTV2Zj1bL4mc9sqWACXbQFVBBg2W3GPvqp8/ESS2Wg= github.com/felixge/httpsnoop v1.0.4/go.mod h1:m8KPJKqk1gH5J9DgRY2ASl2lWCfGKXixSwevea8zH2U= github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8= github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0= github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ= github.com/fsnotify/fsnotify v1.5.4/go.mod h1:OVB6XrOHzAwXMpEM7uPOzcehqUV2UqJxmVXmkdnm1bU= github.com/fsnotify/fsnotify v1.8.0 h1:dAwr6QBTBZIkG8roQaJjGof0pp0EeF+tNV7YBP3F/8M= github.com/fsnotify/fsnotify v1.8.0/go.mod h1:8jBTzvmWwFyi3Pb8djgCCO5IBqzKJ/Jwo8TRcHyHii0= github.com/github/smimesign v0.2.0 h1:Hho4YcX5N1I9XNqhq0fNx0Sts8MhLonHd+HRXVGNjvk= github.com/github/smimesign v0.2.0/go.mod h1:iZiiwNT4HbtGRVqCQu7uJPEZCuEE5sfSSttcnePkDl4= github.com/gliderlabs/ssh v0.3.8 h1:a4YXD1V7xMF9g5nTkdfnja3Sxy1PVDCj1Zg4Wb8vY6c= github.com/gliderlabs/ssh v0.3.8/go.mod h1:xYoytBv1sV0aL3CavoDuJIQNURXkkfPA/wxQ1pL1fAU= github.com/go-chi/chi v4.1.2+incompatible h1:fGFk2Gmi/YKXk0OmGfBh0WgmN3XB8lVnEyNz34tQRec= github.com/go-chi/chi v4.1.2+incompatible/go.mod h1:eB3wogJHnLi3x/kFX2A+IbTBlXxmMeXJVKy9tTv1XzQ= github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 h1:+zs/tPmkDkHx3U66DAb0lQFJrpS6731Oaa12ikc+DiI= github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376/go.mod h1:an3vInlBmSxCcxctByoQdvwPiA7DTK7jaaFDBTtu0ic= github.com/go-git/go-billy/v5 v5.6.2 h1:6Q86EsPXMa7c3YZ3aLAQsMA0VlWmy43r6FHqa/UNbRM= github.com/go-git/go-billy/v5 v5.6.2/go.mod h1:rcFC2rAsp/erv7CMz9GczHcuD0D32fWzH+MJAU+jaUU= github.com/go-git/go-git-fixtures/v4 v4.3.2-0.20231010084843-55a94097c399 h1:eMje31YglSBqCdIqdhKBW8lokaMrL3uTkpGYlE2OOT4= github.com/go-git/go-git-fixtures/v4 v4.3.2-0.20231010084843-55a94097c399/go.mod h1:1OCfN199q1Jm3HZlxleg+Dw/mwps2Wbk9frAWm+4FII= github.com/go-git/go-git/v5 v5.13.2 h1:7O7xvsK7K+rZPKW6AQR1YyNhfywkv7B8/FsP3ki6Zv0= github.com/go-git/go-git/v5 v5.13.2/go.mod h1:hWdW5P4YZRjmpGHwRH2v3zkWcNl6HeXaXQEMGb3NJ9A= github.com/go-ini/ini v1.67.0 h1:z6ZrTEZqSWOTyH2FlglNbNgARyHG8oLW9gMELqKr06A= github.com/go-ini/ini v1.67.0/go.mod h1:ByCAeIL28uOIIG0E3PJtZPDL8WnHpFKFOtgjp+3Ies8= github.com/go-jose/go-jose/v3 v3.0.3 h1:fFKWeig/irsp7XD2zBxvnmA/XaRWp5V3CBsZXJF7G7k= github.com/go-jose/go-jose/v3 v3.0.3/go.mod h1:5b+7YgP7ZICgJDBdfjZaIt+H/9L9T/YQrVfLAMboGkQ= github.com/go-jose/go-jose/v4 v4.0.4 h1:VsjPI33J0SB9vQM6PLmNjoHqMQNGPiZ0rHL7Ni7Q6/E= github.com/go-jose/go-jose/v4 v4.0.4/go.mod h1:NKb5HO1EZccyMpiZNbdUw/14tiXNyUJh188dfnMCAfc= github.com/go-logr/logr v1.2.2/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbVz/1A= github.com/go-logr/logr v1.4.2 h1:6pFjapn8bFcIbiKo3XT4j/BhANplGihG6tvd+8rYgrY= github.com/go-logr/logr v1.4.2/go.mod h1:9T104GzyrTigFIr8wt5mBrctHMim0Nb2HLGrmQ40KvY= github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag= github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE= github.com/go-openapi/analysis v0.23.0 h1:aGday7OWupfMs+LbmLZG4k0MYXIANxcuBTYUC03zFCU= github.com/go-openapi/analysis v0.23.0/go.mod h1:9mz9ZWaSlV8TvjQHLl2mUW2PbZtemkE8yA5v22ohupo= github.com/go-openapi/errors v0.22.0 h1:c4xY/OLxUBSTiepAg3j/MHuAv5mJhnf53LLMWFB+u/w= github.com/go-openapi/errors v0.22.0/go.mod h1:J3DmZScxCDufmIMsdOuDHxJbdOGC0xtUynjIx092vXE= github.com/go-openapi/jsonpointer v0.21.0 h1:YgdVicSA9vH5RiHs9TZW5oyafXZFc6+2Vc1rr/O9oNQ= github.com/go-openapi/jsonpointer v0.21.0/go.mod 
h1:IUyH9l/+uyhIYQ/PXVA41Rexl+kOkAPDdXEYns6fzUY= github.com/go-openapi/jsonreference v0.21.0 h1:Rs+Y7hSXT83Jacb7kFyjn4ijOuVGSvOdF2+tg1TRrwQ= github.com/go-openapi/jsonreference v0.21.0/go.mod h1:LmZmgsrTkVg9LG4EaHeY8cBDslNPMo06cago5JNLkm4= github.com/go-openapi/loads v0.22.0 h1:ECPGd4jX1U6NApCGG1We+uEozOAvXvJSF4nnwHZ8Aco= github.com/go-openapi/loads v0.22.0/go.mod h1:yLsaTCS92mnSAZX5WWoxszLj0u+Ojl+Zs5Stn1oF+rs= github.com/go-openapi/runtime v0.28.0 h1:gpPPmWSNGo214l6n8hzdXYhPuJcGtziTOgUpvsFWGIQ= github.com/go-openapi/runtime v0.28.0/go.mod h1:QN7OzcS+XuYmkQLw05akXk0jRH/eZ3kb18+1KwW9gyc= github.com/go-openapi/spec v0.21.0 h1:LTVzPc3p/RzRnkQqLRndbAzjY0d0BCL72A6j3CdL9ZY= github.com/go-openapi/spec v0.21.0/go.mod h1:78u6VdPw81XU44qEWGhtr982gJ5BWg2c0I5XwVMotYk= github.com/go-openapi/strfmt v0.23.0 h1:nlUS6BCqcnAk0pyhi9Y+kdDVZdZMHfEKQiS4HaMgO/c= github.com/go-openapi/strfmt v0.23.0/go.mod h1:NrtIpfKtWIygRkKVsxh7XQMDQW5HKQl6S5ik2elW+K4= github.com/go-openapi/swag v0.23.0 h1:vsEVJDUo2hPJ2tu0/Xc+4noaxyEffXNIs3cOULZ+GrE= github.com/go-openapi/swag v0.23.0/go.mod h1:esZ8ITTYEsH1V2trKHjAN8Ai7xHb8RV+YSZ577vPjgQ= github.com/go-openapi/validate v0.24.0 h1:LdfDKwNbpB6Vn40xhTdNZAnfLECL81w+VX3BumrGD58= github.com/go-openapi/validate v0.24.0/go.mod h1:iyeX1sEufmv3nPbBdX3ieNviWnOZaJ1+zquzJEf2BAQ= github.com/go-piv/piv-go v1.11.0 h1:5vAaCdRTFSIW4PeqMbnsDlUZ7odMYWnHBDGdmtU/Zhg= github.com/go-piv/piv-go v1.11.0/go.mod h1:NZ2zmjVkfFaL/CF8cVQ/pXdXtuj110zEKGdJM6fJZZM= github.com/go-rod/rod v0.116.2 h1:A5t2Ky2A+5eD/ZJQr1EfsQSe5rms5Xof/qj296e+ZqA= github.com/go-rod/rod v0.116.2/go.mod h1:H+CMO9SCNc2TJ2WfrG+pKhITz57uGNYU43qYHh438Mg= github.com/go-sql-driver/mysql v1.8.1 h1:LedoTUt/eveggdHS9qUFC1EFSa8bU2+1pZjSRpvNJ1Y= github.com/go-sql-driver/mysql v1.8.1/go.mod h1:wEBSXgmK//2ZFJyE+qWnIsVGmvmEKlqwuVSjsCm7DZg= github.com/go-task/slim-sprig v0.0.0-20210107165309-348f09dbbbc0/go.mod h1:fyg7847qk6SyHyPtNmDHnmrv/HOrqktSC+C9fM+CJOE= github.com/go-test/deep v1.1.1 h1:0r/53hagsehfO4bzD2Pgr/+RgHqhmf+k1Bpse2cTu1U= github.com/go-test/deep v1.1.1/go.mod h1:5C2ZWiW0ErCdrYzpqxLbTX7MG14M9iiw8DgHncVwcsE= github.com/gobwas/glob v0.2.3 h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y= github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8= github.com/godbus/dbus/v5 v5.1.0 h1:4KLkAxT3aOY8Li4FRJe/KvhoNFFxo0m6fNuFUO8QJUk= github.com/godbus/dbus/v5 v5.1.0/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= github.com/golang-jwt/jwt/v4 v4.5.1 h1:JdqV9zKUdtaa9gdPlywC3aeoEsR681PlKC+4F5gQgeo= github.com/golang-jwt/jwt/v4 v4.5.1/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0= github.com/golang-jwt/jwt/v5 v5.2.1 h1:OuVbFODueb089Lh128TAcimifWaLhJwVflnrgM17wHk= github.com/golang-jwt/jwt/v5 v5.2.1/go.mod h1:pqrtFR0X4osieyHYxtmOUWsAWrfe1Q5UVIyoH402zdk= github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8 h1:f+oWsMOmNPc8JmEHVZIycC7hBoQxHH9pNKQORJNozsQ= github.com/golang/groupcache v0.0.0-20241129210726-2c02b8208cf8/go.mod h1:wcDNUvekVysuuOpQKo3191zZyTpiI6se1N1ULghS0sw= github.com/golang/mock v1.6.0 h1:ErTB+efbowRARo13NNdxyJji2egdxLGQhRaY+DUumQc= github.com/golang/mock v1.6.0/go.mod h1:p6yTPP+5HYm5mzsMV8JkE6ZKdX+/wYM6Hr+LicevLPs= github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8= github.com/golang/protobuf 
v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA= github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs= github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w= github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0= github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI= github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= github.com/golang/protobuf v1.5.2/go.mod h1:XVQd3VNwM+JqD3oG2Ue2ip4fOMUkwXdXDdiuN0vRsmY= github.com/golang/protobuf v1.5.4 h1:i7eJL8qZTpSEXOPTxNKhASYpMn+8e5Q6AdndVa1dWek= github.com/golang/protobuf v1.5.4/go.mod h1:lnTiLA8Wa4RWRcIUkrtSVa5nRhsEGBg48fD6rSs7xps= github.com/golang/snappy v0.0.4 h1:yAGX7huGHXlcLOEtBnF4w7FQwA26wojNCwOYAEhLjQM= github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= github.com/google/certificate-transparency-go v1.3.0 h1:+UhSNQAyA38Ed4CGfwOZeG4sJ030ELQZE4xtMFOxA7U= github.com/google/certificate-transparency-go v1.3.0/go.mod h1:/xVlT13jyrOuJOXTW5PjCBCrHBtXUq/jT5UeW40xliQ= github.com/google/gnostic-models v0.6.9-0.20230804172637-c7be7c783f49 h1:0VpGH+cDhbDtdcweoyCVsF3fhN8kejK6rFe/2FFX2nU= github.com/google/gnostic-models v0.6.9-0.20230804172637-c7be7c783f49/go.mod h1:BkkQ4L1KS1xMt2aWSPStnn55ChGC0DPOn2FQYj+f25M= github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.2/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= github.com/google/go-containerregistry v0.20.2 h1:B1wPJ1SN/S7pB+ZAimcciVD+r+yV/l/DSArMxlbwseo= github.com/google/go-containerregistry v0.20.2/go.mod h1:z38EKdKh4h7IP2gSfUUqEvalZBqs6AoLeWfUy34nQC8= github.com/google/go-github/v55 v55.0.0 h1:4pp/1tNMB9X/LuAhs5i0KQAE40NmiR/y6prLNb9x9cg= github.com/google/go-github/v55 v55.0.0/go.mod h1:JLahOTA1DnXzhxEymmFF5PP2tSS9JVNj68mSZNDwskA= github.com/google/go-github/v61 v61.0.0 h1:VwQCBwhyE9JclCI+22/7mLB1PuU9eowCXKY5pNlu1go= github.com/google/go-github/v61 v61.0.0/go.mod h1:0WR+KmsWX75G2EbpyGsGmradjo3IiciuI4BmdVCobQY= github.com/google/go-querystring v1.1.0 h1:AnCroh3fv4ZBgVIf1Iwtovgjaw/GiKJo8M8yD/fhyJ8= github.com/google/go-querystring v1.1.0/go.mod h1:Kcdr2DB4koayq7X8pmAG4sNG59So17icRSOU623lUBU= github.com/google/gofuzz v1.2.0 h1:xRy4A+RhZaiKjJ1bPfwQ8sedCA+YS2YcCHW6ec7JMi0= github.com/google/gofuzz v1.2.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg= github.com/google/pprof v0.0.0-20210407192527-94a9f03dee38/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE= github.com/google/s2a-go v0.1.8 h1:zZDs9gcbt9ZPLV0ndSyQk6Kacx2g/X+SKYovpnz3SMM= github.com/google/s2a-go v0.1.8/go.mod h1:6iNWHTpQ+nfNRN5E00MSdfDwVesa8hhS32PhPO8deJA= github.com/google/tink/go v1.7.0 h1:6Eox8zONGebBFcCBqkVmt60LaWZa6xg1cl/DwAh/J1w= github.com/google/tink/go v1.7.0/go.mod h1:GAUOd+QE3pgj9q8VKIGTCP33c/B7eb4NhxLcgTJZStM= github.com/google/trillian v1.7.0 
h1:Oib7mKRvZ0Z3GjvNcn2C4clRmFouEOkBcbzw7q8JlFI= github.com/google/trillian v1.7.0/go.mod h1:JMp1zzzHe7j2m9m8P/eTWOaoon3R/SwgqUnFMhm4vfw= github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/googleapis/enterprise-certificate-proxy v0.3.4 h1:XYIDZApgAnrN1c855gTgghdIA6Stxb52D5RnLI1SLyw= github.com/googleapis/enterprise-certificate-proxy v0.3.4/go.mod h1:YKe7cfqYXjKGpGvmSg28/fFvhNzinZQm8DGnaburhGA= github.com/googleapis/gax-go/v2 v2.14.0 h1:f+jMrjBPl+DL9nI4IQzLUxMq7XrAqFYB7hBPqMNIe8o= github.com/googleapis/gax-go/v2 v2.14.0/go.mod h1:lhBCnjdLrWRaPvLWhmc8IS24m9mr07qSYnHncrgo+zk= github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY= github.com/gorilla/mux v1.8.1/go.mod h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWSiMQ= github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I= github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= github.com/hashicorp/go-cleanhttp v0.5.2 h1:035FKYIWjmULyFRBKPs8TBQoi0x6d9G4xc9neXJWAZQ= github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48= github.com/hashicorp/go-hclog v1.6.3 h1:Qr2kF+eVWjTiYmU7Y31tYlP1h0q/X3Nl3tPGdaB11/k= github.com/hashicorp/go-hclog v1.6.3/go.mod h1:W4Qnvbt70Wk/zYJryRzDRU/4r0kIg0PVHBcfoyhpF5M= github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo= github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= github.com/hashicorp/go-retryablehttp v0.7.7 h1:C8hUCYzor8PIfXHa4UrZkU4VvK8o9ISHxT2Q8+VepXU= github.com/hashicorp/go-retryablehttp v0.7.7/go.mod h1:pkQpWZeYWskR+D1tR2O5OcBFOxfA7DoAO6xtkuQnHTk= github.com/hashicorp/go-rootcerts v1.0.2 h1:jzhAVGtqPKbwpyCPELlgNWhE1znq+qwJtW5Oi2viEzc= github.com/hashicorp/go-rootcerts v1.0.2/go.mod h1:pqUvnprVnM5bf7AOirdbb01K4ccR319Vf4pU3K5EGc8= github.com/hashicorp/go-secure-stdlib/parseutil v0.1.7 h1:UpiO20jno/eV1eVZcxqWnUohyKRe1g8FPV/xH1s/2qs= github.com/hashicorp/go-secure-stdlib/parseutil v0.1.7/go.mod h1:QmrqtbKuxxSWTN3ETMPuB+VtEiBJ/A9XhoYGv8E1uD8= github.com/hashicorp/go-secure-stdlib/strutil v0.1.2 h1:kes8mmyCpxJsI7FTwtzRqEy9CdjCtrXrXGuOpxEA7Ts= github.com/hashicorp/go-secure-stdlib/strutil v0.1.2/go.mod h1:Gou2R9+il93BqX25LAKCLuM+y9U2T4hlwvT1yprcna4= github.com/hashicorp/go-sockaddr v1.0.5 h1:dvk7TIXCZpmfOlM+9mlcrWmWjw/wlKT+VDq2wMvfPJU= github.com/hashicorp/go-sockaddr v1.0.5/go.mod h1:uoUUmtwU7n9Dv3O4SNLeFvg0SxQ3lyjsj6+CCykpaxI= github.com/hashicorp/golang-lru v0.5.4 h1:YDjusn29QI/Das2iO9M0BHnIbxPeyuCHsjMW+lJfyTc= github.com/hashicorp/golang-lru/v2 v2.0.7 h1:a+bsQ5rvGLjzHuww6tVxozPZFVghXaHOwFs4luLUK2k= github.com/hashicorp/golang-lru/v2 v2.0.7/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM= github.com/hashicorp/hcl v1.0.1-vault-7 h1:ag5OxFVy3QYTFTJODRzTKVZ6xvdfLLCA1cy/Y6xGI0I= github.com/hashicorp/hcl v1.0.1-vault-7/go.mod h1:XYhtn6ijBSAj6n4YqAaf7RBPS4I06AItNorpy+MoQNM= github.com/hashicorp/vault/api v1.15.0 h1:O24FYQCWwhwKnF7CuSqP30S51rTV7vz1iACXE/pj5DA= github.com/hashicorp/vault/api v1.15.0/go.mod h1:+5YTO09JGn0u+b6ySD/LLVf8WkJCPLAL2Vkmrn2+CM8= github.com/hiddeco/sshsig v0.1.0 h1:ehWA9PeBtDVAU7uULxUbQgw2e/JAB+ZKN29TIO33QUk= github.com/hiddeco/sshsig v0.1.0/go.mod h1:PtIDi8GwgjGQDK0fUF1XhC24wjOymNbyiWd0NzXxTwo= github.com/howeyc/gopass v0.0.0-20210920133722-c8aef6fb66ef h1:A9HsByNhogrvm9cWb28sjiS3i7tcKCkflWFEkHfuAgM= github.com/howeyc/gopass v0.0.0-20210920133722-c8aef6fb66ef/go.mod 
h1:lADxMC39cJJqL93Duh1xhAs4I2Zs8mKS89XWXFGp9cs= github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU= github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc= github.com/imdario/mergo v0.3.16 h1:wwQJbIsHYGMUyLSPrEq1CT16AhnhNJQ51+4fdHUnCl4= github.com/imdario/mergo v0.3.16/go.mod h1:WBLT9ZmE3lPoWsEzCh9LPo3TiwVN+ZKEjmz+hD27ysY= github.com/in-toto/attestation v1.1.1 h1:QD3d+oATQ0dFsWoNh5oT0udQ3tUrOsZZ0Fc3tSgWbzI= github.com/in-toto/attestation v1.1.1/go.mod h1:Dcq1zVwA2V7Qin8I7rgOi+i837wEf/mOZwRm047Sjys= github.com/in-toto/in-toto-golang v0.9.0 h1:tHny7ac4KgtsfrG6ybU8gVOZux2H8jN05AXJ9EBM1XU= github.com/in-toto/in-toto-golang v0.9.0/go.mod h1:xsBVrVsHNsB61++S6Dy2vWosKhuA3lUTQd+eF9HdeMo= github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= github.com/jackc/pgerrcode v0.0.0-20240316143900-6e2875d9b438 h1:Dj0L5fhJ9F82ZJyVOmBx6msDp/kfd1t9GRfny/mfJA0= github.com/jackc/pgerrcode v0.0.0-20240316143900-6e2875d9b438/go.mod h1:a/s9Lp5W7n/DD0VrVoyJ00FbP2ytTPDVOivvn2bMlds= github.com/jackc/pgpassfile v1.0.0 h1:/6Hmqy13Ss2zCq62VdNG8tM1wchn8zjSGOBJ6icpsIM= github.com/jackc/pgpassfile v1.0.0/go.mod h1:CEx0iS5ambNFdcRtxPj5JhEz+xB6uRky5eyVu/W2HEg= github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761 h1:iCEnooe7UlwOQYpKFhBabPMi4aNAfoODPEFNiAnClxo= github.com/jackc/pgservicefile v0.0.0-20240606120523-5a60cdf6a761/go.mod h1:5TJZWKEWniPve33vlWYSoGYefn3gLQRzjfDlhSJ9ZKM= github.com/jackc/pgx/v5 v5.7.1 h1:x7SYsPBYDkHDksogeSmZZ5xzThcTgRz++I5E+ePFUcs= github.com/jackc/pgx/v5 v5.7.1/go.mod h1:e7O26IywZZ+naJtWWos6i6fvWK+29etgITqrqHLfoZA= github.com/jackc/puddle/v2 v2.2.2 h1:PR8nw+E/1w0GLuRFSmiioY6UooMp6KJv0/61nB7icHo= github.com/jackc/puddle/v2 v2.2.2/go.mod h1:vriiEXHvEE654aYKXXjOvZM39qJ0q+azkZFrfEOc3H4= github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 h1:BQSFePA1RWJOlocH6Fxy8MmwDt+yVQYULKfN0RoTN8A= github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99/go.mod h1:1lJo3i6rXxKeerYnT8Nvf0QmHCRC1n8sfWVwXF2Frvo= github.com/jedisct1/go-minisign v0.0.0-20241212093149-d2f9f49435c7 h1:FWpSWRD8FbVkKQu8M1DM9jF5oXFLyE+XpisIYfdzbic= github.com/jedisct1/go-minisign v0.0.0-20241212093149-d2f9f49435c7/go.mod h1:BMxO138bOokdgt4UaxZiEfypcSHX0t6SIFimVP1oRfk= github.com/jellydator/ttlcache/v3 v3.3.0 h1:BdoC9cE81qXfrxeb9eoJi9dWrdhSuwXMAnHTbnBm4Wc= github.com/jellydator/ttlcache/v3 v3.3.0/go.mod h1:bj2/e0l4jRnQdrnSTaGTsh4GSXvMjQcy41i7th0GVGw= github.com/jmespath/go-jmespath v0.4.1-0.20220621161143-b0104c826a24 h1:liMMTbpW34dhU4az1GN0pTPADwNmvoRSeoZ6PItiqnY= github.com/jmespath/go-jmespath v0.4.1-0.20220621161143-b0104c826a24/go.mod h1:T8mJZnbsbmF+m6zOOFylbeCJqk5+pHWvzYPziyZiYoo= github.com/jmhodges/clock v1.2.0 h1:eq4kys+NI0PLngzaHEe7AmPT90XMGIEySD1JfV1PDIs= github.com/jmhodges/clock v1.2.0/go.mod h1:qKjhA7x7u/lQpPB1XAqX1b1lCI/w3/fNuYpI/ZjLynI= github.com/jonboulle/clockwork v0.5.0 h1:Hyh9A8u51kptdkR+cqRpT1EebBwTn1oK9YfGYbdFz6I= github.com/jonboulle/clockwork v0.5.0/go.mod h1:3mZlmanh0g2NDKO5TWZVJAfofYk64M7XN3SzBPjZF60= github.com/josharian/intern v1.0.0 h1:vlS4z54oSdjm0bgjRigI+G1HpF+tI+9rE5LLzOg8HmY= github.com/josharian/intern v1.0.0/go.mod h1:5DoeVV0s6jJacbCEi61lwdGj/aVlrQvzHFFd8Hwg//Y= github.com/json-iterator/go v1.1.12 h1:PV8peI4a0ysnczrg+LtxykD8LfKY9ML6u2jnxaEnrnM= github.com/json-iterator/go v1.1.12/go.mod h1:e30LSqwooZae/UwlEbR2852Gd8hjQvJoHmT4TnhNGBo= 
github.com/kevinburke/ssh_config v1.2.0 h1:x584FjTGwHzMwvHx18PXxbBVzfnxogHaAReU4gf13a4= github.com/kevinburke/ssh_config v1.2.0/go.mod h1:CT57kijsi8u/K/BOFA39wgDQJ9CxiF4nAY/ojJ6r6mM= github.com/klauspost/compress v1.17.11 h1:In6xLpyWOi1+C7tXUUWv2ot1QvBjxevKAaI6IXrJmUc= github.com/klauspost/compress v1.17.11/go.mod h1:pMDklpSncoRMuLFrf1W9Ss9KT+0rH90U12bZKk7uwG0= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= github.com/letsencrypt/boulder v0.0.0-20241220190419-d6e163c15d44 h1:o2qZLfJkORgMS1jOsjAx8nSx4sEZI95cjexMHFe4y9s= github.com/letsencrypt/boulder v0.0.0-20241220190419-d6e163c15d44/go.mod h1:w1Qdn1NioL94Dsk35HaBlY1rl8bYu/32YQwiGPhgsew= github.com/lucasb-eyer/go-colorful v1.2.0 h1:1nnpGOrhyZZuNyfu1QjKiUICQ74+3FNCN69Aj6K7nkY= github.com/lucasb-eyer/go-colorful v1.2.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0= github.com/magiconair/properties v1.8.9 h1:nWcCbLq1N2v/cpNsy5WvQ37Fb+YElfq20WJ/a8RkpQM= github.com/magiconair/properties v1.8.9/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3vdS329zhj2hYo0= github.com/mailru/easyjson v0.9.0 h1:PrnmzHw7262yW8sTBwxi1PdJA3Iw/EKBa8psRf7d9a4= github.com/mailru/easyjson v0.9.0/go.mod h1:1+xMtQp2MRNVL/V1bOzuP3aP8VNwRW55fQUto+XFtTU= github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= github.com/mattn/go-localereader v0.0.1 h1:ygSAOl7ZXTx4RdPYinUpg6W99U8jWvWi9Ye2JC/oIi4= github.com/mattn/go-localereader v0.0.1/go.mod h1:8fBrzywKY7BI3czFoHkuzRoWE9C+EiG4R1k4Cjx5p88= github.com/mattn/go-runewidth v0.0.16 h1:E5ScNMtiwvlvB5paMFdw9p4kSQzbXFikJ5SQO6TULQc= github.com/mattn/go-runewidth v0.0.16/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w= github.com/miekg/pkcs11 v1.1.1 h1:Ugu9pdy6vAYku5DEpVWVFPYnzV+bxB+iRdbuFSu7TvU= github.com/miekg/pkcs11 v1.1.1/go.mod h1:XsNlhZGX73bx86s2hdc/FuaLm2CPZJemRLMA+WTFxgs= github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y= github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0= github.com/mitchellh/go-wordwrap v1.0.1 h1:TLuKupo69TCn6TQSyGxwI1EblZZEsQ0vMlAFQflz0v0= github.com/mitchellh/go-wordwrap v1.0.1/go.mod h1:R62XHJLzvMFRBbcrT7m7WgmE1eOyTSsCt+hzestvNj0= github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY= github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w8PVh93nsPXa1VrQ6jlwL5oN8l14QlcNfg= github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q= 
github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M= github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk= github.com/mozillazg/docker-credential-acr-helper v0.4.0 h1:Uoh3Z9CcpEDnLiozDx+D7oDgRq7X+R296vAqAumnOcw= github.com/mozillazg/docker-credential-acr-helper v0.4.0/go.mod h1:2kiicb3OlPytmlNC9XGkLvVC+f0qTiJw3f/mhmeeQBg= github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6 h1:ZK8zHtRHOkbHy6Mmr5D264iyp3TiX5OmNcI5cIARiQI= github.com/muesli/ansi v0.0.0-20230316100256-276c6243b2f6/go.mod h1:CJlz5H+gyd6CUWT45Oy4q24RdLyn7Md9Vj2/ldJBSIo= github.com/muesli/cancelreader v0.2.2 h1:3I4Kt4BQjOR54NavqnDogx/MIoWBFa0StPA8ELUXHmA= github.com/muesli/cancelreader v0.2.2/go.mod h1:3XuTXfFS2VjM+HTLZY9Ak0l6eUKfijIfMUZ4EgX0QYo= github.com/muesli/termenv v0.15.2 h1:GohcuySI0QmI3wN8Ok9PtKGkgkFIk7y6Vpb5PvrY+Wo= github.com/muesli/termenv v0.15.2/go.mod h1:Epx+iuz8sNs7mNKhxzH4fWXGNpZwUaJKRS1noLXviQ8= github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 h1:C3w9PqII01/Oq1c1nUAm88MOHcQC9l5mIlSMApZMrHA= github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ= github.com/nozzle/throttler v0.0.0-20180817012639-2ea982251481 h1:Up6+btDp321ZG5/zdSLo48H9Iaq0UQGthrhWC6pCxzE= github.com/nozzle/throttler v0.0.0-20180817012639-2ea982251481/go.mod h1:yKZQO8QE2bHlgozqWDiRVqTFlLQSj30K/6SAK8EeYFw= github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A= github.com/nxadm/tail v1.4.8/go.mod h1:+ncqLTQzXmGhMZNUePPaPqPvBxHAIsmXswZKocGu+AU= github.com/nxadm/tail v1.4.11 h1:8feyoE3OzPrcshW5/MJ4sGESc5cqmGkGCWlco4l0bqY= github.com/nxadm/tail v1.4.11/go.mod h1:OTaG3NK980DZzxbRq6lEuzgU+mug70nY11sMd4JXXHc= github.com/oklog/ulid v1.3.1 h1:EGfNDEx6MqHz8B3uNV6QAib1UR2Lm97sHi3ocA6ESJ4= github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U= github.com/oleiade/reflections v1.1.0 h1:D+I/UsXQB4esMathlt0kkZRJZdUDmhv5zGi/HOwYTWo= github.com/oleiade/reflections v1.1.0/go.mod h1:mCxx0QseeVCHs5Um5HhJeCKVC7AwS8kO67tky4rdisA= github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE= github.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk= github.com/onsi/ginkgo v1.16.4/go.mod h1:dX+/inL/fNMqNlz0e9LfyB9TswhZpCVdJM/Z6Vvnwo0= github.com/onsi/ginkgo v1.16.5 h1:8xi0RTUf59SOSfEtZMvwTvXYMzG4gV23XVHOZiXNtnE= github.com/onsi/ginkgo v1.16.5/go.mod h1:+E8gABHa3K6zRBolWtd+ROzc/U5bkGt0FwiG042wbpU= github.com/onsi/ginkgo/v2 v2.1.3/go.mod h1:vw5CSIxN1JObi/U8gcbwft7ZxR2dgaR70JSE3/PpL4c= github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY= github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo= github.com/onsi/gomega v1.17.0/go.mod h1:HnhC7FXeEQY45zxNK3PPoIUhzk/80Xly9PcubAlGdZY= github.com/onsi/gomega v1.19.0/go.mod h1:LY+I3pBVzYsTBU1AnDwOSxaYi9WoWiqgwooUqq9yPro= github.com/onsi/gomega v1.34.1 h1:EUMJIKUjM8sKjYbtxQI9A4z2o+rruxnzNvpknOXie6k= github.com/onsi/gomega v1.34.1/go.mod h1:kU1QgUvBDLXBJq618Xvm2LUX6rSAfRaFRTcdOeDLwwY= github.com/open-policy-agent/opa v0.68.0 h1:Jl3U2vXRjwk7JrHmS19U3HZO5qxQRinQbJ2eCJYSqJQ= github.com/open-policy-agent/opa v0.68.0/go.mod h1:5E5SvaPwTpwt2WM177I9Z3eT7qUpmOGjk1ZdHs+TZ4w= github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U= github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= github.com/opencontainers/image-spec v1.1.0 
h1:8SG7/vwALn54lVB/0yZ/MMwhFrPYtpEHQb2IpWsCzug= github.com/opencontainers/image-spec v1.1.0/go.mod h1:W4s4sFTMaBeK1BQLXbG4AdM2szdn85PY75RI83NrTrM= github.com/opentracing/opentracing-go v1.2.0 h1:uEJPy/1a5RIPAJ0Ov+OIO8OxWu77jEv+1B0VhjKrZUs= github.com/opentracing/opentracing-go v1.2.0/go.mod h1:GxEUsuufX4nBwe+T+Wl9TAgYrxe9dPLANfrWvHYVTgc= github.com/pborman/getopt v0.0.0-20180811024354-2b5b3bfb099b/go.mod h1:85jBQOZwpVEaDAr341tbn15RS4fCAsIst0qp7i8ex1o= github.com/pborman/uuid v1.2.1 h1:+ZZIw58t/ozdjRaXh/3awHfmWRbzYxJoAdNJxe/3pvw= github.com/pborman/uuid v1.2.1/go.mod h1:X/NO0urCmaxf9VXbdlT7C2Yzkj2IKimNn4k+gtPdI/k= github.com/pelletier/go-toml/v2 v2.2.3 h1:YmeHyLY8mFWbdkNWwpr+qIL2bEqT0o95WSdkNHvL12M= github.com/pelletier/go-toml/v2 v2.2.3/go.mod h1:MfCQTFTvCcUyyvvwm1+G6H/jORL20Xlb6rzQu9GuUkc= github.com/pjbgf/sha1cd v0.3.2 h1:a9wb0bp1oC2TGwStyn0Umc/IGKQnEgF0vVaZ8QF8eo4= github.com/pjbgf/sha1cd v0.3.2/go.mod h1:zQWigSxVmsHEZow5qaLtPYxpcKMMQpa09ixqBxuCS6A= github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c h1:+mdjkGKdHQG3305AYmdv1U2eRNDiU2ErMBj1gwrq8eQ= github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c/go.mod h1:7rwL4CYBLnjLxUqIJNnCWiEdr3bn6IUYi15bNlnbCCU= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 h1:Jamvg5psRIccs7FGNTlIRMkT8wgtp5eCXdBlqhYGL6U= github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/prometheus/client_golang v1.20.5 h1:cxppBPuYhUnsO6yo/aoRol4L7q7UFfdm+bR9r+8l63Y= github.com/prometheus/client_golang v1.20.5/go.mod h1:PIEt8X02hGcP8JWbeHyeZ53Y/jReSnHgO035n//V5WE= github.com/prometheus/client_model v0.6.1 h1:ZKSh/rekM+n3CeS952MLRAdFwIKqeY8b62p8ais2e9E= github.com/prometheus/client_model v0.6.1/go.mod h1:OrxVMOVHjw3lKMa8+x6HeMGkHMQyHDk9E3jmP2AmGiY= github.com/prometheus/common v0.60.0 h1:+V9PAREWNvJMAuJ1x1BaWl9dewMW4YrHZQbx0sJNllA= github.com/prometheus/common v0.60.0/go.mod h1:h0LYf1R1deLSKtD4Vdg8gy4RuOvENW2J/h19V5NADQw= github.com/prometheus/procfs v0.15.1 h1:YagwOFzUgYfKKHX6Dr+sHT7km/hxC76UB0learggepc= github.com/prometheus/procfs v0.15.1/go.mod h1:fB45yRUv8NstnjriLhBQLuOUt+WW4BsoGhij/e3PBqk= github.com/protocolbuffers/txtpbfmt v0.0.0-20231025115547-084445ff1adf h1:014O62zIzQwvoD7Ekj3ePDF5bv9Xxy0w6AZk0qYbjUk= github.com/protocolbuffers/txtpbfmt v0.0.0-20231025115547-084445ff1adf/go.mod h1:jgxiZysxFPM+iWKwQwPR+y+Jvo54ARd4EisXxKYpB5c= github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475 h1:N/ElC8H3+5XpJzTSTfLsJV/mx9Q9g7kxmchpfZyxgzM= github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4= github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= github.com/rivo/uniseg v0.4.7 h1:WUdvkW8uEhrYfLC4ZzdpI2ztxP1I582+49Oc5Mq64VQ= github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88= github.com/rogpeppe/go-internal v1.13.1 h1:KvO1DLK/DRN07sQ1LQKScxyZJuNnedQ5/wKSR38lUII= github.com/rogpeppe/go-internal v1.13.1/go.mod h1:uMEvuHeurkdAXX61udpOXGD/AzZDWNMNyH2VO9fmH0o= github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk= github.com/russross/blackfriday/v2 v2.1.0/go.mod 
h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/ryanuber/go-glob v1.0.0 h1:iQh3xXAumdQ+4Ufa5b25cRpC5TYKlno6hsv6Cb3pkBk= github.com/ryanuber/go-glob v1.0.0/go.mod h1:807d1WSdnB0XRJzKNil9Om6lcp/3a0v4qIHxIXzX/Yc= github.com/sagikazarmark/locafero v0.6.0 h1:ON7AQg37yzcRPU69mt7gwhFEBwxI6P9T4Qu3N51bwOk= github.com/sagikazarmark/locafero v0.6.0/go.mod h1:77OmuIc6VTraTXKXIs/uvUxKGUXjE1GbemJYHqdNjX0= github.com/sagikazarmark/slog-shim v0.1.0 h1:diDBnUNK9N/354PgrxMywXnAwEr1QZcOr6gto+ugjYE= github.com/sagikazarmark/slog-shim v0.1.0/go.mod h1:SrcSrq8aKtyuqEI1uvTDTK1arOWRIczQRv+GVI1AkeQ= github.com/sahilm/fuzzy v0.1.1 h1:ceu5RHF8DGgoi+/dR5PsECjCDH1BE3Fnmpo7aVXOdRA= github.com/sahilm/fuzzy v0.1.1/go.mod h1:VFvziUEIMCrT6A6tw2RFIXPXXmzXbOsSHF0DOI8ZK9Y= github.com/sassoftware/relic v7.2.1+incompatible h1:Pwyh1F3I0r4clFJXkSI8bOyJINGqpgjJU3DYAZeI05A= github.com/sassoftware/relic v7.2.1+incompatible/go.mod h1:CWfAxv73/iLZ17rbyhIEq3K9hs5w6FpNMdUT//qR+zk= github.com/sassoftware/relic/v7 v7.6.2 h1:rS44Lbv9G9eXsukknS4mSjIAuuX+lMq/FnStgmZlUv4= github.com/sassoftware/relic/v7 v7.6.2/go.mod h1:kjmP0IBVkJZ6gXeAu35/KCEfca//+PKM6vTAsyDPY+k= github.com/secure-systems-lab/go-securesystemslib v0.9.0 h1:rf1HIbL64nUpEIZnjLZ3mcNEL9NBPB0iuVjyxvq3LZc= github.com/secure-systems-lab/go-securesystemslib v0.9.0/go.mod h1:DVHKMcZ+V4/woA/peqr+L0joiRXbPpQ042GgJckkFgw= github.com/segmentio/ksuid v1.0.4 h1:sBo2BdShXjmcugAMwjugoGUdUV0pcxY5mW4xKRn3v4c= github.com/segmentio/ksuid v1.0.4/go.mod h1:/XUiZBD3kVx5SmUOl55voK5yeAbBNNIed+2O73XgrPE= github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 h1:n661drycOFuPLCN3Uc8sB6B/s6Z4t2xvBgU1htSHuq8= github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3/go.mod h1:A0bzQcvG0E7Rwjx0REVgAGH58e96+X0MeOfepqsbeW4= github.com/shibumi/go-pathspec v1.3.0 h1:QUyMZhFo0Md5B8zV8x2tesohbb5kfbpTi9rBnKh5dkI= github.com/shibumi/go-pathspec v1.3.0/go.mod h1:Xutfslp817l2I1cZvgcfeMQJG5QnU2lh5tVaaMCl3jE= github.com/sigstore/cosign/v2 v2.4.1 h1:b8UXEfJFks3hmTwyxrRNrn6racpmccUycBHxDMkEPvU= github.com/sigstore/cosign/v2 v2.4.1/go.mod h1:GvzjBeUKigI+XYnsoVQDmMAsMMc6engxztRSuxE+x9I= github.com/sigstore/fulcio v1.6.5 h1:A9DtV2hmeJ835mtqqxgvV0Sie0HOPKaLPgvFVF4FU7c= github.com/sigstore/fulcio v1.6.5/go.mod h1:wUei0BMnlz5iY10keKAx5FXFGnMX0N25ZSe4jGRA65M= github.com/sigstore/gitsign v0.12.0 h1:tdSrYKhhEn1hh14yZ8f63GC61MzxGjYc/e4SmvFZCtE= github.com/sigstore/gitsign v0.12.0/go.mod h1:eE5Y3g1rorXrii6H5M74dTfvtbrC/lVBFUAXRFpcvVQ= github.com/sigstore/protobuf-specs v0.4.0 h1:yoZbdh0kZYKOSiVbYyA8J3f2wLh5aUk2SQB7LgAfIdU= github.com/sigstore/protobuf-specs v0.4.0/go.mod h1:FKW5NYhnnFQ/Vb9RKtQk91iYd0MKJ9AxyqInEwU6+OI= github.com/sigstore/rekor v1.3.7 h1:Z5UW5TmqbTZnyOFkMRfi32q/CWcxK6VuzIkx+33mbq8= github.com/sigstore/rekor v1.3.7/go.mod h1:TihqJscZ6L6398x68EHY82t0AOnGYfrQ0siXe3WgbR4= github.com/sigstore/sigstore v1.8.12 h1:S8xMVZbE2z9ZBuQUEG737pxdLjnbOIcFi5v9UFfkJFc= github.com/sigstore/sigstore v1.8.12/go.mod h1:+PYQAa8rfw0QdPpBcT+Gl3egKD9c+TUgAlF12H3Nmjo= github.com/sigstore/sigstore-go v0.6.2 h1:8uiywjt73vzfrGfWYVwVsiB1E1Qmwmpgr1kVpl4fs6A= github.com/sigstore/sigstore-go v0.6.2/go.mod h1:pOIUH7Jx+ctwMICo+2zNrViOJJN5sGaQgwX4yAVJkA0= github.com/sigstore/sigstore/pkg/signature/kms/aws v1.8.10 h1:e5GfVngPjGap/N3ODefayt7vKIPS1/v3hWLZ9+4MrN4= github.com/sigstore/sigstore/pkg/signature/kms/aws v1.8.10/go.mod h1:HOr3AdFPKdND2FNl/sUD5ZifPl1OMJvrbf9xIaaWcus= github.com/sigstore/sigstore/pkg/signature/kms/azure v1.8.10 h1:9tZEpfIL/ewAG9G87AHe3aVoy8Ujos2F1qLfCckX6jQ= github.com/sigstore/sigstore/pkg/signature/kms/azure 
v1.8.10/go.mod h1:VnIAcitund62R45ezK/dtUeEhuRtB3LsAgJ8m0H34zc= github.com/sigstore/sigstore/pkg/signature/kms/gcp v1.8.10 h1:Xre51HdjIIaVo5ox5zyL+6h0tkrx7Ke9Neh7fLmmZK0= github.com/sigstore/sigstore/pkg/signature/kms/gcp v1.8.10/go.mod h1:VNfdklQDbyGJog8S7apdxiEfmYmCkKyxrsCL9xprkTY= github.com/sigstore/sigstore/pkg/signature/kms/hashivault v1.8.10 h1:HjfjL3x3dP2kaGqQHVog974cTcKfzFaGjfZyLQ9KXrg= github.com/sigstore/sigstore/pkg/signature/kms/hashivault v1.8.10/go.mod h1:jaeEjkTW1p3gUyPjz9lTcT4TydCs208FoyAwIs6bIT4= github.com/sigstore/timestamp-authority v1.2.3 h1:/4YXCKF/+ZQsad89D0Lj2QIz78s5/aRgLa8Nwq2/Kd4= github.com/sigstore/timestamp-authority v1.2.3/go.mod h1:q2tJKJzP34hLIbVu3Y1A9bBZTBuZ/gEmMN7MtAoGQKI= github.com/sirupsen/logrus v1.7.0/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= github.com/sirupsen/logrus v1.9.3 h1:dueUQJ1C2q9oE3F7wvmSGAaVtTmUizReu6fjN8uqzbQ= github.com/sirupsen/logrus v1.9.3/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVsIT4qYEQ= github.com/skeema/knownhosts v1.3.0 h1:AM+y0rI04VksttfwjkSTNQorvGqmwATnvnAHpSgc0LY= github.com/skeema/knownhosts v1.3.0/go.mod h1:sPINvnADmT/qYH1kfv+ePMmOBTH6Tbl7b5LvTDjFK7M= github.com/skratchdot/open-golang v0.0.0-20200116055534-eef842397966 h1:JIAuq3EEf9cgbU6AtGPK4CTG3Zf6CKMNqf0MHTggAUA= github.com/skratchdot/open-golang v0.0.0-20200116055534-eef842397966/go.mod h1:sUM3LWHvSMaG192sy56D9F7CNvL7jUJVXoqM1QKLnog= github.com/sourcegraph/conc v0.3.0 h1:OQTbbt6P72L20UqAkXXuLOj79LfEanQ+YQFNpLA9ySo= github.com/sourcegraph/conc v0.3.0/go.mod h1:Sdozi7LEKbFPqYX2/J+iBAM6HpqSLTASQIKqDmF7Mt0= github.com/spf13/afero v1.11.0 h1:WJQKhtpdm3v2IzqG8VMqrr6Rf3UYpEF239Jy9wNepM8= github.com/spf13/afero v1.11.0/go.mod h1:GH9Y3pIexgf1MTIWtNGyogA5MwRIDXGUr+hbWNoBjkY= github.com/spf13/cast v1.7.1 h1:cuNEagBQEHWN1FnbGEjCXL2szYEXqfJPbP2HNUaca9Y= github.com/spf13/cast v1.7.1/go.mod h1:ancEpBxwJDODSW/UG4rDrAqiKolqNNh2DX3mk86cAdo= github.com/spf13/cobra v1.8.1 h1:e5/vxKd/rZsfSJMUX1agtjeTDf+qv1/JdBF8gg5k9ZM= github.com/spf13/cobra v1.8.1/go.mod h1:wHxEcudfqmLYa8iTfL+OuZPbBZkmvliBWKIezN3kD9Y= github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= github.com/spf13/viper v1.19.0 h1:RWq5SEjt8o25SROyN3z2OrDB9l7RPd3lwTWU8EcEdcI= github.com/spf13/viper v1.19.0/go.mod h1:GQUN9bilAbhU/jgc1bKs99f/suXKeUMct8Adx5+Ntkg= github.com/spiffe/go-spiffe/v2 v2.4.0 h1:j/FynG7hi2azrBG5cvjRcnQ4sux/VNj8FAVc99Fl66c= github.com/spiffe/go-spiffe/v2 v2.4.0/go.mod h1:m5qJ1hGzjxjtrkGHZupoXHo/FDWwCB1MdSyBzfHugx0= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw= github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA= github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/stretchr/testify v1.7.2/go.mod h1:R6va5+xMeoiuVRoj+gSkQ7d3FALtqAAGI1FQKckRals= github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU= 
github.com/stretchr/testify v1.8.2/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4= github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA= github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= github.com/subosito/gotenv v1.6.0 h1:9NlTDc1FTs4qu0DDq7AEtTPNw6SVm7uBMsUCUjABIf8= github.com/subosito/gotenv v1.6.0/go.mod h1:Dk4QP5c2W3ibzajGcXpNraDfq2IrhjMIvMSWPKKo0FU= github.com/syndtr/goleveldb v1.0.1-0.20220721030215-126854af5e6d h1:vfofYNRScrDdvS342BElfbETmL1Aiz3i2t0zfRj16Hs= github.com/syndtr/goleveldb v1.0.1-0.20220721030215-126854af5e6d/go.mod h1:RRCYJbIwD5jmqPI9XoAFR0OcDxqUctll6zUj/+B4S48= github.com/tchap/go-patricia/v2 v2.3.1 h1:6rQp39lgIYZ+MHmdEq4xzuk1t7OdC35z/xm0BGhTkes= github.com/tchap/go-patricia/v2 v2.3.1/go.mod h1:VZRHKAb53DLaG+nA9EaYYiaEx6YztwDlLElMsnSHD4k= github.com/thales-e-security/pool v0.0.2 h1:RAPs4q2EbWsTit6tpzuvTFlgFRJ3S8Evf5gtvVDbmPg= github.com/thales-e-security/pool v0.0.2/go.mod h1:qtpMm2+thHtqhLzTwgDBj/OuNnMpupY8mv0Phz0gjhU= github.com/theupdateframework/go-tuf v0.7.0 h1:CqbQFrWo1ae3/I0UCblSbczevCCbS31Qvs5LdxRWqRI= github.com/theupdateframework/go-tuf v0.7.0/go.mod h1:uEB7WSY+7ZIugK6R1hiBMBjQftaFzn7ZCDJcp1tCUug= github.com/theupdateframework/go-tuf/v2 v2.0.2 h1:PyNnjV9BJNzN1ZE6BcWK+5JbF+if370jjzO84SS+Ebo= github.com/theupdateframework/go-tuf/v2 v2.0.2/go.mod h1:baB22nBHeHBCeuGZcIlctNq4P61PcOdyARlplg5xmLA= github.com/titanous/rocacheck v0.0.0-20171023193734-afe73141d399 h1:e/5i7d4oYZ+C1wj2THlRK+oAhjeS/TRQwMfkIuet3w0= github.com/titanous/rocacheck v0.0.0-20171023193734-afe73141d399/go.mod h1:LdwHTNJT99C5fTAzDz0ud328OgXz+gierycbcIx2fRs= github.com/tjfoc/gmsm v1.4.1 h1:aMe1GlZb+0bLjn+cKTPEvvn9oUEBlJitaZiiBwsbgho= github.com/tjfoc/gmsm v1.4.1/go.mod h1:j4INPkHWMrhJb38G+J6W4Tw0AbuN8Thu3PbdVYhVcTE= github.com/transparency-dev/merkle v0.0.2 h1:Q9nBoQcZcgPamMkGn7ghV8XiTZ/kRxn1yCG81+twTK4= github.com/transparency-dev/merkle v0.0.2/go.mod h1:pqSy+OXefQ1EDUVmAJ8MUhHB9TXGuzVAT58PqBoHz1A= github.com/vbatts/tar-split v0.11.6 h1:4SjTW5+PU11n6fZenf2IPoV8/tz3AaYHMWjf23envGs= github.com/vbatts/tar-split v0.11.6/go.mod h1:dqKNtesIOr2j2Qv3W/cHjnvk9I8+G7oAkFDFN6TCBEI= github.com/xanzy/go-gitlab v0.109.0 h1:RcRme5w8VpLXTSTTMZdVoQWY37qTJWg+gwdQl4aAttE= github.com/xanzy/go-gitlab v0.109.0/go.mod h1:wKNKh3GkYDMOsGmnfuX+ITCmDuSDWFO0G+C4AygL9RY= github.com/xanzy/ssh-agent v0.3.3 h1:+/15pJfg/RsTxqYcX6fHqOXZwwMP+2VyYWJeWM2qQFM= github.com/xanzy/ssh-agent v0.3.3/go.mod h1:6dzNDKs0J9rVPHPhaGCukekBHKqfl+L3KghI1Bc68Uw= github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb h1:zGWFAtiMcyryUHoUjUJX0/lt1H2+i2Ka2n+D3DImSNo= github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 h1:EzJWgHovont7NscjpAxXsDA8S8BMYve8Y5+7cuRE7R0= github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ= github.com/yashtewari/glob-intersection v0.2.0 h1:8iuHdN88yYuCzCdjt0gDe+6bAhUwBeEWqThExu54RFg= github.com/yashtewari/glob-intersection v0.2.0/go.mod h1:LK7pIC3piUjovexikBbJ26Yml7g8xa5bsjfx2v1fwok= github.com/ysmood/fetchup v0.2.3 h1:ulX+SonA0Vma5zUFXtv52Kzip/xe7aj4vqT5AJwQ+ZQ= github.com/ysmood/fetchup v0.2.3/go.mod h1:xhibcRKziSvol0H1/pj33dnKrYyI2ebIvz5cOOkYGns= github.com/ysmood/goob v0.4.0 h1:HsxXhyLBeGzWXnqVKtmT9qM7EuVs/XOgkX7T6r1o1AQ= github.com/ysmood/goob v0.4.0/go.mod h1:u6yx7ZhS4Exf2MwciFr6nIM8knHQIE22lFpWHnfql18= 
github.com/ysmood/got v0.40.0 h1:ZQk1B55zIvS7zflRrkGfPDrPG3d7+JOza1ZkNxcc74Q= github.com/ysmood/got v0.40.0/go.mod h1:W7DdpuX6skL3NszLmAsC5hT7JAhuLZhByVzHTq874Qg= github.com/ysmood/gson v0.7.3 h1:QFkWbTH8MxyUTKPkVWAENJhxqdBa4lYTQWqZCiLG6kE= github.com/ysmood/gson v0.7.3/go.mod h1:3Kzs5zDl21g5F/BlLTNcuAGAYLKt2lV5G8D1zF3RNmg= github.com/ysmood/leakless v0.9.0 h1:qxCG5VirSBvmi3uynXFkcnLMzkphdh3xx5FtrORwDCU= github.com/ysmood/leakless v0.9.0/go.mod h1:R8iAXPRaG97QJwqxs74RdwzcRHT1SWCGTNqY8q0JvMQ= github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= github.com/zalando/go-keyring v0.2.3 h1:v9CUu9phlABObO4LPWycf+zwMG7nlbb3t/B5wa97yms= github.com/zalando/go-keyring v0.2.3/go.mod h1:HL4k+OXQfJUWaMnqyuSOc0drfGPX2b51Du6K+MRgZMk= github.com/zeebo/errs v1.3.0 h1:hmiaKqgYZzcVgRL1Vkc1Mn2914BbzB0IBxs+ebeutGs= github.com/zeebo/errs v1.3.0/go.mod h1:sgbWHsvVuTPHcqJJGQ1WhI5KbWlHYz+2+2C/LSEtCw4= go.mongodb.org/mongo-driver v1.17.1 h1:Wic5cJIwJgSpBhe3lx3+/RybR5PiYRMpVFgO7cOHyIM= go.mongodb.org/mongo-driver v1.17.1/go.mod h1:wwWm/+BuOddhcq3n68LKRmgk2wXzmF6s0SFOa0GINL4= go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0= go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo= go.opentelemetry.io/auto/sdk v1.1.0 h1:cH53jehLUN6UFLY71z+NDOiNJqDdPRaXzTel0sJySYA= go.opentelemetry.io/auto/sdk v1.1.0/go.mod h1:3wSPjt5PWp2RhlCcmmOial7AvC4DQqZb7a7wCow3W8A= go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.55.0 h1:hCq2hNMwsegUvPzI7sPOvtO9cqyy5GbWt/Ybp2xrx8Q= go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.55.0/go.mod h1:LqaApwGx/oUmzsbqxkzuBvyoPpkxk3JQWnqfVrJ3wCA= go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.55.0 h1:ZIg3ZT/aQ7AfKqdwp7ECpOK6vHqquXXuyTjIO8ZdmPs= go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.55.0/go.mod h1:DQAwmETtZV00skUwgD6+0U89g80NKsJE3DCKeLLPQMI= go.opentelemetry.io/otel v1.33.0 h1:/FerN9bax5LoK51X/sI0SVYrjSE0/yUL7DpxW4K3FWw= go.opentelemetry.io/otel v1.33.0/go.mod h1:SUUkR6csvUQl+yjReHu5uM3EtVV7MBm5FHKRlNx4I8I= go.opentelemetry.io/otel/metric v1.33.0 h1:r+JOocAyeRVXD8lZpjdQjzMadVZp2M4WmQ+5WtEnklQ= go.opentelemetry.io/otel/metric v1.33.0/go.mod h1:L9+Fyctbp6HFTddIxClbQkjtubW6O9QS3Ann/M82u6M= go.opentelemetry.io/otel/sdk v1.31.0 h1:xLY3abVHYZ5HSfOg3l2E5LUj2Cwva5Y7yGxnSW9H5Gk= go.opentelemetry.io/otel/sdk v1.31.0/go.mod h1:TfRbMdhvxIIr/B2N2LQW2S5v9m3gOQ/08KsbbO5BPT0= go.opentelemetry.io/otel/sdk/metric v1.31.0 h1:i9hxxLJF/9kkvfHppyLL55aW7iIJz4JjxTeYusH7zMc= go.opentelemetry.io/otel/sdk/metric v1.31.0/go.mod h1:CRInTMVvNhUKgSAMbKyTMxqOBC0zgyxzW55lZzX43Y8= go.opentelemetry.io/otel/trace v1.33.0 h1:cCJuF7LRjUFso9LPnEAHJDB2pqzp+hbO8eu1qqW2d/s= go.opentelemetry.io/otel/trace v1.33.0/go.mod h1:uIcdVUZMpTAmz0tI1z04GoVSezK37CbGV4fr1f2nBck= go.step.sm/crypto v0.54.2 h1:3LSA5nYDQvcd484OSx7xsS3XDqQ7/WZjVqvq0+a0fWc= go.step.sm/crypto v0.54.2/go.mod h1:1+OjUozd5aA3TkBJfr5Aobd6vNt9F70n1DagcoBh3Pc= go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto= go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE= go.uber.org/multierr v1.11.0 h1:blXXJkSxSSfBVBlC76pxqeO+LN3aDfLQo+309xJstO0= go.uber.org/multierr v1.11.0/go.mod h1:20+QtiLqy0Nd6FdQB9TLXag12DsQkrbs3htMFfDN80Y= go.uber.org/zap v1.27.0 h1:aJMhYGrd5QSmlpLMr2MftRKl7t8J8PTZPA732ud/XR8= go.uber.org/zap v1.27.0/go.mod h1:GB2qFLM7cTU87MWRP2mPIjqfIDnGu+VIO4V/SdhGo2E= 
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20190426145343-a29dc8fdc734/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4= golang.org/x/crypto v0.7.0/go.mod h1:pYwdfH91IfpZVANVyUOhSIPZaFoJGxTFbZhFTx+dXZU= golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= golang.org/x/crypto v0.32.0 h1:euUpcYgM8WcP71gNpTqQCn6rC2t6ULUPiOzfWaXVVfc= golang.org/x/crypto v0.32.0/go.mod h1:ZnnJkOaASj8g0AjIduWNlq2NRxL0PlBrbKVyZ6V/Ugc= golang.org/x/exp v0.0.0-20241217172543-b2144cdd0a67 h1:1UoZQm6f0P/ZO0w1Ri+f+ifG/gXhegadRdwBIXEFWDo= golang.org/x/exp v0.0.0-20241217172543-b2144cdd0a67/go.mod h1:qj5a5QZpwLU2NLQudwIN5koi3beDhSAlJwa67PuM98c= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.22.0 h1:D4nJWe9zXqHOmWqj4VMOJhvzj7bEZg4wEYa759z1pH4= golang.org/x/mod v0.22.0/go.mod h1:6SkKJ3Xj0I0BrPOZoBy3bdMptDDU9oJrpohJ3eWZ1fY= golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20200520004742-59133d7f0dd7/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A= golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20210428140749-89ef3d95e781/go.mod h1:OJAsFXCWl8Ukc7SiCT/9KSuxbyM7479/AVlXFRxuMCk= golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y= golang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= golang.org/x/net v0.0.0-20220607020251-c690dde0001d/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= golang.org/x/net v0.8.0/go.mod h1:QVkue5JL9kW//ek3r6jTKnTFis1tRmNAW2P1shuFdJc= golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg= golang.org/x/net v0.34.0 h1:Mb7Mrk043xzHgnRM88suvJFwzVrRfHEHJEl5/71CKw0= golang.org/x/net v0.34.0/go.mod h1:di0qlW3YNM5oh6GqDGQr92MyTozJPmybPK4Ev/Gm31k= golang.org/x/oauth2 v0.25.0 h1:CY4y7XT9v0cRI9oupztF8AgiIu99L/ksR/Xp/6jrZ70= golang.org/x/oauth2 v0.25.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI= golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod 
h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.10.0 h1:3NQrjDixjgGwUOCaF8w2+VYHv0Ve/vGYSbdkTa98gmQ= golang.org/x/sync v0.10.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190904154756-749cb33beabd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191120155948-bd437916bb0e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210112080510-489259a85091/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20210809222454-d867a43fc93e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20211216021012-1d35b9e2eb4e/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220412211240-33da011f77ad/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/sys v0.29.0 h1:TPYlXGxvx1MGTn2GiZDhnjPA9wZzZeGKHHmKhHYvgaU= golang.org/x/sys v0.29.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= golang.org/x/term v0.6.0/go.mod 
h1:m6U89DPEgQRMq3DNkDClhWw02AUbt2daBVO4cn4Hv9U= golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo= golang.org/x/term v0.17.0/go.mod h1:lLRBjIVuehSbZlaOtGMbcMncT+aqLLLmKrsjNrUguwk= golang.org/x/term v0.28.0 h1:/Ts8HFuMR2E6IP/jlo7QVLZHggjKQbhu/7H0LJFr3Gg= golang.org/x/term v0.28.0/go.mod h1:Sw/lC2IAUZ92udQNf3WodGtn4k/XoLyZoh8v/8uiwek= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.8.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= golang.org/x/text v0.21.0 h1:zyQAAkrwaneQ066sspRyJaG9VNi/YJ1NfzcGB3hZ/qo= golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ= golang.org/x/time v0.8.0 h1:9i3RxcPv3PZnitoVGMPDKZSq1xW1gK1Xy3ArNOGZfEg= golang.org/x/time v0.8.0/go.mod h1:3BpzKBy/shNhVucY/MWOyx10tF3SFh9QdLuxbVysPQM= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20201224043029-2b0845dc783e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20220517211312-f3a8303e98df/go.mod h1:K8+ghG5WaK9qNqU5K3HdILfMLy1f3aNYFI/wnl100a8= golang.org/x/xerrors v0.0.0-20240716161551-93cc26a95ae9 h1:LLhsEBxRTBLuKlQxFBYUOU8xyFgXv6cOTp2HASDlsDk= golang.org/x/xerrors v0.0.0-20240716161551-93cc26a95ae9/go.mod h1:NDW/Ps6MPRej6fsCIbMTohpP40sJ/P/vI1MoTEGwX90= google.golang.org/api v0.207.0 h1:Fvt6IGCYjf7YLcQ+GCegeAI2QSQCfIWhRkmrMPj3JRM= google.golang.org/api v0.207.0/go.mod h1:I53S168Yr/PNDNMi5yPnDc0/LGRZO6o7PoEbl/HY3CM= google.golang.org/genproto v0.0.0-20241113202542-65e8d215514f h1:zDoHYmMzMacIdjNe+P2XiTmPsLawi/pCbSPfxt6lTfw= google.golang.org/genproto v0.0.0-20241113202542-65e8d215514f/go.mod h1:Q5m6g8b5KaFFzsQFIGdJkSJDGeJiybVenoYFMMa3ohI= google.golang.org/genproto/googleapis/api v0.0.0-20241223144023-3abc09e42ca8 h1:st3LcW/BPi75W4q1jJTEor/QWwbNlPlDG0JTn6XhZu0= google.golang.org/genproto/googleapis/api v0.0.0-20241223144023-3abc09e42ca8/go.mod h1:klhJGKFyG8Tn50enBn7gizg4nXGXJ+jqEREdCWaPcV4= google.golang.org/genproto/googleapis/rpc v0.0.0-20241223144023-3abc09e42ca8 h1:TqExAhdPaB60Ux47Cn0oLV07rGnxZzIsaRhQaqS666A= google.golang.org/genproto/googleapis/rpc v0.0.0-20241223144023-3abc09e42ca8/go.mod h1:lcTa1sDdWEIHMWlITnIczmw5w60CF9ffkb8Z+DVmmjA= google.golang.org/grpc v1.69.2 
h1:U3S9QEtbXC0bYNvRtcoklF3xGtLViumSYxWykJS+7AU= google.golang.org/grpc v1.69.2/go.mod h1:vyjdE6jLBI76dgpDojsFGNaHlxdjXN9ghpnd2o7JGZ4= google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8= google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0= google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM= google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE= google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo= google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU= google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= google.golang.org/protobuf v1.36.5 h1:tPhr+woSbjfYvY6/GPufUoYizxw1cF/yFoxJ2fmpwlM= google.golang.org/protobuf v1.36.5/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys= gopkg.in/inf.v0 v0.9.1 h1:73M5CoZyi3ZLMOyDlQh031Cx6N9NDJ2Vvfl76EDAgDc= gopkg.in/inf.v0 v0.9.1/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw= gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA= gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ= gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw= gopkg.in/warnings.v0 v0.1.2 h1:wFXVbFY8DY5/xOe1ECiWdKCzZlxgshcYVNkBHstARME= gopkg.in/warnings.v0 v0.1.2/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= gotest.tools/v3 v3.5.1 h1:EENdUnS3pdur5nybKYIh2Vfgc8IUNBjxDPSjtiJcOzU= gotest.tools/v3 v3.5.1/go.mod h1:isy3WKz7GK6uNw/sbHzfKBLvlvXwUyV06n6brMxxopU= k8s.io/api v0.30.2 h1:+ZhRj+28QT4UOH+BKznu4CBgPWgkXO7XAvMcMl0qKvI= k8s.io/api v0.30.2/go.mod h1:ULg5g9JvOev2dG0u2hig4Z7tQ2hHIuS+m8MNZ+X6EmI= k8s.io/apimachinery v0.30.2 h1:fEMcnBj6qkzzPGSVsAZtQThU62SmQ4ZymlXRC5yFSCg= k8s.io/apimachinery v0.30.2/go.mod h1:iexa2somDaxdnj7bha06bhb43Zpa6eWH8N8dbqVjTUc= k8s.io/client-go v0.30.2 h1:sBIVJdojUNPDU/jObC+18tXWcTJVcwyqS9diGdWHk50= k8s.io/client-go v0.30.2/go.mod 
h1:JglKSWULm9xlJLx4KCkfLLQ7XwtlbflV6uFFSHTMgVs= k8s.io/klog/v2 v2.130.1 h1:n9Xl7H1Xvksem4KFG4PYbdQCQxqc/tTUyrgXaOhHSzk= k8s.io/klog/v2 v2.130.1/go.mod h1:3Jpz1GvMt720eyJH1ckRHK1EDfpxISzJ7I9OYgaDtPE= k8s.io/kube-openapi v0.0.0-20240521193020-835d969ad83a h1:zD1uj3Jf+mD4zmA7W+goE5TxDkI7OGJjBNBzq5fJtLA= k8s.io/kube-openapi v0.0.0-20240521193020-835d969ad83a/go.mod h1:UxDHUPsUwTOOxSU+oXURfFBcAS6JwiRXTYqYwfuGowc= k8s.io/utils v0.0.0-20240502163921-fe8a2dddb1d0 h1:jgGTlFYnhF1PM1Ax/lAlxUPE+KfCIXHaathvJg1C3ak= k8s.io/utils v0.0.0-20240502163921-fe8a2dddb1d0/go.mod h1:OLgZIPagt7ERELqWJFomSt595RzquPNLL48iOWgYOg0= sigs.k8s.io/json v0.0.0-20221116044647-bc3834ca7abd h1:EDPBXCAspyGV4jQlpZSudPeMmr1bNJefnuqLsRAsHZo= sigs.k8s.io/json v0.0.0-20221116044647-bc3834ca7abd/go.mod h1:B8JuhiUyNFVKdsE8h686QcCxMaH6HrOAZj4vswFpcB0= sigs.k8s.io/release-utils v0.8.5 h1:FUtFqEAN621gSXv0L7kHyWruBeS7TUU9aWf76olX7uQ= sigs.k8s.io/release-utils v0.8.5/go.mod h1:qsm5bdxdgoHkD8HsXpgme2/c3mdsNaiV53Sz2HmKeJA= sigs.k8s.io/structured-merge-diff/v4 v4.4.1 h1:150L+0vs/8DA78h1u02ooW1/fFq/Lwr+sGiqlzvrtq4= sigs.k8s.io/structured-merge-diff/v4 v4.4.1/go.mod h1:N8hJocpFajUSSeSJ9bOZ77VzejKZaXsTtZo4/u7Io08= sigs.k8s.io/yaml v1.4.0 h1:Mk1wCc2gy/F0THH0TAp1QYyJNzRm2KCLy3o5ASXVI5E= sigs.k8s.io/yaml v1.4.0/go.mod h1:Ejl7/uTz7PSA4eKMyQCUTnhZYNmLIl+5c2lQPGR2BPY= software.sslmate.com/src/go-pkcs12 v0.4.0 h1:H2g08FrTvSFKUj+D309j1DPfk5APnIdAQAB8aEykJ5k= software.sslmate.com/src/go-pkcs12 v0.4.0/go.mod h1:Qiz0EyvDRJjjxGyUQa2cCNZn/wMyzrRJ/qcDXOQazLI= gittuf-0.9.0/internal/000077500000000000000000000000001475150141000146345ustar00rootroot00000000000000gittuf-0.9.0/internal/attestations/000077500000000000000000000000001475150141000173565ustar00rootroot00000000000000gittuf-0.9.0/internal/attestations/attestations.go000066400000000000000000000173761475150141000224450ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package attestations import ( "encoding/json" "errors" "fmt" "path" "strings" "github.com/gittuf/gittuf/internal/gitinterface" "github.com/gittuf/gittuf/internal/rsl" ) const ( Ref = "refs/gittuf/attestations" referenceAuthorizationsTreeEntryName = "reference-authorizations" githubPullRequestAttestationsTreeEntryName = "github-pull-requests" githubPullRequestApprovalSystemName = "github" codeReviewApprovalAttestationsTreeEntryName = "code-review-approvals" codeReviewApprovalIndexTreeEntryName = "review-index.json" initialCommitMessage = "Initial commit" defaultCommitMessage = "Update attestations" ) var ErrAttestationsNotFound = errors.New("attestations not found") // Attestations tracks all the attestations in a gittuf repository. type Attestations struct { // referenceAuthorizations maps each authorized action to the blob ID of the // attestation. The key is a path of the form // `/-`, where `ref-path` is the absolute ref path // such as `refs/heads/main` and `from-id` and `to-id` determine how the ref // in question moved. For example, the key // `refs/heads/main/-` indicates the authorization is // for the action of moving `refs/heads/main` from `commit-A` to a commit // with `tree-B`. referenceAuthorizations map[string]gitinterface.Hash // githubPullRequestAttestations maps information about the GitHub pull // request for a commit and branch. The key is a path of the form // `/`, where `ref-path` is the absolute ref path, and // `commit-id` is the ID of the merged commit. 
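// For illustration (hypothetical ID, not from the upstream comment): a pull
// request that merged `commit-A` into `refs/heads/main` would be tracked
// under the key `refs/heads/main/commit-A`.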
githubPullRequestAttestations map[string]gitinterface.Hash // codeReviewApprovalAttestations stores the blob ID of a code review // approval attestation generated by or on behalf of a system like GitHub or // Gerrit for the change it applies to. The key is a path of the form // `<ref-path>/<from-id>-<to-id>/<system>`, where `ref-path` is the absolute // ref path such as `refs/heads/main` and `from-id` and `to-id` determine // how the ref in question moved. `<system>` identifies the code review // system in question. For example, the key // `refs/heads/main/<commit-A>-<tree-B>/github` indicates the approved // change updated `refs/heads/main` from `commit-A` to a commit with // `tree-B`, and that the approval originated on GitHub. codeReviewApprovalAttestations map[string]gitinterface.Hash // codeReviewApprovalIndex is stored in-memory. It maps a code review // system's approval ID to the gittuf identifier for a review, // `<ref-path>/<from-id>-<to-id>/<system>`. We need this because when a // review is dismissed, we need to unambiguously know what the review // applied to when it was first submitted, which we cannot do with the // information at the time of dismissal. This is serialized to the // attestations namespace as a special blob in the // codeReviewApprovalAttestations tree. codeReviewApprovalIndex map[string]string } // LoadCurrentAttestations inspects the repository's attestations namespace and // loads the current attestations. func LoadCurrentAttestations(repo *gitinterface.Repository) (*Attestations, error) { entry, _, err := rsl.GetLatestReferenceUpdaterEntry(repo, rsl.ForReference(Ref)) if err != nil { if !errors.Is(err, rsl.ErrRSLEntryNotFound) { return nil, err } return &Attestations{}, nil } return LoadAttestationsForEntry(repo, entry) } // LoadAttestationsForEntry loads the repository's attestations for a particular // RSL entry for the attestations namespace. func LoadAttestationsForEntry(repo *gitinterface.Repository, entry rsl.ReferenceUpdaterEntry) (*Attestations, error) { if entry.GetRefName() != Ref { return nil, rsl.ErrRSLEntryDoesNotMatchRef } attestationsRootTreeID, err := repo.GetCommitTreeID(entry.GetTargetID()) if err != nil { return nil, err } treeContents, err := repo.GetAllFilesInTree(attestationsRootTreeID) if err != nil { return nil, err } if len(treeContents) == 0 { // This happens in the initial commit for the attestations namespace, // where there are no entries in the tree yet. // This is expected, and there is nothing more to check so return a zero Attestations state.
return &Attestations{}, nil } attestations := &Attestations{ referenceAuthorizations: map[string]gitinterface.Hash{}, githubPullRequestAttestations: map[string]gitinterface.Hash{}, codeReviewApprovalAttestations: map[string]gitinterface.Hash{}, codeReviewApprovalIndex: map[string]string{}, } for name, blobID := range treeContents { switch { case strings.HasPrefix(name, referenceAuthorizationsTreeEntryName+"/"): attestations.referenceAuthorizations[strings.TrimPrefix(name, referenceAuthorizationsTreeEntryName+"/")] = blobID case strings.HasPrefix(name, githubPullRequestAttestationsTreeEntryName+"/"): attestations.githubPullRequestAttestations[strings.TrimPrefix(name, githubPullRequestAttestationsTreeEntryName+"/")] = blobID case strings.HasPrefix(name, codeReviewApprovalAttestationsTreeEntryName+"/"): attestations.codeReviewApprovalAttestations[strings.TrimPrefix(name, codeReviewApprovalAttestationsTreeEntryName+"/")] = blobID } } if blobID, has := attestations.codeReviewApprovalAttestations[codeReviewApprovalIndexTreeEntryName]; has { // Load the approval index that maps review IDs to the gittuf way of // mapping the review to a change in the repository indexContents, err := repo.ReadBlob(blobID) if err != nil { return nil, err } if err := json.Unmarshal(indexContents, &attestations.codeReviewApprovalIndex); err != nil { return nil, fmt.Errorf("unable to read current code review approval index: %w", err) } } return attestations, nil } // Commit writes the state of the attestations to the repository, creating a new // commit with the changes made. An RSL entry is also recorded for the // namespace. func (a *Attestations) Commit(repo *gitinterface.Repository, commitMessage string, signCommit bool) error { if len(commitMessage) == 0 { commitMessage = defaultCommitMessage } if len(a.codeReviewApprovalIndex) != 0 { // Create a JSON blob for the approval index indexContents, err := json.Marshal(&a.codeReviewApprovalIndex) if err != nil { return err } indexBlobID, err := repo.WriteBlob(indexContents) if err != nil { return err } a.codeReviewApprovalAttestations[codeReviewApprovalIndexTreeEntryName] = indexBlobID } treeBuilder := gitinterface.NewTreeBuilder(repo) allAttestations := []gitinterface.TreeEntry{} for name, blobID := range a.referenceAuthorizations { allAttestations = append(allAttestations, gitinterface.NewEntryBlob(path.Join(referenceAuthorizationsTreeEntryName, name), blobID)) } for name, blobID := range a.githubPullRequestAttestations { allAttestations = append(allAttestations, gitinterface.NewEntryBlob(path.Join(githubPullRequestAttestationsTreeEntryName, name), blobID)) } for name, blobID := range a.codeReviewApprovalAttestations { allAttestations = append(allAttestations, gitinterface.NewEntryBlob(path.Join(codeReviewApprovalAttestationsTreeEntryName, name), blobID)) } attestationsTreeID, err := treeBuilder.WriteTreeFromEntries(allAttestations) if err != nil { return err } priorCommitID, err := repo.GetReference(Ref) if err != nil { if !errors.Is(err, gitinterface.ErrReferenceNotFound) { return err } } newCommitID, err := repo.Commit(attestationsTreeID, Ref, commitMessage, signCommit) if err != nil { return err } // We must reset to original attestation commit if err != nil from here onwards. 
if err := rsl.NewReferenceEntry(Ref, newCommitID).Commit(repo, signCommit); err != nil { if !priorCommitID.IsZero() { return repo.ResetDueToError(err, Ref, priorCommitID) } return err } return nil } gittuf-0.9.0/internal/attestations/attestations_test.go000066400000000000000000000116451475150141000234750ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package attestations import ( "encoding/json" "path" "testing" "github.com/gittuf/gittuf/internal/gitinterface" "github.com/gittuf/gittuf/internal/rsl" "github.com/gittuf/gittuf/internal/signerverifier/dsse" "github.com/stretchr/testify/assert" ) func TestLoadCurrentAttestations(t *testing.T) { testRef := "refs/heads/main" testID := gitinterface.ZeroHash.String() testAttestation, err := NewReferenceAuthorizationForCommit(testRef, testID, testID) if err != nil { t.Fatal(err) } testEnv, err := dsse.CreateEnvelope(testAttestation) if err != nil { t.Fatal(err) } testEnvBytes, err := json.Marshal(testEnv) if err != nil { t.Fatal(err) } t.Run("no RSL entry", func(t *testing.T) { tempDir := t.TempDir() repo := gitinterface.CreateTestGitRepository(t, tempDir, false) attestations, err := LoadCurrentAttestations(repo) assert.Nil(t, err) assert.Empty(t, attestations.referenceAuthorizations) }) t.Run("with RSL entry and with an attestation", func(t *testing.T) { tempDir := t.TempDir() repo := gitinterface.CreateTestGitRepository(t, tempDir, false) blobID, err := repo.WriteBlob(testEnvBytes) if err != nil { t.Fatal(err) } authorizations := map[string]gitinterface.Hash{ReferenceAuthorizationPath(testRef, testID, testID): blobID} attestations := &Attestations{referenceAuthorizations: authorizations} if err := attestations.Commit(repo, "Test commit", false); err != nil { t.Fatal(err) } attestations, err = LoadCurrentAttestations(repo) assert.Nil(t, err) assert.Equal(t, authorizations, attestations.referenceAuthorizations) }) } func TestLoadAttestationsForEntry(t *testing.T) { testRef := "refs/heads/main" testID := gitinterface.ZeroHash.String() testAttestation, err := NewReferenceAuthorizationForCommit(testRef, testID, testID) if err != nil { t.Fatal(err) } testEnv, err := dsse.CreateEnvelope(testAttestation) if err != nil { t.Fatal(err) } testEnvBytes, err := json.Marshal(testEnv) if err != nil { t.Fatal(err) } t.Run("with RSL entry and no an attestation", func(t *testing.T) { tempDir := t.TempDir() repo := gitinterface.CreateTestGitRepository(t, tempDir, false) authorizations := map[string]gitinterface.Hash{} attestations := &Attestations{referenceAuthorizations: authorizations} if err := attestations.Commit(repo, "Test commit", false); err != nil { t.Fatal(err) } entry, err := rsl.GetLatestEntry(repo) if err != nil { t.Fatal(err) } attestations, err = LoadAttestationsForEntry(repo, entry.(*rsl.ReferenceEntry)) assert.Nil(t, err) assert.Empty(t, attestations.referenceAuthorizations) }) t.Run("with RSL entry and with an attestation", func(t *testing.T) { tempDir := t.TempDir() repo := gitinterface.CreateTestGitRepository(t, tempDir, false) blobID, err := repo.WriteBlob(testEnvBytes) if err != nil { t.Fatal(err) } authorizations := map[string]gitinterface.Hash{ReferenceAuthorizationPath(testRef, testID, testID): blobID} attestations := &Attestations{referenceAuthorizations: authorizations} if err := attestations.Commit(repo, "Test commit", false); err != nil { t.Fatal(err) } entry, err := rsl.GetLatestEntry(repo) if err != nil { t.Fatal(err) } attestations, err = LoadAttestationsForEntry(repo, 
entry.(*rsl.ReferenceEntry)) assert.Nil(t, err) assert.Equal(t, authorizations, attestations.referenceAuthorizations) }) } func TestAttestationsCommit(t *testing.T) { testRef := "refs/heads/main" testID := gitinterface.ZeroHash.String() testAttestation, err := NewReferenceAuthorizationForCommit(testRef, testID, testID) if err != nil { t.Fatal(err) } testEnv, err := dsse.CreateEnvelope(testAttestation) if err != nil { t.Fatal(err) } testEnvBytes, err := json.Marshal(testEnv) if err != nil { t.Fatal(err) } tempDir := t.TempDir() repo := gitinterface.CreateTestGitRepository(t, tempDir, false) blobID, err := repo.WriteBlob(testEnvBytes) if err != nil { t.Fatal(err) } authorizations := map[string]gitinterface.Hash{ReferenceAuthorizationPath(testRef, testID, testID): blobID} attestations := &Attestations{referenceAuthorizations: authorizations} treeBuilder := gitinterface.NewTreeBuilder(repo) expectedTreeID, err := treeBuilder.WriteTreeFromEntries([]gitinterface.TreeEntry{gitinterface.NewEntryBlob(path.Join(referenceAuthorizationsTreeEntryName, ReferenceAuthorizationPath(testRef, testID, testID)), blobID)}) if err != nil { t.Fatal(err) } if err := attestations.Commit(repo, "Test commit", false); err != nil { t.Error(err) } currentTip, err := repo.GetReference(Ref) if err != nil { t.Fatal(err) } currentTreeID, err := repo.GetCommitTreeID(currentTip) if err != nil { t.Fatal(err) } assert.Equal(t, expectedTreeID, currentTreeID) attestations, err = LoadCurrentAttestations(repo) assert.Nil(t, err) assert.Equal(t, attestations.referenceAuthorizations, authorizations) } gittuf-0.9.0/internal/attestations/authorization.go000066400000000000000000000122321475150141000226050ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package attestations import ( "encoding/json" "fmt" "path" "github.com/gittuf/gittuf/internal/attestations/authorizations" authorizationsv01 "github.com/gittuf/gittuf/internal/attestations/authorizations/v01" authorizationsv02 "github.com/gittuf/gittuf/internal/attestations/authorizations/v02" "github.com/gittuf/gittuf/internal/gitinterface" sslibdsse "github.com/gittuf/gittuf/internal/third_party/go-securesystemslib/dsse" ita "github.com/in-toto/attestation/go/v1" ) // NewReferenceAuthorizationForCommit creates a new reference authorization for // the provided information. The authorization is embedded in an in-toto // "statement" and returned with the appropriate "predicate type" set. The // `fromID` and `toID` specify the change to `targetRef` that is to be // authorized by invoking this function. Since this is for a commit, the `toID` // is expected to be a Git tree ID. func NewReferenceAuthorizationForCommit(targetRef, fromID, toID string) (*ita.Statement, error) { return authorizationsv02.NewReferenceAuthorizationForCommit(targetRef, fromID, toID) } // NewReferenceAuthorizationForTag creates a new reference authorization for the // provided information. The authorization is embedded in an in-toto "statement" // and returned with the appropriate "predicate type" set. The `fromID` and // `toID` specify the change to `targetRef` that is to be authorized by invoking // this function. Since this is for a tag, the `toID` is expected to be a Git // commit ID. 
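// A minimal usage sketch (hypothetical tag ref and IDs; error handling
// elided), assuming an open `repo` and a loaded `attestations` value: the
// returned statement is typically wrapped in a DSSE envelope, signed, and then
// recorded with SetReferenceAuthorization, e.g.
//
//	statement, _ := NewReferenceAuthorizationForTag("refs/tags/v1", fromID, toID)
//	env, _ := dsse.CreateEnvelope(statement) // dsse = internal/signerverifier/dsse; sign the envelope before use in practice
//	_ = attestations.SetReferenceAuthorization(repo, env, "refs/tags/v1", fromID, toID)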
func NewReferenceAuthorizationForTag(targetRef, fromID, toID string) (*ita.Statement, error) { return authorizationsv02.NewReferenceAuthorizationForTag(targetRef, fromID, toID) } // SetReferenceAuthorization writes the new reference authorization attestation // to the object store and tracks it in the current attestations state. func (a *Attestations) SetReferenceAuthorization(repo *gitinterface.Repository, env *sslibdsse.Envelope, refName, fromID, toID string) error { payloadBytes, err := env.DecodeB64Payload() if err != nil { return fmt.Errorf("unable to inspect reference authorization: %w", err) } inspectAuthorization := map[string]any{} if err := json.Unmarshal(payloadBytes, &inspectAuthorization); err != nil { return fmt.Errorf("unable to inspect reference authorization: %w", err) } switch inspectAuthorization["predicate_type"] { case authorizationsv01.PredicateType: if err := authorizationsv01.Validate(env, refName, fromID, toID); err != nil { return err } case authorizationsv02.PredicateType: if err := authorizationsv02.Validate(env, refName, fromID, toID); err != nil { return err } default: return authorizations.ErrUnknownAuthorizationVersion } envBytes, err := json.Marshal(env) if err != nil { return err } blobID, err := repo.WriteBlob(envBytes) if err != nil { return err } if a.referenceAuthorizations == nil { a.referenceAuthorizations = map[string]gitinterface.Hash{} } a.referenceAuthorizations[ReferenceAuthorizationPath(refName, fromID, toID)] = blobID return nil } // RemoveReferenceAuthorization removes a set reference authorization // attestation entirely. The object, however, isn't removed from the object // store as prior states may still need it. func (a *Attestations) RemoveReferenceAuthorization(refName, fromID, toID string) error { authPath := ReferenceAuthorizationPath(refName, fromID, toID) if _, has := a.referenceAuthorizations[authPath]; !has { return authorizations.ErrAuthorizationNotFound } delete(a.referenceAuthorizations, authPath) return nil } // GetReferenceAuthorizationFor returns the requested reference authorization // attestation (with its signatures). func (a *Attestations) GetReferenceAuthorizationFor(repo *gitinterface.Repository, refName, fromID, toID string) (*sslibdsse.Envelope, error) { blobID, has := a.referenceAuthorizations[ReferenceAuthorizationPath(refName, fromID, toID)] if !has { return nil, authorizations.ErrAuthorizationNotFound } envBytes, err := repo.ReadBlob(blobID) if err != nil { return nil, err } env := &sslibdsse.Envelope{} if err := json.Unmarshal(envBytes, env); err != nil { return nil, err } payloadBytes, err := env.DecodeB64Payload() if err != nil { return nil, fmt.Errorf("unable to inspect reference authorization: %w", err) } inspectAuthorization := map[string]any{} if err := json.Unmarshal(payloadBytes, &inspectAuthorization); err != nil { return nil, fmt.Errorf("unable to inspect reference authorization: %w", err) } switch inspectAuthorization["predicate_type"] { case authorizationsv01.PredicateType: if err := authorizationsv01.Validate(env, refName, fromID, toID); err != nil { return nil, err } case authorizationsv02.PredicateType: if err := authorizationsv02.Validate(env, refName, fromID, toID); err != nil { return nil, err } default: return nil, authorizations.ErrUnknownAuthorizationVersion } return env, nil } // ReferenceAuthorizationPath constructs the expected path on-disk for the // reference authorization attestation. 
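// For example (hypothetical IDs),
// ReferenceAuthorizationPath("refs/heads/main", "commit-A", "tree-B") returns
// "refs/heads/main/commit-A-tree-B".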
func ReferenceAuthorizationPath(refName, fromID, toID string) string { return path.Join(refName, fmt.Sprintf("%s-%s", fromID, toID)) } gittuf-0.9.0/internal/attestations/authorization_test.go000066400000000000000000000163731475150141000236560ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package attestations import ( "testing" "github.com/gittuf/gittuf/internal/gitinterface" "github.com/gittuf/gittuf/internal/signerverifier/dsse" sslibdsse "github.com/gittuf/gittuf/internal/third_party/go-securesystemslib/dsse" ita "github.com/in-toto/attestation/go/v1" "github.com/stretchr/testify/assert" ) func TestSetReferenceAuthorization(t *testing.T) { t.Run("for commit", func(t *testing.T) { testRef := "refs/heads/main" testAnotherRef := "refs/heads/feature" testID := gitinterface.ZeroHash.String() mainZeroZero := createReferenceAuthorizationAttestationEnvelopes(t, testRef, testID, testID, false) featureZeroZero := createReferenceAuthorizationAttestationEnvelopes(t, testAnotherRef, testID, testID, false) tempDir := t.TempDir() repo := gitinterface.CreateTestGitRepository(t, tempDir, false) attestations := &Attestations{} // Add auth for first branch err := attestations.SetReferenceAuthorization(repo, mainZeroZero, testRef, testID, testID) assert.Nil(t, err) assert.Contains(t, attestations.referenceAuthorizations, ReferenceAuthorizationPath(testRef, testID, testID)) assert.NotContains(t, attestations.referenceAuthorizations, ReferenceAuthorizationPath(testAnotherRef, testID, testID)) // Add auth for the other branch err = attestations.SetReferenceAuthorization(repo, featureZeroZero, testAnotherRef, testID, testID) assert.Nil(t, err) assert.Contains(t, attestations.referenceAuthorizations, ReferenceAuthorizationPath(testRef, testID, testID)) assert.Contains(t, attestations.referenceAuthorizations, ReferenceAuthorizationPath(testAnotherRef, testID, testID)) }) t.Run("for tag", func(t *testing.T) { tagRef := "refs/tags/v1" testID := gitinterface.ZeroHash.String() tagApproval := createReferenceAuthorizationAttestationEnvelopes(t, tagRef, testID, testID, true) tempDir := t.TempDir() repo := gitinterface.CreateTestGitRepository(t, tempDir, false) attestations := &Attestations{} err := attestations.SetReferenceAuthorization(repo, tagApproval, tagRef, testID, testID) assert.Nil(t, err) assert.Contains(t, attestations.referenceAuthorizations, ReferenceAuthorizationPath(tagRef, testID, testID)) }) } func TestRemoveReferenceAuthorization(t *testing.T) { t.Run("for commit", func(t *testing.T) { testRef := "refs/heads/main" testAnotherRef := "refs/heads/feature" testID := gitinterface.ZeroHash.String() mainZeroZero := createReferenceAuthorizationAttestationEnvelopes(t, testRef, testID, testID, false) featureZeroZero := createReferenceAuthorizationAttestationEnvelopes(t, testAnotherRef, testID, testID, false) tempDir := t.TempDir() repo := gitinterface.CreateTestGitRepository(t, tempDir, false) attestations := &Attestations{} err := attestations.SetReferenceAuthorization(repo, mainZeroZero, testRef, testID, testID) if err != nil { t.Fatal(err) } assert.Contains(t, attestations.referenceAuthorizations, ReferenceAuthorizationPath(testRef, testID, testID)) assert.NotContains(t, attestations.referenceAuthorizations, ReferenceAuthorizationPath(testAnotherRef, testID, testID)) err = attestations.SetReferenceAuthorization(repo, featureZeroZero, testAnotherRef, testID, testID) if err != nil { t.Fatal(err) } assert.Contains(t, attestations.referenceAuthorizations, 
ReferenceAuthorizationPath(testRef, testID, testID)) assert.Contains(t, attestations.referenceAuthorizations, ReferenceAuthorizationPath(testAnotherRef, testID, testID)) err = attestations.RemoveReferenceAuthorization(testAnotherRef, testID, testID) assert.Nil(t, err) assert.Contains(t, attestations.referenceAuthorizations, ReferenceAuthorizationPath(testRef, testID, testID)) assert.NotContains(t, attestations.referenceAuthorizations, ReferenceAuthorizationPath(testAnotherRef, testID, testID)) err = attestations.RemoveReferenceAuthorization(testRef, testID, testID) assert.Nil(t, err) assert.NotContains(t, attestations.referenceAuthorizations, ReferenceAuthorizationPath(testRef, testID, testID)) assert.NotContains(t, attestations.referenceAuthorizations, ReferenceAuthorizationPath(testAnotherRef, testID, testID)) }) t.Run("for tag", func(t *testing.T) { tagRef := "refs/tags/v1" testID := gitinterface.ZeroHash.String() tagApproval := createReferenceAuthorizationAttestationEnvelopes(t, tagRef, testID, testID, true) tempDir := t.TempDir() repo := gitinterface.CreateTestGitRepository(t, tempDir, false) attestations := &Attestations{} err := attestations.SetReferenceAuthorization(repo, tagApproval, tagRef, testID, testID) if err != nil { t.Fatal(err) } assert.Contains(t, attestations.referenceAuthorizations, ReferenceAuthorizationPath(tagRef, testID, testID)) err = attestations.RemoveReferenceAuthorization(tagRef, testID, testID) assert.Nil(t, err) assert.Empty(t, attestations.referenceAuthorizations) }) } func TestGetReferenceAuthorizationFor(t *testing.T) { t.Run("for commit", func(t *testing.T) { testRef := "refs/heads/main" testAnotherRef := "refs/heads/feature" testID := gitinterface.ZeroHash.String() mainZeroZero := createReferenceAuthorizationAttestationEnvelopes(t, testRef, testID, testID, false) featureZeroZero := createReferenceAuthorizationAttestationEnvelopes(t, testAnotherRef, testID, testID, false) tempDir := t.TempDir() repo := gitinterface.CreateTestGitRepository(t, tempDir, false) attestations := &Attestations{} err := attestations.SetReferenceAuthorization(repo, mainZeroZero, testRef, testID, testID) if err != nil { t.Fatal(err) } err = attestations.SetReferenceAuthorization(repo, featureZeroZero, testAnotherRef, testID, testID) if err != nil { t.Fatal(err) } mainAuth, err := attestations.GetReferenceAuthorizationFor(repo, testRef, testID, testID) assert.Nil(t, err) assert.Equal(t, mainZeroZero, mainAuth) featureAuth, err := attestations.GetReferenceAuthorizationFor(repo, testAnotherRef, testID, testID) assert.Nil(t, err) assert.Equal(t, featureZeroZero, featureAuth) }) t.Run("for tag", func(t *testing.T) { tagRef := "refs/tags/v1" testID := gitinterface.ZeroHash.String() tagApproval := createReferenceAuthorizationAttestationEnvelopes(t, tagRef, testID, testID, true) tempDir := t.TempDir() repo := gitinterface.CreateTestGitRepository(t, tempDir, false) attestations := &Attestations{} err := attestations.SetReferenceAuthorization(repo, tagApproval, tagRef, testID, testID) if err != nil { t.Fatal(err) } assert.Contains(t, attestations.referenceAuthorizations, ReferenceAuthorizationPath(tagRef, testID, testID)) tagApprovalFetched, err := attestations.GetReferenceAuthorizationFor(repo, tagRef, testID, testID) assert.Nil(t, err) assert.Equal(t, tagApproval, tagApprovalFetched) }) } func createReferenceAuthorizationAttestationEnvelopes(t *testing.T, refName, fromID, toID string, tag bool) *sslibdsse.Envelope { t.Helper() var ( authorization *ita.Statement err error ) if tag { 
authorization, err = NewReferenceAuthorizationForTag(refName, fromID, toID) } else { authorization, err = NewReferenceAuthorizationForCommit(refName, fromID, toID) } if err != nil { t.Fatal(err) } env, err := dsse.CreateEnvelope(authorization) if err != nil { t.Fatal(err) } return env } gittuf-0.9.0/internal/attestations/authorizations/000077500000000000000000000000001475150141000224415ustar00rootroot00000000000000gittuf-0.9.0/internal/attestations/authorizations/authorizations.go000066400000000000000000000016771475150141000260660ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package authorizations import "errors" var ( ErrInvalidAuthorization = errors.New("authorization attestation does not match expected details") ErrAuthorizationNotFound = errors.New("requested authorization not found") ErrUnknownAuthorizationVersion = errors.New("unknown reference authorization version") ) // ReferenceAuthorization represents an attestation that approves a change to a // reference. type ReferenceAuthorization interface { // GetRef returns the reference for the change approved by the attestation. GetRef() string // GetFromID returns the Git ID of the reference prior to the change. GetFromID() string // GetTargetID returns the Git ID of the reference after the change is // applied. Note that this is typically something that can be pre-computed, // such as the Git tree ID for a merge that has not happened yet. GetTargetID() string } gittuf-0.9.0/internal/attestations/authorizations/v01/000077500000000000000000000000001475150141000230475ustar00rootroot00000000000000gittuf-0.9.0/internal/attestations/authorizations/v01/v01.go000066400000000000000000000056531475150141000240150ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package v01 import ( "encoding/json" "github.com/gittuf/gittuf/internal/attestations/authorizations" "github.com/gittuf/gittuf/internal/attestations/common" sslibdsse "github.com/gittuf/gittuf/internal/third_party/go-securesystemslib/dsse" ita "github.com/in-toto/attestation/go/v1" ) const ( PredicateType = "https://gittuf.dev/reference-authorization/v0.1" digestGitTreeKey = "gitTree" targetRefKey = "targetRef" fromRevisionIDKey = "fromRevisionID" targetTreeIDKey = "targetTreeID" ) // ReferenceAuthorization is a lightweight record of a detached authorization in // a gittuf repository. It is meant to be used as a "predicate" in an in-toto // attestation. type ReferenceAuthorization struct { TargetRef string `json:"targetRef"` FromRevisionID string `json:"fromRevisionID"` TargetTreeID string `json:"targetTreeID"` } func (r *ReferenceAuthorization) GetRef() string { return r.TargetRef } func (r *ReferenceAuthorization) GetFromID() string { return r.FromRevisionID } func (r *ReferenceAuthorization) GetTargetID() string { return r.TargetTreeID } // NewReferenceAuthorization creates a new reference authorization for the // provided information. The authorization is embedded in an in-toto "statement" // and returned with the appropriate "predicate type" set. The `fromRevisionID` // and `targetTreeID` specify the change to `targetRef` that is to be authorized // by invoking this function. 
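//
// A minimal sketch of creating and signing a v0.1 authorization (the IDs are
// placeholders assumed to be supplied by the caller):
//
//	statement, err := NewReferenceAuthorization("refs/heads/main", fromRevisionID, targetTreeID)
//	if err != nil {
//		return err
//	}
//	env, err := dsse.CreateEnvelope(statement) // as in the package tests; sign before storing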
func NewReferenceAuthorization(targetRef, fromRevisionID, targetTreeID string) (*ita.Statement, error) { predicate := &ReferenceAuthorization{ TargetRef: targetRef, FromRevisionID: fromRevisionID, TargetTreeID: targetTreeID, } predicateStruct, err := common.PredicateToPBStruct(predicate) if err != nil { return nil, err } return &ita.Statement{ Type: ita.StatementTypeUri, Subject: []*ita.ResourceDescriptor{ { Digest: map[string]string{digestGitTreeKey: targetTreeID}, }, }, PredicateType: PredicateType, Predicate: predicateStruct, }, nil } // Validate checks that the returned envelope contains the expected in-toto // attestation and predicate contents. func Validate(env *sslibdsse.Envelope, targetRef, fromRevisionID, targetTreeID string) error { payload, err := env.DecodeB64Payload() if err != nil { return err } attestation := &ita.Statement{} if err := json.Unmarshal(payload, attestation); err != nil { return err } if attestation.Subject[0].Digest[digestGitTreeKey] != targetTreeID { return authorizations.ErrInvalidAuthorization } predicate := attestation.Predicate.AsMap() if predicate[targetTreeIDKey] != targetTreeID { return authorizations.ErrInvalidAuthorization } if predicate[fromRevisionIDKey] != fromRevisionID { return authorizations.ErrInvalidAuthorization } if predicate[targetRefKey] != targetRef { return authorizations.ErrInvalidAuthorization } return nil } gittuf-0.9.0/internal/attestations/authorizations/v01/v01_test.go000066400000000000000000000042071475150141000250460ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package v01 import ( "testing" "github.com/gittuf/gittuf/internal/attestations/authorizations" "github.com/gittuf/gittuf/internal/gitinterface" "github.com/gittuf/gittuf/internal/signerverifier/dsse" sslibdsse "github.com/gittuf/gittuf/internal/third_party/go-securesystemslib/dsse" ita "github.com/in-toto/attestation/go/v1" "github.com/stretchr/testify/assert" ) func TestNewReferenceAuthorization(t *testing.T) { testRef := "refs/heads/main" testID := gitinterface.ZeroHash.String() authorization, err := NewReferenceAuthorization(testRef, testID, testID) assert.Nil(t, err) // Check value of statement type assert.Equal(t, ita.StatementTypeUri, authorization.Type) // Check subject contents assert.Equal(t, 1, len(authorization.Subject)) assert.Contains(t, authorization.Subject[0].Digest, digestGitTreeKey) assert.Equal(t, authorization.Subject[0].Digest[digestGitTreeKey], testID) // Check predicate type assert.Equal(t, PredicateType, authorization.PredicateType) // Check predicate predicate := authorization.Predicate.AsMap() assert.Equal(t, predicate[targetRefKey], testRef) assert.Equal(t, predicate[targetTreeIDKey], testID) assert.Equal(t, predicate[fromRevisionIDKey], testID) } func TestValidate(t *testing.T) { testRef := "refs/heads/main" testAnotherRef := "refs/heads/feature" testID := gitinterface.ZeroHash.String() mainZeroZero := createTestEnvelope(t, testRef, testID, testID) featureZeroZero := createTestEnvelope(t, testAnotherRef, testID, testID) err := Validate(mainZeroZero, testRef, testID, testID) assert.Nil(t, err) err = Validate(featureZeroZero, testAnotherRef, testID, testID) assert.Nil(t, err) err = Validate(mainZeroZero, testAnotherRef, testID, testID) assert.ErrorIs(t, err, authorizations.ErrInvalidAuthorization) } func createTestEnvelope(t *testing.T, refName, fromID, toID string) *sslibdsse.Envelope { t.Helper() authorization, err := NewReferenceAuthorization(refName, fromID, toID) if err != nil { 
t.Fatal(err) } env, err := dsse.CreateEnvelope(authorization) if err != nil { t.Fatal(err) } return env } gittuf-0.9.0/internal/attestations/authorizations/v02/000077500000000000000000000000001475150141000230505ustar00rootroot00000000000000gittuf-0.9.0/internal/attestations/authorizations/v02/v02.go000066400000000000000000000107511475150141000240120ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package v02 import ( "encoding/json" "strings" "github.com/gittuf/gittuf/internal/attestations/authorizations" "github.com/gittuf/gittuf/internal/attestations/common" "github.com/gittuf/gittuf/internal/gitinterface" sslibdsse "github.com/gittuf/gittuf/internal/third_party/go-securesystemslib/dsse" ita "github.com/in-toto/attestation/go/v1" "google.golang.org/protobuf/types/known/structpb" ) const ( PredicateType = "https://gittuf.dev/reference-authorization/v0.2" digestGitTreeKey = "gitTree" digestGitCommitKey = "gitCommit" targetRefKey = "targetRef" fromIDKey = "fromID" targetIDKey = "targetID" ) // ReferenceAuthorization is a lightweight record of a detached authorization in // a gittuf repository. It is meant to be used as a "predicate" in an in-toto // attestation. type ReferenceAuthorization struct { TargetRef string `json:"targetRef"` FromID string `json:"fromID"` TargetID string `json:"targetID"` } func (r *ReferenceAuthorization) GetRef() string { return r.TargetRef } func (r *ReferenceAuthorization) GetFromID() string { return r.FromID } func (r *ReferenceAuthorization) GetTargetID() string { return r.TargetID } // NewReferenceAuthorizationForCommit creates a new reference authorization for // the provided information. The authorization is embedded in an in-toto // "statement" and returned with the appropriate "predicate type" set. The // `fromID` and `targetID` specify the change to `targetRef` that is to be // authorized by invoking this function. The targetID is expected to be the Git // tree ID of the resultant commit. func NewReferenceAuthorizationForCommit(targetRef, fromID, targetID string) (*ita.Statement, error) { predicateStruct, err := newReferenceAuthorizationStruct(targetRef, fromID, targetID) if err != nil { return nil, err } return &ita.Statement{ Type: ita.StatementTypeUri, Subject: []*ita.ResourceDescriptor{ { Digest: map[string]string{digestGitTreeKey: targetID}, }, }, PredicateType: PredicateType, Predicate: predicateStruct, }, nil } // NewReferenceAuthorizationForTag creates a new reference authorization for the // provided information. The authorization is embedded in an in-toto "statement" // and returned with the appropriate "predicate type" set. The `fromID` and // `targetID` specify the change to `targetRef` that is to be authorized by // invoking this function. The targetID is expected to be the ID of the commit // the tag will point to. func NewReferenceAuthorizationForTag(targetRef, fromID, targetID string) (*ita.Statement, error) { predicateStruct, err := newReferenceAuthorizationStruct(targetRef, fromID, targetID) if err != nil { return nil, err } return &ita.Statement{ Type: ita.StatementTypeUri, Subject: []*ita.ResourceDescriptor{ { Digest: map[string]string{digestGitCommitKey: targetID}, }, }, PredicateType: PredicateType, Predicate: predicateStruct, }, nil } // Validate checks that the returned envelope contains the expected in-toto // attestation and predicate contents. 
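//
// A hedged sketch of the expected call, assuming `env` is a DSSE envelope
// produced for one of the constructors above and the IDs match that envelope:
//
//	if err := Validate(env, "refs/heads/main", fromID, targetTreeID); err != nil {
//		return err // envelope does not authorize this exact change
//	}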
func Validate(env *sslibdsse.Envelope, targetRef, fromID, targetID string) error { payload, err := env.DecodeB64Payload() if err != nil { return err } attestation := &ita.Statement{} if err := json.Unmarshal(payload, attestation); err != nil { return err } subjectDigest, hasGitTree := attestation.Subject[0].Digest[digestGitTreeKey] if hasGitTree { if subjectDigest != targetID { return authorizations.ErrInvalidAuthorization } } else { subjectDigest, hasGitCommit := attestation.Subject[0].Digest[digestGitCommitKey] if !hasGitCommit { return authorizations.ErrInvalidAuthorization } if subjectDigest != targetID { return authorizations.ErrInvalidAuthorization } if !strings.HasPrefix(targetRef, gitinterface.TagRefPrefix) { return authorizations.ErrInvalidAuthorization } } predicate := attestation.Predicate.AsMap() if predicate[targetIDKey] != targetID { return authorizations.ErrInvalidAuthorization } if predicate[fromIDKey] != fromID { return authorizations.ErrInvalidAuthorization } if predicate[targetRefKey] != targetRef { return authorizations.ErrInvalidAuthorization } return nil } func newReferenceAuthorizationStruct(targetRef, fromID, targetID string) (*structpb.Struct, error) { predicate := &ReferenceAuthorization{ TargetRef: targetRef, FromID: fromID, TargetID: targetID, } return common.PredicateToPBStruct(predicate) } gittuf-0.9.0/internal/attestations/authorizations/v02/v02_test.go000066400000000000000000000107551475150141000250550ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package v02 import ( "testing" "github.com/gittuf/gittuf/internal/attestations/authorizations" "github.com/gittuf/gittuf/internal/gitinterface" "github.com/gittuf/gittuf/internal/signerverifier/dsse" sslibdsse "github.com/gittuf/gittuf/internal/third_party/go-securesystemslib/dsse" ita "github.com/in-toto/attestation/go/v1" "github.com/stretchr/testify/assert" ) func TestNewReferenceAuthorization(t *testing.T) { t.Run("for commit", func(t *testing.T) { testRef := "refs/heads/main" testID := gitinterface.ZeroHash.String() authorization, err := NewReferenceAuthorizationForCommit(testRef, testID, testID) assert.Nil(t, err) // Check value of statement type assert.Equal(t, ita.StatementTypeUri, authorization.Type) // Check subject contents assert.Equal(t, 1, len(authorization.Subject)) assert.Contains(t, authorization.Subject[0].Digest, digestGitTreeKey) assert.Equal(t, authorization.Subject[0].Digest[digestGitTreeKey], testID) // Check predicate type assert.Equal(t, PredicateType, authorization.PredicateType) // Check predicate predicate := authorization.Predicate.AsMap() assert.Equal(t, predicate[targetRefKey], testRef) assert.Equal(t, predicate[targetIDKey], testID) assert.Equal(t, predicate[fromIDKey], testID) }) t.Run("for tag", func(t *testing.T) { testRef := "refs/heads/main" testID := gitinterface.ZeroHash.String() authorization, err := NewReferenceAuthorizationForTag(testRef, testID, testID) assert.Nil(t, err) // Check value of statement type assert.Equal(t, ita.StatementTypeUri, authorization.Type) // Check subject contents assert.Equal(t, 1, len(authorization.Subject)) assert.Contains(t, authorization.Subject[0].Digest, digestGitCommitKey) assert.Equal(t, authorization.Subject[0].Digest[digestGitCommitKey], testID) // Check predicate type assert.Equal(t, PredicateType, authorization.PredicateType) // Check predicate predicate := authorization.Predicate.AsMap() assert.Equal(t, predicate[targetRefKey], testRef) assert.Equal(t, predicate[targetIDKey], testID) 
assert.Equal(t, predicate[fromIDKey], testID) }) } func TestValidate(t *testing.T) { t.Run("for commit", func(t *testing.T) { testRef := "refs/heads/main" testAnotherRef := "refs/heads/feature" testID := gitinterface.ZeroHash.String() mainZeroZero := createTestEnvelope(t, testRef, testID, testID, false) featureZeroZero := createTestEnvelope(t, testAnotherRef, testID, testID, false) err := Validate(mainZeroZero, testRef, testID, testID) assert.Nil(t, err) err = Validate(featureZeroZero, testAnotherRef, testID, testID) assert.Nil(t, err) err = Validate(mainZeroZero, testAnotherRef, testID, testID) assert.ErrorIs(t, err, authorizations.ErrInvalidAuthorization) }) t.Run("for tag", func(t *testing.T) { testRef := "refs/tags/v1" testID := gitinterface.ZeroHash.String() authorization := createTestEnvelope(t, testRef, testID, testID, true) err := Validate(authorization, testRef, testID, testID) assert.Nil(t, err) }) t.Run("invalid subject", func(t *testing.T) { testRef := "refs/heads/main" testID := gitinterface.ZeroHash.String() authorization, err := NewReferenceAuthorizationForCommit(testRef, testID, testID) if err != nil { t.Fatal(err) } authorization.Subject[0].Digest["garbage"] = authorization.Subject[0].Digest[digestGitTreeKey] delete(authorization.Subject[0].Digest, digestGitTreeKey) env, err := dsse.CreateEnvelope(authorization) if err != nil { t.Fatal(err) } err = Validate(env, testRef, testID, testID) assert.ErrorIs(t, err, authorizations.ErrInvalidAuthorization) }) t.Run("mismatch ref (non tag) and subject digest key (commit)", func(t *testing.T) { testRef := "refs/heads/main" testID := gitinterface.ZeroHash.String() authorization := createTestEnvelope(t, testRef, testID, testID, true) err := Validate(authorization, testRef, testID, testID) assert.ErrorIs(t, err, authorizations.ErrInvalidAuthorization) }) } func createTestEnvelope(t *testing.T, refName, fromID, toID string, tag bool) *sslibdsse.Envelope { t.Helper() var ( authorization *ita.Statement err error ) if tag { authorization, err = NewReferenceAuthorizationForTag(refName, fromID, toID) } else { authorization, err = NewReferenceAuthorizationForCommit(refName, fromID, toID) } if err != nil { t.Fatal(err) } env, err := dsse.CreateEnvelope(authorization) if err != nil { t.Fatal(err) } return env } gittuf-0.9.0/internal/attestations/common/000077500000000000000000000000001475150141000206465ustar00rootroot00000000000000gittuf-0.9.0/internal/attestations/common/common.go000066400000000000000000000010011475150141000224550ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package common import ( "encoding/json" "google.golang.org/protobuf/types/known/structpb" ) func PredicateToPBStruct(predicate any) (*structpb.Struct, error) { predicateBytes, err := json.Marshal(predicate) if err != nil { return nil, err } predicateInterface := &map[string]any{} if err := json.Unmarshal(predicateBytes, predicateInterface); err != nil { return nil, err } return structpb.NewStruct(*predicateInterface) } gittuf-0.9.0/internal/attestations/github.go000066400000000000000000000177601475150141000212020ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package attestations import ( "encoding/base64" "encoding/json" "errors" "fmt" "net/url" "path" "github.com/gittuf/gittuf/internal/attestations/github" githubv01 "github.com/gittuf/gittuf/internal/attestations/github/v01" "github.com/gittuf/gittuf/internal/gitinterface" sslibdsse 
"github.com/gittuf/gittuf/internal/third_party/go-securesystemslib/dsse" gogithub "github.com/google/go-github/v61/github" ita "github.com/in-toto/attestation/go/v1" ) func NewGitHubPullRequestAttestation(owner, repository string, pullRequestNumber int, commitID string, pullRequest *gogithub.PullRequest) (*ita.Statement, error) { return githubv01.NewPullRequestAttestation(owner, repository, pullRequestNumber, commitID, pullRequest) } func (a *Attestations) SetGitHubPullRequestAuthorization(repo *gitinterface.Repository, env *sslibdsse.Envelope, targetRefName, commitID string) error { envBytes, err := json.Marshal(env) if err != nil { return err } blobID, err := repo.WriteBlob(envBytes) if err != nil { return err } if a.githubPullRequestAttestations == nil { a.githubPullRequestAttestations = map[string]gitinterface.Hash{} } a.githubPullRequestAttestations[GitHubPullRequestAttestationPath(targetRefName, commitID)] = blobID return nil } // GitHubPullRequestAttestationPath constructs the expected path on-disk for the // GitHub pull request attestation. func GitHubPullRequestAttestationPath(refName, commitID string) string { return path.Join(refName, commitID) } // NewGitHubPullRequestApprovalAttestation creates a new GitHub pull request // approval attestation for the provided information. The attestation is // embedded in an in-toto "statement" and returned with the appropriate // "predicate type" set. The `fromTargetID` and `toTargetID` specify the change // to `targetRef` that is approved on the corresponding GitHub pull request. func NewGitHubPullRequestApprovalAttestation(targetRef, fromRevisionID, targetTreeID string, approvers, dismissedApprovers []string) (*ita.Statement, error) { return githubv01.NewPullRequestApprovalAttestation(targetRef, fromRevisionID, targetTreeID, approvers, dismissedApprovers) } // SetGitHubPullRequestApprovalAttestation writes the new GitHub pull request // approval attestation to the object store and tracks it in the current // attestations state. The refName, fromRevisionID, targetTreeID parameters are // used to construct an indexPath. The hostURL and reviewID are together mapped // to the indexPath so that if the review is dismissed later, the corresponding // attestation can be updated. 
func (a *Attestations) SetGitHubPullRequestApprovalAttestation(repo *gitinterface.Repository, env *sslibdsse.Envelope, hostURL string, reviewID int64, appName, refName, fromRevisionID, targetTreeID string) error { // TODO: this will be updated to support validating different versions if err := githubv01.ValidatePullRequestApproval(env, refName, fromRevisionID, targetTreeID); err != nil { return errors.Join(github.ErrInvalidPullRequestApprovalAttestation, err) } envBytes, err := json.Marshal(env) if err != nil { return err } blobID, err := repo.WriteBlob(envBytes) if err != nil { return err } if a.codeReviewApprovalAttestations == nil { a.codeReviewApprovalAttestations = map[string]gitinterface.Hash{} } if a.codeReviewApprovalIndex == nil { a.codeReviewApprovalIndex = map[string]string{} } indexPath := GitHubPullRequestApprovalAttestationPath(refName, fromRevisionID, targetTreeID) // We URL encode the appName to make it appropriate for an on-disk path blobPath := path.Join(indexPath, base64.URLEncoding.EncodeToString([]byte(appName))) // Note the distinction between indexPath and blobPath // We don't have this for reference authorizations // indexPath is of the form "/-/github" // blobPath is a specific entry in the indexPath tree, for the app recording // the attestation a.codeReviewApprovalAttestations[blobPath] = blobID githubReviewID, err := GitHubReviewID(hostURL, reviewID) if err != nil { return err } if existingIndexPath, has := a.codeReviewApprovalIndex[githubReviewID]; has { if existingIndexPath != indexPath { return github.ErrInvalidPullRequestApprovalAttestation } } else { a.codeReviewApprovalIndex[githubReviewID] = indexPath // only use indexPath as the same review ID can be observed by more than one app } return nil } // GetGitHubPullRequestApprovalAttestationFor returns the requested GitHub pull // request approval attestation. Here, all the pieces of information to load the // attestation are known: the change the approval is for as well as the app that // observed the approval. func (a *Attestations) GetGitHubPullRequestApprovalAttestationFor(repo *gitinterface.Repository, appName, refName, fromRevisionID, targetTreeID string) (*sslibdsse.Envelope, error) { indexPath := GitHubPullRequestApprovalAttestationPath(refName, fromRevisionID, targetTreeID) return a.GetGitHubPullRequestApprovalAttestationForIndexPath(repo, appName, indexPath) } // GetGitHubPullRequestApprovalAttestationForReviewID returns the requested // GitHub pull request approval attestation for the specified GitHub instance, // review ID, and app. This is used when the indexPath is unknown, such as when // dismissing a prior approval. The host information and reviewID are used to // identify the indexPath for the requested review. func (a *Attestations) GetGitHubPullRequestApprovalAttestationForReviewID(repo *gitinterface.Repository, hostURL string, reviewID int64, appName string) (*sslibdsse.Envelope, error) { indexPath, has, err := a.GetGitHubPullRequestApprovalIndexPathForReviewID(hostURL, reviewID) if err != nil { return nil, err } if has { return a.GetGitHubPullRequestApprovalAttestationForIndexPath(repo, appName, indexPath) } return nil, github.ErrGitHubReviewIDNotFound } // GetGitHubPullRequestApprovalAttestationForIndexPath returns the requested // GitHub pull request approval attestation for the indexPath and appName. 
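//
// Illustrative call pattern (assumes `repo` and `attestations` are loaded by
// the caller; the revision and tree IDs are placeholders):
//
//	indexPath := GitHubPullRequestApprovalAttestationPath("refs/heads/main", fromRevisionID, targetTreeID)
//	env, err := attestations.GetGitHubPullRequestApprovalAttestationForIndexPath(repo, "github", indexPath)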
func (a *Attestations) GetGitHubPullRequestApprovalAttestationForIndexPath(repo *gitinterface.Repository, appName, indexPath string) (*sslibdsse.Envelope, error) { // We URL encode the appName to match the on-disk path blobPath := path.Join(indexPath, base64.URLEncoding.EncodeToString([]byte(appName))) blobID, has := a.codeReviewApprovalAttestations[blobPath] if !has { return nil, github.ErrPullRequestApprovalAttestationNotFound } envBytes, err := repo.ReadBlob(blobID) if err != nil { return nil, err } env := &sslibdsse.Envelope{} if err := json.Unmarshal(envBytes, env); err != nil { return nil, err } return env, nil } // GetGitHubPullRequestApprovalIndexPathForReviewID uses the host and review ID // to find the previously recorded index path. Also see: // SetGitHubPullRequestApprovalAttestation. func (a *Attestations) GetGitHubPullRequestApprovalIndexPathForReviewID(hostURL string, reviewID int64) (string, bool, error) { githubReviewID, err := GitHubReviewID(hostURL, reviewID) if err != nil { return "", false, err } indexPath, has := a.codeReviewApprovalIndex[githubReviewID] return indexPath, has, nil } // GitHubPullRequestApprovalAttestationPath returns the expected path on-disk // for the GitHub pull request approval attestation. This attestation type is // stored using the same format as a reference authorization with the addition // of `github` at the end of the path. This must be used as the tree to store // specific attestation blobs in. func GitHubPullRequestApprovalAttestationPath(refName, fromID, toID string) string { return path.Join(ReferenceAuthorizationPath(refName, fromID, toID), githubPullRequestApprovalSystemName) } // GitHubReviewID converts a GitHub specific review ID (recorded as an int64 // number by GitHub) into a code review system agnostic identifier used by // gittuf. func GitHubReviewID(hostURL string, reviewID int64) (string, error) { u, err := url.Parse(hostURL) if err != nil { return "", err } return fmt.Sprintf("%s::%d", u.Host, reviewID), nil } gittuf-0.9.0/internal/attestations/github/000077500000000000000000000000001475150141000206405ustar00rootroot00000000000000gittuf-0.9.0/internal/attestations/github/github.go000066400000000000000000000026141475150141000224540ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package github import ( "errors" "github.com/gittuf/gittuf/internal/attestations/authorizations" ) var ( ErrInvalidPullRequestApprovalAttestation = errors.New("the GitHub pull request approval attestation does not match expected details or has no approvers and dismissed approvers") ErrPullRequestApprovalAttestationNotFound = errors.New("requested GitHub pull request approval attestation not found") ErrGitHubReviewIDNotFound = errors.New("requested GitHub review ID does not exist in index") ) // PullRequestApprovalAttestation records approvals on a GitHub pull request via // a gittuf GitHub app. It's similar to a Reference Authorization in that it // records the updated ref, the prior state of the ref, and the target state of // the ref after the change is made. Unlike a Reference Authorization, it // records approvers within the predicate. If the app is trusted in the // repository's root of trust, then the approvers witnessed by the GitHub app // are trusted during gittuf verification. type PullRequestApprovalAttestation interface { // GetApprovers returns the list of approvers witnessed by the GitHub app. 
GetApprovers() []string // GetDismissedApprovers returns the list of approvers who later dismissed // their review. GetDismissedApprovers() []string authorizations.ReferenceAuthorization } gittuf-0.9.0/internal/attestations/github/v01/000077500000000000000000000000001475150141000212465ustar00rootroot00000000000000gittuf-0.9.0/internal/attestations/github/v01/approval.go000066400000000000000000000053121475150141000234220ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package v01 import ( authorizationsv01 "github.com/gittuf/gittuf/internal/attestations/authorizations/v01" "github.com/gittuf/gittuf/internal/attestations/common" "github.com/gittuf/gittuf/internal/attestations/github" "github.com/gittuf/gittuf/internal/common/set" sslibdsse "github.com/gittuf/gittuf/internal/third_party/go-securesystemslib/dsse" ita "github.com/in-toto/attestation/go/v1" ) const ( PullRequestApprovalPredicateType = "https://gittuf.dev/github-pull-request-approval/v0.1" digestGitTreeKey = "gitTree" ) type PullRequestApprovalAttestation struct { // Approvers contains the list of currently applicable approvers. Approvers *set.Set[string] `json:"approvers"` // DismissedApprovers contains the list of approvers who then dismissed // their approval. DismissedApprovers *set.Set[string] `json:"dismissedApprovers"` *authorizationsv01.ReferenceAuthorization } func (pra *PullRequestApprovalAttestation) GetApprovers() []string { return pra.Approvers.Contents() } func (pra *PullRequestApprovalAttestation) GetDismissedApprovers() []string { return pra.DismissedApprovers.Contents() } // NewPullRequestApprovalAttestation creates a new GitHub pull request approval // attestation for the provided information. The attestation is embedded in an // in-toto "statement" and returned with the appropriate "predicate type" set. // The `fromRevisionID` and `targetTreeID` specify the change to `targetRef` that is // approved on the corresponding GitHub pull request.
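//
// A minimal sketch (the approver identity mirrors the package tests; the IDs
// are placeholders assumed to come from the caller):
//
//	statement, err := NewPullRequestApprovalAttestation(
//		"refs/heads/main", fromRevisionID, targetTreeID,
//		[]string{"jane.doe@example.com"}, nil,
//	)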
func NewPullRequestApprovalAttestation(targetRef, fromRevisionID, targetTreeID string, approvers, dismissedApprovers []string) (*ita.Statement, error) { if len(approvers) == 0 && len(dismissedApprovers) == 0 { return nil, github.ErrInvalidPullRequestApprovalAttestation } predicate := &PullRequestApprovalAttestation{ ReferenceAuthorization: &authorizationsv01.ReferenceAuthorization{ TargetRef: targetRef, FromRevisionID: fromRevisionID, TargetTreeID: targetTreeID, }, Approvers: set.NewSetFromItems(approvers...), DismissedApprovers: set.NewSetFromItems(dismissedApprovers...), } predicateStruct, err := common.PredicateToPBStruct(predicate) if err != nil { return nil, err } return &ita.Statement{ Type: ita.StatementTypeUri, Subject: []*ita.ResourceDescriptor{ { Digest: map[string]string{digestGitTreeKey: targetTreeID}, }, }, PredicateType: PullRequestApprovalPredicateType, Predicate: predicateStruct, }, nil } func ValidatePullRequestApproval(env *sslibdsse.Envelope, targetRef, fromRevisionID, targetTreeID string) error { return authorizationsv01.Validate(env, targetRef, fromRevisionID, targetTreeID) } gittuf-0.9.0/internal/attestations/github/v01/approval_test.go000066400000000000000000000057111475150141000244640ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package v01 import ( "testing" "github.com/gittuf/gittuf/internal/attestations/authorizations" "github.com/gittuf/gittuf/internal/attestations/github" "github.com/gittuf/gittuf/internal/gitinterface" "github.com/gittuf/gittuf/internal/signerverifier/dsse" sslibdsse "github.com/gittuf/gittuf/internal/third_party/go-securesystemslib/dsse" ita "github.com/in-toto/attestation/go/v1" "github.com/stretchr/testify/assert" ) const ( targetRefKey = "targetRef" fromRevisionIDKey = "fromRevisionID" targetTreeIDKey = "targetTreeID" ) func TestNewGitHubPullRequestApprovalAttestation(t *testing.T) { testRef := "refs/heads/main" testID := gitinterface.ZeroHash.String() approvers := []string{"jane.doe@example.com"} _, err := NewPullRequestApprovalAttestation(testRef, testID, testID, nil, nil) assert.ErrorIs(t, err, github.ErrInvalidPullRequestApprovalAttestation) approvalAttestation, err := NewPullRequestApprovalAttestation(testRef, testID, testID, approvers, nil) assert.Nil(t, err) // Check value of statement type assert.Equal(t, ita.StatementTypeUri, approvalAttestation.Type) // Check subject contents assert.Equal(t, 1, len(approvalAttestation.Subject)) assert.Contains(t, approvalAttestation.Subject[0].Digest, digestGitTreeKey) assert.Equal(t, approvalAttestation.Subject[0].Digest[digestGitTreeKey], testID) // Check predicate type assert.Equal(t, PullRequestApprovalPredicateType, approvalAttestation.PredicateType) // Check predicate predicate := approvalAttestation.Predicate.AsMap() assert.Equal(t, predicate[targetRefKey], testRef) assert.Equal(t, predicate[targetTreeIDKey], testID) assert.Equal(t, predicate[fromRevisionIDKey], testID) // FIXME: this is a really messy assertion assert.Equal(t, approvers[0], predicate["approvers"].([]any)[0]) } func TestValidatePullRequestApproval(t *testing.T) { testRef := "refs/heads/main" testAnotherRef := "refs/heads/feature" testID := gitinterface.ZeroHash.String() approvers := []string{"jane.doe@example.com"} mainZeroZero := createTestPullRequestApprovalEnvelope(t, testRef, testID, testID, approvers) featureZeroZero := createTestPullRequestApprovalEnvelope(t, testAnotherRef, testID, testID, approvers) err := ValidatePullRequestApproval(mainZeroZero, testRef, 
testID, testID) assert.Nil(t, err) err = ValidatePullRequestApproval(featureZeroZero, testAnotherRef, testID, testID) assert.Nil(t, err) err = ValidatePullRequestApproval(mainZeroZero, testAnotherRef, testID, testID) assert.ErrorIs(t, err, authorizations.ErrInvalidAuthorization) } func createTestPullRequestApprovalEnvelope(t *testing.T, refName, fromID, toID string, approvers []string) *sslibdsse.Envelope { t.Helper() authorization, err := NewPullRequestApprovalAttestation(refName, fromID, toID, approvers, nil) if err != nil { t.Fatal(err) } env, err := dsse.CreateEnvelope(authorization) if err != nil { t.Fatal(err) } return env } gittuf-0.9.0/internal/attestations/github/v01/pr.go000066400000000000000000000022711475150141000222200ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package v01 import ( "encoding/json" "fmt" gogithub "github.com/google/go-github/v61/github" ita "github.com/in-toto/attestation/go/v1" "google.golang.org/protobuf/types/known/structpb" ) const ( PullRequestPredicateType = "https://gittuf.dev/github-pull-request/v0.1" digestGitCommitKey = "gitCommit" ) func NewPullRequestAttestation(owner, repository string, pullRequestNumber int, commitID string, pullRequest *gogithub.PullRequest) (*ita.Statement, error) { pullRequestBytes, err := json.Marshal(pullRequest) if err != nil { return nil, err } predicate := map[string]any{} if err := json.Unmarshal(pullRequestBytes, &predicate); err != nil { return nil, err } predicateStruct, err := structpb.NewStruct(predicate) if err != nil { return nil, err } return &ita.Statement{ Type: ita.StatementTypeUri, Subject: []*ita.ResourceDescriptor{ { Uri: fmt.Sprintf("https://github.com/%s/%s/pull/%d", owner, repository, pullRequestNumber), Digest: map[string]string{digestGitCommitKey: commitID}, }, }, PredicateType: PullRequestPredicateType, Predicate: predicateStruct, }, nil } gittuf-0.9.0/internal/attestations/github_test.go000066400000000000000000000104641475150141000222330ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package attestations import ( "encoding/base64" "fmt" "path" "testing" "github.com/gittuf/gittuf/internal/gitinterface" "github.com/gittuf/gittuf/internal/signerverifier/dsse" sslibdsse "github.com/gittuf/gittuf/internal/third_party/go-securesystemslib/dsse" "github.com/stretchr/testify/assert" ) func TestSetGitHubPullRequestApprovalAttestation(t *testing.T) { testRef := "refs/heads/main" testAnotherRef := "refs/heads/feature" testID := gitinterface.ZeroHash.String() baseURL := "https://github.com" baseHost := "github.com" appName := "github" approvers := []string{"jane.doe@example.com"} mainZeroZero := createGitHubPullRequestApprovalAttestationEnvelope(t, testRef, testID, testID, approvers) featureZeroZero := createGitHubPullRequestApprovalAttestationEnvelope(t, testAnotherRef, testID, testID, approvers) tmpDir := t.TempDir() repo := gitinterface.CreateTestGitRepository(t, tmpDir, false) attestations := &Attestations{} // Add auth for first branch err := attestations.SetGitHubPullRequestApprovalAttestation(repo, mainZeroZero, baseURL, 1, appName, testRef, testID, testID) assert.Nil(t, err) assert.Contains(t, attestations.codeReviewApprovalAttestations, path.Join(GitHubPullRequestApprovalAttestationPath(testRef, testID, testID), base64.URLEncoding.EncodeToString([]byte(appName)))) assert.NotContains(t, attestations.codeReviewApprovalAttestations, path.Join(GitHubPullRequestApprovalAttestationPath(testAnotherRef, 
testID, testID), base64.URLEncoding.EncodeToString([]byte(appName)))) assert.Equal(t, GitHubPullRequestApprovalAttestationPath(testRef, testID, testID), attestations.codeReviewApprovalIndex[fmt.Sprintf("%s::%d", baseHost, 1)]) // Add auth for the other branch err = attestations.SetGitHubPullRequestApprovalAttestation(repo, featureZeroZero, baseURL, 2, appName, testAnotherRef, testID, testID) assert.Nil(t, err) assert.Contains(t, attestations.codeReviewApprovalAttestations, path.Join(GitHubPullRequestApprovalAttestationPath(testRef, testID, testID), base64.URLEncoding.EncodeToString([]byte(appName)))) assert.Contains(t, attestations.codeReviewApprovalAttestations, path.Join(GitHubPullRequestApprovalAttestationPath(testAnotherRef, testID, testID), base64.URLEncoding.EncodeToString([]byte(appName)))) assert.Equal(t, GitHubPullRequestApprovalAttestationPath(testRef, testID, testID), attestations.codeReviewApprovalIndex[fmt.Sprintf("%s::%d", baseHost, 1)]) assert.Equal(t, GitHubPullRequestApprovalAttestationPath(testAnotherRef, testID, testID), attestations.codeReviewApprovalIndex[fmt.Sprintf("%s::%d", baseHost, 2)]) } func TestGetGitHubPullRequestApprovalAttestation(t *testing.T) { testRef := "refs/heads/main" testAnotherRef := "refs/heads/feature" testID := gitinterface.ZeroHash.String() baseURL := "https://github.com" appName := "github" approvers := []string{"jane.doe@example.com"} mainZeroZero := createGitHubPullRequestApprovalAttestationEnvelope(t, testRef, testID, testID, approvers) featureZeroZero := createGitHubPullRequestApprovalAttestationEnvelope(t, testAnotherRef, testID, testID, approvers) tmpDir := t.TempDir() repo := gitinterface.CreateTestGitRepository(t, tmpDir, false) attestations := &Attestations{} err := attestations.SetGitHubPullRequestApprovalAttestation(repo, mainZeroZero, baseURL, 1, appName, testRef, testID, testID) if err != nil { t.Fatal(err) } err = attestations.SetGitHubPullRequestApprovalAttestation(repo, featureZeroZero, baseURL, 2, appName, testAnotherRef, testID, testID) if err != nil { t.Fatal(err) } mainAuth, err := attestations.GetGitHubPullRequestApprovalAttestationFor(repo, appName, testRef, testID, testID) assert.Nil(t, err) assert.Equal(t, mainZeroZero, mainAuth) featureAuth, err := attestations.GetGitHubPullRequestApprovalAttestationFor(repo, appName, testAnotherRef, testID, testID) assert.Nil(t, err) assert.Equal(t, featureZeroZero, featureAuth) } func createGitHubPullRequestApprovalAttestationEnvelope(t *testing.T, refName, fromID, toID string, approvers []string) *sslibdsse.Envelope { t.Helper() authorization, err := NewGitHubPullRequestApprovalAttestation(refName, fromID, toID, approvers, nil) if err != nil { t.Fatal(err) } env, err := dsse.CreateEnvelope(authorization) if err != nil { t.Fatal(err) } return env } gittuf-0.9.0/internal/cache/000077500000000000000000000000001475150141000156775ustar00rootroot00000000000000gittuf-0.9.0/internal/cache/attestations.go000066400000000000000000000073461475150141000207620ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package cache import ( "fmt" "log/slog" "slices" "github.com/gittuf/gittuf/internal/gitinterface" ) func (p *Persistent) GetAttestationsEntries() []RSLEntryIndex { return p.AttestationEntries } // FindAttestationsEntryNumberForEntry returns the index of the attestations // entry to use. If the returned index has EntryNumber set to 0, it indicates // that an applicable entry was not found in the cache. 
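//
// A sketch of the expected lookup pattern (assuming `persistentCache` was
// loaded via LoadPersistentCache):
//
//	index, _ := persistentCache.FindAttestationsEntryNumberForEntry(entryNumber)
//	if index.GetEntryNumber() == 0 {
//		// no cached attestations entry applies; fall back to scanning the RSL
//	}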
func (p *Persistent) FindAttestationsEntryNumberForEntry(entryNumber uint64) (RSLEntryIndex, bool) { // Set entryNumber as max scanned if it's higher than what's already there p.SetAddedAttestationsBeforeNumber(entryNumber) index, has := slices.BinarySearchFunc(p.AttestationEntries, RSLEntryIndex{EntryNumber: entryNumber}, binarySearch) if has { slog.Debug(fmt.Sprintf("Requested entry number '%d' is for attestations", entryNumber)) slog.Debug("Requested attestations entry found in cache!") return p.AttestationEntries[index], false } if !has && index == 0 { // this happens when an attestations entry doesn't exist before the // specified entryNumber. No need to use the fallthrough. slog.Debug("No applicable attestations entry found in cache") return RSLEntryIndex{EntryNumber: 0}, false } slog.Debug("Requested attestations entry found in cache!") return p.AttestationEntries[index-1], false } func (p *Persistent) InsertAttestationEntryNumber(entryNumber uint64, entryID gitinterface.Hash) { if entryNumber == 0 { // For now, we don't have a way to track non-numbered entries // We likely never want to track non-numbered entries in this // cache as this is very dependent on numbering return } // TODO: check this is for the right ref? slog.Debug(fmt.Sprintf("Inserting attestations entry with ID '%s' and number %d into persistent cache...", entryID.String(), entryNumber)) if len(p.AttestationEntries) == 0 { // No entries yet, just add the current entry slog.Debug("No attestations entries in cache, adding current entry as sole item...") p.AttestationEntries = []RSLEntryIndex{{EntryNumber: entryNumber, EntryID: entryID.String()}} return } if p.AttestationEntries[len(p.AttestationEntries)-1].GetEntryNumber() < entryNumber { // Current entry clearly belongs at the very end slog.Debug("Current entry belongs at the end of ordered list of attestations entry, appending...") p.AttestationEntries = append(p.AttestationEntries, RSLEntryIndex{EntryNumber: entryNumber, EntryID: entryID.String()}) return } // We don't check the converse where the current entry is less than the // first entry because we're inserting as entries are encountered // chronologically. Worst case, binary search fallthrough below will still // handle it slog.Debug("Searching for insertion point...") index, has := slices.BinarySearchFunc(p.AttestationEntries, RSLEntryIndex{EntryNumber: entryNumber}, binarySearch) if has { // We could assume that if we've seen an entry with a number greater // than this, we should have seen this one too, but for now... slog.Debug("Entry with same number found, skipping addition of entry...") return } slog.Debug(fmt.Sprintf("Found insertion point %d", index)) newSlice := make([]RSLEntryIndex, 0, len(p.AttestationEntries)+1) newSlice = append(newSlice, p.AttestationEntries[:index]...) newSlice = append(newSlice, RSLEntryIndex{EntryNumber: entryNumber, EntryID: entryID.String()}) newSlice = append(newSlice, p.AttestationEntries[index:]...) 
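// Swap in the rebuilt slice so AttestationEntries stays ordered by entry number, with the new entry at the computed insertion point.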
p.AttestationEntries = newSlice p.SetAddedAttestationsBeforeNumber(entryNumber) } func (p *Persistent) SetAddedAttestationsBeforeNumber(entryNumber uint64) { if p.AddedAttestationsBeforeNumber < entryNumber { p.AddedAttestationsBeforeNumber = entryNumber } } gittuf-0.9.0/internal/cache/cache.go000066400000000000000000000133511475150141000172740ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package cache import ( "encoding/json" "errors" "log/slog" "github.com/gittuf/gittuf/internal/attestations" "github.com/gittuf/gittuf/internal/gitinterface" "github.com/gittuf/gittuf/internal/rsl" ) const ( Ref = "refs/local/gittuf/persistent-cache" persistentTreeEntryName = "persistentCache" policyRef = "refs/gittuf/policy" // this is copied from internal/policy to avoid an import cycle ) var ( ErrNoPersistentCache = errors.New("persistent cache not found") ErrEntryNotNumbered = errors.New("one or more entries are not numbered") ) type Persistent struct { // PolicyEntries is a list of index values for entries pertaining to the // policy ref. The list is ordered by each entry's Number. PolicyEntries []RSLEntryIndex `json:"policyEntries"` // AttestationEntries is a list of index values for entries pertaining to // the attestations ref. The list is ordered by each entry's Number. AttestationEntries []RSLEntryIndex `json:"attestationEntries"` // AddedAttestationsBeforeNumber tracks the number up to which // attestations have been searched for and added to // attestationsEntryNumbers. We need to track this for attestations in // particular because attestations are optional in gittuf repositories, // meaning attestationsEntryNumbers may be empty which would trigger a // full search. AddedAttestationsBeforeNumber uint64 `json:"addedAttestationsBeforeNumber"` // LastVerifiedEntryForRef is a map that indicates the last verified RSL // entry for a ref. LastVerifiedEntryForRef map[string]RSLEntryIndex `json:"lastVerifiedEntryForRef"` } func (p *Persistent) Commit(repo *gitinterface.Repository) error { if len(p.PolicyEntries) == 0 && len(p.AttestationEntries) == 0 && p.AddedAttestationsBeforeNumber == 0 && len(p.LastVerifiedEntryForRef) == 0 { // nothing to do return nil } contents, err := json.Marshal(p) if err != nil { return err } blobID, err := repo.WriteBlob(contents) if err != nil { return err } treeBuilder := gitinterface.NewTreeBuilder(repo) treeID, err := treeBuilder.WriteTreeFromEntries([]gitinterface.TreeEntry{gitinterface.NewEntryBlob(persistentTreeEntryName, blobID)}) if err != nil { return err } currentCommitID, _ := repo.GetReference(Ref) //nolint:errcheck if !currentCommitID.IsZero() { currentTreeID, err := repo.GetCommitTreeID(currentCommitID) if err == nil && treeID.Equal(currentTreeID) { // no change in cache contents, noop return nil } } _, err = repo.Commit(treeID, Ref, "Set persistent cache\n", false) return err } // PopulatePersistentCache scans the repository's RSL and generates a persistent // local-only cache of policy and attestation entries. This makes subsequent // verifications faster. This is currently only available in gittuf's developer // mode. 
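//
// A minimal sketch, assuming the caller has already opened the repository via
// gitinterface and that all RSL entries are numbered:
//
//	if err := PopulatePersistentCache(repo); err != nil {
//		return err
//	}
//	persistentCache, err := LoadPersistentCache(repo)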
func PopulatePersistentCache(repo *gitinterface.Repository) error { persistent := &Persistent{ PolicyEntries: []RSLEntryIndex{}, AttestationEntries: []RSLEntryIndex{}, } iterator, err := rsl.GetLatestEntry(repo) if err != nil { return err } if iterator.GetNumber() == 0 { return ErrEntryNotNumbered } persistent.AddedAttestationsBeforeNumber = iterator.GetNumber() for { if iterator, isReferenceEntry := iterator.(*rsl.ReferenceEntry); isReferenceEntry { switch iterator.RefName { case policyRef: persistent.InsertPolicyEntryNumber(iterator.GetNumber(), iterator.GetID()) case attestations.Ref: persistent.InsertAttestationEntryNumber(iterator.GetNumber(), iterator.GetID()) } } iterator, err = rsl.GetParentForEntry(repo, iterator) if err != nil { if errors.Is(err, rsl.ErrRSLEntryNotFound) { break } return err } if iterator.GetNumber() == 0 { return ErrEntryNotNumbered } } return persistent.Commit(repo) } // LoadPersistentCache loads the persistent cache from the tip of the local ref. // If an instance has already been loaded and a pointer has been stored in // memory, that instance is returned. func LoadPersistentCache(repo *gitinterface.Repository) (*Persistent, error) { slog.Debug("Loading persistent cache from disk...") commitID, err := repo.GetReference(Ref) if err != nil { if errors.Is(err, gitinterface.ErrReferenceNotFound) { // Persistent cache doesn't exist slog.Debug("Persistent cache does not exist") return nil, ErrNoPersistentCache } return nil, err } treeID, err := repo.GetCommitTreeID(commitID) if err != nil { return nil, err } allFiles, err := repo.GetAllFilesInTree(treeID) if err != nil { return nil, err } blobID, has := allFiles[persistentTreeEntryName] if !has { // Persistent cache doesn't seem to exist? This maybe warrants // an error but we may have more than one file here in future? slog.Debug("Persistent cache does not exist") return nil, ErrNoPersistentCache } blob, err := repo.ReadBlob(blobID) if err != nil { return nil, err } persistentCache := &Persistent{} if err := json.Unmarshal(blob, &persistentCache); err != nil { return nil, err } slog.Debug("Loaded persistent cache") return persistentCache, nil } // RSLEntryIndex is essentially a tuple that maps RSL entry IDs to numbers. This // may be expanded in future to include more information as needed. type RSLEntryIndex struct { EntryID string `json:"entryID"` EntryNumber uint64 `json:"entryNumber"` } func (r *RSLEntryIndex) GetEntryID() gitinterface.Hash { hash, _ := gitinterface.NewHash(r.EntryID) // TODO: error? return hash } func (r *RSLEntryIndex) GetEntryNumber() uint64 { return r.EntryNumber } func binarySearch(a, b RSLEntryIndex) int { if a.GetEntryNumber() == b.GetEntryNumber() { // Exact match return 0 } if a.GetEntryNumber() < b.GetEntryNumber() { // Precedes return -1 } // Succeeds return 1 } gittuf-0.9.0/internal/cache/policy.go000066400000000000000000000107761475150141000175400ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package cache import ( "fmt" "log/slog" "slices" "github.com/gittuf/gittuf/internal/gitinterface" ) func (p *Persistent) GetPolicyEntries() []RSLEntryIndex { return p.PolicyEntries } func (p *Persistent) HasPolicyEntryNumber(entryNumber uint64) (gitinterface.Hash, bool) { if len(p.PolicyEntries) == 0 || entryNumber == 0 { return gitinterface.ZeroHash, false } index, has := slices.BinarySearchFunc(p.PolicyEntries, RSLEntryIndex{EntryNumber: entryNumber}, binarySearch) if !has { return gitinterface.ZeroHash, false } // Unlike Find... 
we're actively checking if a policy number has been // inserted into the cache before, so we return the ID from that index // exactly return p.PolicyEntries[index].GetEntryID(), true } func (p *Persistent) FindPolicyEntryNumberForEntry(entryNumber uint64) RSLEntryIndex { if len(p.PolicyEntries) == 0 { return RSLEntryIndex{EntryNumber: 0} // this is a special case } slog.Debug(fmt.Sprintf("Finding policy entry in cache before entry %d...", entryNumber)) index, has := slices.BinarySearchFunc(p.PolicyEntries, RSLEntryIndex{EntryNumber: entryNumber}, binarySearch) if has { // The entry number given to us is the first entry which happens // to be the start of verification as well // This can happen for full verification return p.PolicyEntries[index] } if !has && index == 0 { // this happens when a policy entry doesn't exist before the specified // entryNumber return RSLEntryIndex{EntryNumber: 0} } // When !has, index is point of insertion, but we want the applicable // entry which is index-1 return p.PolicyEntries[index-1] } func (p *Persistent) FindPolicyEntriesInRange(firstNumber, lastNumber uint64) ([]RSLEntryIndex, error) { if len(p.PolicyEntries) == 0 { return nil, ErrNoPersistentCache // TODO: check if custom error makes sense } firstIndex, has := slices.BinarySearchFunc(p.PolicyEntries, RSLEntryIndex{EntryNumber: firstNumber}, binarySearch) if !has { // When !has, index is point of insertion, but we want the applicable // entry which is index-1 firstIndex-- } lastIndex, has := slices.BinarySearchFunc(p.PolicyEntries, RSLEntryIndex{EntryNumber: lastNumber}, binarySearch) if has { // When has, lastIndex is an entry we want to return, so we increment // lastIndex to ensure the corresponding entry is included in the return lastIndex++ } return p.PolicyEntries[firstIndex:lastIndex], nil } func (p *Persistent) InsertPolicyEntryNumber(entryNumber uint64, entryID gitinterface.Hash) { if entryNumber == 0 { // For now, we don't have a way to track non-numbered entries // We likely never want to track non-numbered entries in this // cache as this is very dependent on numbering return } // TODO: check this is for the right ref? slog.Debug(fmt.Sprintf("Inserting policy entry with ID '%s' and number %d into persistent cache...", entryID.String(), entryNumber)) if len(p.PolicyEntries) == 0 { // No entries yet, just add the current entry slog.Debug("No policy entries in cache, adding current entry as sole item...") p.PolicyEntries = []RSLEntryIndex{{EntryNumber: entryNumber, EntryID: entryID.String()}} return } if p.PolicyEntries[len(p.PolicyEntries)-1].GetEntryNumber() < entryNumber { // Current entry clearly belongs at the very end slog.Debug("Current entry belongs at the end of ordered list of attestations entry, appending...") p.PolicyEntries = append(p.PolicyEntries, RSLEntryIndex{EntryNumber: entryNumber, EntryID: entryID.String()}) return } // We don't check the converse where the current entry is less than the // first entry because we're inserting as entries are encountered // chronologically. Worst case, binary search fallthrough below will still // handle it slog.Debug("Searching for insertion point...") index, has := slices.BinarySearchFunc(p.PolicyEntries, RSLEntryIndex{EntryNumber: entryNumber}, binarySearch) if has { // We could assume that if we've seen an entry with a number greater // than this, we should have seen this one too, but for now... 
slog.Debug("Entry with same number found, skipping addition of entry...") return } slog.Debug(fmt.Sprintf("Found insertion point %d", index)) newSlice := make([]RSLEntryIndex, 0, len(p.PolicyEntries)+1) newSlice = append(newSlice, p.PolicyEntries[:index]...) newSlice = append(newSlice, RSLEntryIndex{EntryNumber: entryNumber, EntryID: entryID.String()}) newSlice = append(newSlice, p.PolicyEntries[index:]...) p.PolicyEntries = newSlice } gittuf-0.9.0/internal/cache/verification.go000066400000000000000000000020061475150141000207060ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package cache import "github.com/gittuf/gittuf/internal/gitinterface" func (p *Persistent) SetLastVerifiedEntryForRef(ref string, entryNumber uint64, entryID gitinterface.Hash) { if p.LastVerifiedEntryForRef == nil { p.LastVerifiedEntryForRef = map[string]RSLEntryIndex{} } currentIndex, hasEntryForRef := p.LastVerifiedEntryForRef[ref] if hasEntryForRef { // If set verified number is higher than entryNumber, noop if currentIndex.GetEntryNumber() > entryNumber { return } } p.LastVerifiedEntryForRef[ref] = RSLEntryIndex{EntryNumber: entryNumber, EntryID: entryID.String()} } func (p *Persistent) GetLastVerifiedEntryForRef(ref string) (uint64, gitinterface.Hash) { if p.LastVerifiedEntryForRef == nil { return 0, gitinterface.ZeroHash } currentIndex, hasEntryForRef := p.LastVerifiedEntryForRef[ref] if !hasEntryForRef { return 0, gitinterface.ZeroHash } return currentIndex.GetEntryNumber(), currentIndex.GetEntryID() } gittuf-0.9.0/internal/cmd/000077500000000000000000000000001475150141000153775ustar00rootroot00000000000000gittuf-0.9.0/internal/cmd/addhooks/000077500000000000000000000000001475150141000171735ustar00rootroot00000000000000gittuf-0.9.0/internal/cmd/addhooks/addhooks.go000066400000000000000000000021711475150141000213170ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package addhooks import ( "errors" "fmt" "github.com/gittuf/gittuf/experimental/gittuf" "github.com/spf13/cobra" ) type options struct { force bool } func (o *options) AddFlags(cmd *cobra.Command) { cmd.Flags().BoolVarP( &o.force, "force", "f", false, "overwrite hooks, if they already exist", ) } func (o *options) Run(cmd *cobra.Command, _ []string) error { repo, err := gittuf.LoadRepository() if err != nil { return err } err = repo.UpdateHook(gittuf.HookPrePush, prePushScript, o.force) var hookErr *gittuf.ErrHookExists if errors.As(err, &hookErr) { fmt.Fprintf( cmd.ErrOrStderr(), "'%s' already exists. Use --force flag or merge existing hook and the following script manually:\n\n%s\n", string(hookErr.HookType), prePushScript, ) } return err } func New() *cobra.Command { o := &options{} cmd := &cobra.Command{ Use: "add-hooks", Short: "Add git hooks that automatically create and sync RSL", RunE: o.Run, DisableAutoGenTag: true, } o.AddFlags(cmd) return cmd } gittuf-0.9.0/internal/cmd/addhooks/prepush.go000066400000000000000000000010161475150141000212060ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package addhooks var prePushScript = []byte(`#!/bin/sh set -e remote="$1" url="$2" if ! command -v gittuf > /dev/null then echo "gittuf could not be found" echo "Download from: https://github.com/gittuf/gittuf/releases/latest" exit 1 fi echo "Pulling RSL from ${remote}." gittuf rsl remote pull ${remote} echo "Creating new RSL record for HEAD." gittuf rsl record HEAD echo "Pushing RSL to ${remote}." 
gittuf rsl remote push ${remote} `) gittuf-0.9.0/internal/cmd/attest/000077500000000000000000000000001475150141000167035ustar00rootroot00000000000000gittuf-0.9.0/internal/cmd/attest/attest.go000066400000000000000000000006341475150141000205410ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package attest import ( "github.com/gittuf/gittuf/internal/cmd/attest/authorize" "github.com/spf13/cobra" ) func New() *cobra.Command { cmd := &cobra.Command{ Use: "attest", Short: "Tools for attesting to code contributions", DisableAutoGenTag: true, } cmd.AddCommand(authorize.New()) return cmd } gittuf-0.9.0/internal/cmd/attest/authorize/000077500000000000000000000000001475150141000207155ustar00rootroot00000000000000gittuf-0.9.0/internal/cmd/attest/authorize/authorize.go000066400000000000000000000031751475150141000232640ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package authorize import ( "fmt" "github.com/gittuf/gittuf/experimental/gittuf" "github.com/spf13/cobra" ) type options struct { signingKey string fromRef string revoke bool } func (o *options) AddFlags(cmd *cobra.Command) { cmd.Flags().StringVarP( &o.signingKey, "signing-key", "k", "", "signing key to use for creating or revoking an authorization", ) cmd.MarkFlagRequired("signing-key") //nolint:errcheck cmd.Flags().StringVarP( &o.fromRef, "from-ref", "f", "", "ref to authorize merging changes from", ) cmd.MarkFlagRequired("from-ref") //nolint:errcheck cmd.Flags().BoolVarP( &o.revoke, "revoke", "r", false, "revoke existing authorization", ) } func (o *options) Run(cmd *cobra.Command, args []string) error { repo, err := gittuf.LoadRepository() if err != nil { return err } signer, err := gittuf.LoadSigner(repo, o.signingKey) if err != nil { return err } if o.revoke { if len(args) < 3 { return fmt.Errorf("insufficient parameters for revoking authorization, requires ") } return repo.RemoveReferenceAuthorization(cmd.Context(), signer, args[0], args[1], args[2], true) } return repo.AddReferenceAuthorization(cmd.Context(), signer, args[0], o.fromRef, true) } func New() *cobra.Command { o := &options{} cmd := &cobra.Command{ Use: "authorize", Short: "Add or revoke reference authorization", Args: cobra.MinimumNArgs(1), RunE: o.Run, DisableAutoGenTag: true, } o.AddFlags(cmd) return cmd } gittuf-0.9.0/internal/cmd/clone/000077500000000000000000000000001475150141000164775ustar00rootroot00000000000000gittuf-0.9.0/internal/cmd/clone/clone.go000066400000000000000000000030501475150141000201240ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package clone import ( "github.com/gittuf/gittuf/experimental/gittuf" "github.com/gittuf/gittuf/internal/cmd/common" "github.com/gittuf/gittuf/internal/tuf" "github.com/spf13/cobra" ) type options struct { branch string expectedRootKeys common.PublicKeys bare bool } func (o *options) AddFlags(cmd *cobra.Command) { cmd.Flags().StringVarP( &o.branch, "branch", "b", "", "specify branch to check out", ) cmd.Flags().Var( &o.expectedRootKeys, "root-key", "set of initial root of trust keys for the repository (supported values: paths to SSH keys, GPG key fingerprints, Sigstore/Fulcio identities)", ) cmd.Flags().BoolVar( &o.bare, "bare", false, "make a bare Git repository", ) } func (o *options) Run(cmd *cobra.Command, args []string) error { var dir string if len(args) > 1 { dir = args[1] } expectedRootKeys := make([]tuf.Principal, len(o.expectedRootKeys)) for index, keyPath := 
range o.expectedRootKeys { key, err := gittuf.LoadPublicKey(keyPath) if err != nil { return err } expectedRootKeys[index] = key } _, err := gittuf.Clone(cmd.Context(), args[0], dir, o.branch, expectedRootKeys, o.bare) return err } func New() *cobra.Command { o := &options{} cmd := &cobra.Command{ Use: "clone", Short: "Clone repository and its gittuf references", Args: cobra.MinimumNArgs(1), RunE: o.Run, DisableAutoGenTag: true, } o.AddFlags(cmd) return cmd } gittuf-0.9.0/internal/cmd/common/000077500000000000000000000000001475150141000166675ustar00rootroot00000000000000gittuf-0.9.0/internal/cmd/common/common.go000066400000000000000000000020561475150141000205110ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package common import ( "errors" "strings" "github.com/spf13/cobra" ) var ErrSigningKeyNotSet = errors.New("required flag \"signing-key\" not set") // PublicKeys is a custom type to represent a list of paths type PublicKeys []string // String implements part of the pflag.Value interface. func (p *PublicKeys) String() string { return strings.Join(*p, ", ") } // Set implements part of the pflag.Value interface. func (p *PublicKeys) Set(value string) error { *p = append(*p, value) return nil } // Type implements part of the pflag.Value interface. func (p *PublicKeys) Type() string { return "public-keys" } // CheckForSigningKeyFlag checks if a signing key was specified via the // "signing-key" flag func CheckForSigningKeyFlag(cmd *cobra.Command, _ []string) error { signingKeyFlag := cmd.Flags().Lookup("signing-key") // Check if a signing key was specified via the "signing-key" flag if signingKeyFlag.Value.String() == "" { return ErrSigningKeyNotSet } return nil } gittuf-0.9.0/internal/cmd/dev/000077500000000000000000000000001475150141000161555ustar00rootroot00000000000000gittuf-0.9.0/internal/cmd/dev/addgithubapproval/000077500000000000000000000000001475150141000216555ustar00rootroot00000000000000gittuf-0.9.0/internal/cmd/dev/addgithubapproval/addgithubapproval.go000066400000000000000000000045561475150141000257160ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package addgithubapproval import ( "fmt" "strings" "github.com/gittuf/gittuf/experimental/gittuf" githubopts "github.com/gittuf/gittuf/experimental/gittuf/options/github" "github.com/gittuf/gittuf/internal/dev" "github.com/spf13/cobra" ) type options struct { signingKey string baseURL string repository string pullRequestNumber int reviewID int64 approver string } func (o *options) AddFlags(cmd *cobra.Command) { cmd.Flags().StringVarP( &o.signingKey, "signing-key", "k", "", "signing key to use for signing attestation", ) cmd.MarkFlagRequired("signing-key") //nolint:errcheck cmd.Flags().StringVar( &o.baseURL, "base-URL", githubopts.DefaultGitHubBaseURL, "location of GitHub instance", ) cmd.Flags().StringVar( &o.repository, "repository", "", "path to base GitHub repository the pull request is opened against, of form {owner}/{repo}", ) cmd.MarkFlagRequired("repository") //nolint:errcheck cmd.Flags().IntVar( &o.pullRequestNumber, "pull-request-number", -1, "pull request number", ) cmd.MarkFlagRequired("pull-request-number") //nolint:errcheck cmd.Flags().Int64Var( &o.reviewID, "review-ID", -1, "pull request review ID", ) cmd.MarkFlagRequired("review-ID") //nolint:errcheck cmd.Flags().StringVar( &o.approver, "approver", "", "identity of the reviewer who approved the change", ) cmd.MarkFlagRequired("approver") //nolint:errcheck } func (o 
*options) Run(cmd *cobra.Command, _ []string) error { repositoryParts := strings.Split(o.repository, "/") if len(repositoryParts) != 2 { return fmt.Errorf("invalid format for repository, must be {owner}/{repo}") } repo, err := gittuf.LoadRepository() if err != nil { return err } signer, err := gittuf.LoadSigner(repo, o.signingKey) if err != nil { return err } return repo.AddGitHubPullRequestApprover(cmd.Context(), signer, repositoryParts[0], repositoryParts[1], o.pullRequestNumber, o.reviewID, o.approver, true, githubopts.WithGitHubBaseURL(o.baseURL)) } func New() *cobra.Command { o := &options{} cmd := &cobra.Command{ Use: "add-github-approval", Short: fmt.Sprintf("Record GitHub pull request approval as an attestation (developer mode only, set %s=1)", dev.DevModeKey), RunE: o.Run, } o.AddFlags(cmd) return cmd } gittuf-0.9.0/internal/cmd/dev/attestgithub/000077500000000000000000000000001475150141000206645ustar00rootroot00000000000000gittuf-0.9.0/internal/cmd/dev/attestgithub/attestgithub.go000066400000000000000000000052251475150141000237260ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package attestgithub import ( "fmt" "strings" "github.com/gittuf/gittuf/experimental/gittuf" githubopts "github.com/gittuf/gittuf/experimental/gittuf/options/github" "github.com/gittuf/gittuf/internal/dev" "github.com/spf13/cobra" ) type options struct { signingKey string baseURL string repository string pullRequestNumber int commitID string baseBranch string } func (o *options) AddFlags(cmd *cobra.Command) { cmd.Flags().StringVarP( &o.signingKey, "signing-key", "k", "", "signing key to use for signing attestation", ) cmd.MarkFlagRequired("signing-key") //nolint:errcheck cmd.Flags().StringVar( &o.baseURL, "base-URL", githubopts.DefaultGitHubBaseURL, "location of GitHub instance", ) cmd.Flags().StringVar( &o.repository, "repository", "", "path to base GitHub repository the pull request is opened against, of form {owner}/{repo}", ) cmd.MarkFlagRequired("repository") //nolint:errcheck cmd.Flags().IntVar( &o.pullRequestNumber, "pull-request-number", -1, "pull request number to record in attestation", ) cmd.Flags().StringVar( &o.commitID, "commit", "", "commit to record pull request attestation for", ) cmd.Flags().StringVar( &o.baseBranch, "base-branch", "", "base branch for pull request, used with --commit", ) // When we're using commit, we need the base branch to filter through nested // pull requests cmd.MarkFlagsRequiredTogether("commit", "base-branch") cmd.MarkFlagsOneRequired("pull-request-number", "commit") } func (o *options) Run(cmd *cobra.Command, _ []string) error { repositoryParts := strings.Split(o.repository, "/") if len(repositoryParts) != 2 { return fmt.Errorf("invalid format for repository, must be {owner}/{repo}") } repo, err := gittuf.LoadRepository() if err != nil { return err } signer, err := gittuf.LoadSigner(repo, o.signingKey) if err != nil { return err } if o.commitID != "" { return repo.AddGitHubPullRequestAttestationForCommit(cmd.Context(), signer, repositoryParts[0], repositoryParts[1], o.commitID, o.baseBranch, true, githubopts.WithGitHubBaseURL(o.baseURL)) } return repo.AddGitHubPullRequestAttestationForNumber(cmd.Context(), signer, repositoryParts[0], repositoryParts[1], o.pullRequestNumber, true, githubopts.WithGitHubBaseURL(o.baseURL)) } func New() *cobra.Command { o := &options{} cmd := &cobra.Command{ Use: "attest-github", Short: fmt.Sprintf("Record GitHub pull request information as an attestation (developer mode only, set 
%s=1)", dev.DevModeKey), RunE: o.Run, } o.AddFlags(cmd) return cmd } gittuf-0.9.0/internal/cmd/dev/authorize/000077500000000000000000000000001475150141000201675ustar00rootroot00000000000000gittuf-0.9.0/internal/cmd/dev/authorize/authorize.go000066400000000000000000000004671475150141000225370ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package authorize import ( "github.com/gittuf/gittuf/internal/cmd/attest/authorize" "github.com/spf13/cobra" ) func New() *cobra.Command { cmd := authorize.New() cmd.Deprecated = "switch to \"gittuf attest authorize\"" return cmd } gittuf-0.9.0/internal/cmd/dev/dev.go000066400000000000000000000023741475150141000172700ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package dev import ( "fmt" "github.com/gittuf/gittuf/internal/cmd/dev/addgithubapproval" "github.com/gittuf/gittuf/internal/cmd/dev/attestgithub" "github.com/gittuf/gittuf/internal/cmd/dev/authorize" "github.com/gittuf/gittuf/internal/cmd/dev/dismissgithubapproval" "github.com/gittuf/gittuf/internal/cmd/dev/populatecache" "github.com/gittuf/gittuf/internal/cmd/dev/rslrecordat" "github.com/gittuf/gittuf/internal/dev" "github.com/spf13/cobra" ) func New() *cobra.Command { cmd := &cobra.Command{ Use: "dev", Short: "Developer mode commands", Long: fmt.Sprintf("These commands are meant to be used to aid gittuf development, and are not expected to be used during standard workflows. If used, they can undermine repository security. To proceed, set %s=1.", dev.DevModeKey), PreRunE: checkInDevMode, } cmd.AddCommand(attestgithub.New()) cmd.AddCommand(addgithubapproval.New()) cmd.AddCommand(authorize.New()) cmd.AddCommand(dismissgithubapproval.New()) cmd.AddCommand(populatecache.New()) cmd.AddCommand(rslrecordat.New()) return cmd } func checkInDevMode(_ *cobra.Command, _ []string) error { if !dev.InDevMode() { return dev.ErrNotInDevMode } return nil } gittuf-0.9.0/internal/cmd/dev/dismissgithubapproval/000077500000000000000000000000001475150141000226005ustar00rootroot00000000000000gittuf-0.9.0/internal/cmd/dev/dismissgithubapproval/dismissgithubapproval.go000066400000000000000000000034011475150141000275500ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package dismissgithubapproval import ( "fmt" "github.com/gittuf/gittuf/experimental/gittuf" githubopts "github.com/gittuf/gittuf/experimental/gittuf/options/github" "github.com/gittuf/gittuf/internal/dev" "github.com/spf13/cobra" ) type options struct { signingKey string baseURL string reviewID int64 dismissedApprover string } func (o *options) AddFlags(cmd *cobra.Command) { cmd.Flags().StringVarP( &o.signingKey, "signing-key", "k", "", "signing key to use for signing attestation", ) cmd.MarkFlagRequired("signing-key") //nolint:errcheck cmd.Flags().StringVar( &o.baseURL, "base-URL", githubopts.DefaultGitHubBaseURL, "location of GitHub instance", ) cmd.Flags().StringVar( &o.dismissedApprover, "dismiss-approver", "", "identity of the reviewer whose review was dismissed", ) cmd.MarkFlagRequired("dismiss-approver") //nolint:errcheck cmd.Flags().Int64Var( &o.reviewID, "review-ID", -1, "pull request review ID", ) cmd.MarkFlagRequired("review-ID") //nolint:errcheck } func (o *options) Run(cmd *cobra.Command, _ []string) error { repo, err := gittuf.LoadRepository() if err != nil { return err } signer, err := gittuf.LoadSigner(repo, o.signingKey) if err != nil { return err } return 
repo.DismissGitHubPullRequestApprover(cmd.Context(), signer, o.reviewID, o.dismissedApprover, true, githubopts.WithGitHubBaseURL(o.baseURL)) } func New() *cobra.Command { o := &options{} cmd := &cobra.Command{ Use: "dismiss-github-approval", Short: fmt.Sprintf("Dismiss GitHub pull request approval as an attestation (developer mode only, set %s=1)", dev.DevModeKey), RunE: o.Run, } o.AddFlags(cmd) return cmd } gittuf-0.9.0/internal/cmd/dev/populatecache/000077500000000000000000000000001475150141000207725ustar00rootroot00000000000000gittuf-0.9.0/internal/cmd/dev/populatecache/populatecache.go000066400000000000000000000013101475150141000241310ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package populatecache import ( "fmt" "github.com/gittuf/gittuf/experimental/gittuf" "github.com/gittuf/gittuf/internal/dev" "github.com/spf13/cobra" ) type options struct{} func (o *options) AddFlags(_ *cobra.Command) {} func (o *options) Run(_ *cobra.Command, _ []string) error { repo, err := gittuf.LoadRepository() if err != nil { return err } return repo.PopulateCache() } func New() *cobra.Command { o := &options{} cmd := &cobra.Command{ Use: "populate-cache", Short: fmt.Sprintf("Populate persistent cache (developer mode only, set %s=1)", dev.DevModeKey), RunE: o.Run, } o.AddFlags(cmd) return cmd } gittuf-0.9.0/internal/cmd/dev/rslrecordat/000077500000000000000000000000001475150141000205015ustar00rootroot00000000000000gittuf-0.9.0/internal/cmd/dev/rslrecordat/rslrecordat.go000066400000000000000000000030441475150141000233550ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package rslrecordat import ( "fmt" "os" "github.com/gittuf/gittuf/experimental/gittuf" rslopts "github.com/gittuf/gittuf/experimental/gittuf/options/rsl" "github.com/gittuf/gittuf/internal/dev" "github.com/spf13/cobra" ) type options struct { targetID string signingKeyPath string dstRef string } func (o *options) AddFlags(cmd *cobra.Command) { cmd.Flags().StringVar( &o.dstRef, "dst-ref", "", "name of destination reference, if it differs from source reference", ) cmd.Flags().StringVarP( &o.targetID, "target", "t", "", "target ID", ) cmd.MarkFlagRequired("target") //nolint:errcheck cmd.Flags().StringVarP( &o.signingKeyPath, "signing-key", "k", "", "path to PEM encoded SSH or GPG signing key", ) cmd.MarkFlagRequired("signing-key") //nolint:errcheck } func (o *options) Run(_ *cobra.Command, args []string) error { repo, err := gittuf.LoadRepository() if err != nil { return err } signingKeyBytes, err := os.ReadFile(o.signingKeyPath) if err != nil { return err } return repo.RecordRSLEntryForReferenceAtTarget(args[0], o.targetID, signingKeyBytes, rslopts.WithOverrideRefName(o.dstRef)) } func New() *cobra.Command { o := &options{} cmd := &cobra.Command{ Use: "rsl-record", Short: fmt.Sprintf("Record explicit state of a Git reference in the RSL, signed with specified key (developer mode only, set %s=1)", dev.DevModeKey), Args: cobra.ExactArgs(1), RunE: o.Run, } o.AddFlags(cmd) return cmd } gittuf-0.9.0/internal/cmd/policy/000077500000000000000000000000001475150141000166765ustar00rootroot00000000000000gittuf-0.9.0/internal/cmd/policy/addkey/000077500000000000000000000000001475150141000201375ustar00rootroot00000000000000gittuf-0.9.0/internal/cmd/policy/addkey/addkey.go000066400000000000000000000037211475150141000217320ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package addkey import ( 
"github.com/gittuf/gittuf/experimental/gittuf" "github.com/gittuf/gittuf/internal/cmd/common" "github.com/gittuf/gittuf/internal/cmd/policy/persistent" "github.com/gittuf/gittuf/internal/policy" "github.com/gittuf/gittuf/internal/tuf" "github.com/spf13/cobra" ) type options struct { p *persistent.Options policyName string authorizedKeys []string } func (o *options) AddFlags(cmd *cobra.Command) { cmd.Flags().StringVar( &o.policyName, "policy-name", policy.TargetsRoleName, "name of policy file to add key to", ) cmd.Flags().StringArrayVar( &o.authorizedKeys, "public-key", []string{}, "authorized public key", ) cmd.MarkFlagRequired("public-key") //nolint:errcheck } func (o *options) Run(cmd *cobra.Command, _ []string) error { repo, err := gittuf.LoadRepository() if err != nil { return err } signer, err := gittuf.LoadSigner(repo, o.p.SigningKey) if err != nil { return err } authorizedKeys := []tuf.Principal{} for _, key := range o.authorizedKeys { key, err := gittuf.LoadPublicKey(key) if err != nil { return err } authorizedKeys = append(authorizedKeys, key) } return repo.AddPrincipalToTargets(cmd.Context(), signer, o.policyName, authorizedKeys, true) } func New(persistent *persistent.Options) *cobra.Command { o := &options{p: persistent} cmd := &cobra.Command{ Use: "add-key", Short: "Add a trusted key to a policy file", Long: `This command allows users to add trusted keys to the specified policy file. By default, the main policy file is selected. Note that the keys can be specified from disk, from the GPG keyring using the "gpg:" format, or as a Sigstore identity as "fulcio:::".`, PreRunE: common.CheckForSigningKeyFlag, RunE: o.Run, DisableAutoGenTag: true, } o.AddFlags(cmd) return cmd } gittuf-0.9.0/internal/cmd/policy/addperson/000077500000000000000000000000001475150141000206555ustar00rootroot00000000000000gittuf-0.9.0/internal/cmd/policy/addperson/addperson.go000066400000000000000000000073551475150141000231750ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package addperson import ( "fmt" "strings" "github.com/gittuf/gittuf/experimental/gittuf" "github.com/gittuf/gittuf/internal/cmd/common" "github.com/gittuf/gittuf/internal/cmd/policy/persistent" "github.com/gittuf/gittuf/internal/dev" "github.com/gittuf/gittuf/internal/policy" "github.com/gittuf/gittuf/internal/tuf" tufv02 "github.com/gittuf/gittuf/internal/tuf/v02" "github.com/spf13/cobra" ) type options struct { p *persistent.Options policyName string personID string publicKeys []string associatedIdentities []string customMetadata []string } func (o *options) AddFlags(cmd *cobra.Command) { cmd.Flags().StringVar( &o.policyName, "policy-name", policy.TargetsRoleName, "name of policy file to add key to", ) cmd.Flags().StringVar( &o.personID, "person-ID", "", "person ID", ) cmd.MarkFlagRequired("person-ID") //nolint:errcheck cmd.Flags().StringArrayVar( &o.publicKeys, "public-key", []string{}, "authorized public key for person", ) cmd.MarkFlagRequired("authorize-key") //nolint:errcheck cmd.Flags().StringArrayVar( &o.associatedIdentities, "associated-identity", []string{}, "identities on code review platforms in the form 'providerID::identity' (e.g., 'https://github.com::')", ) cmd.Flags().StringArrayVar( &o.customMetadata, "custom", []string{}, "additional custom metadata in the form KEY=VALUE", ) } func (o *options) Run(cmd *cobra.Command, _ []string) error { if !tufv02.AllowV02Metadata() { return fmt.Errorf("developer mode and v0.2 policy metadata must be enabled, set %s=1 and %s=1", 
dev.DevModeKey, tufv02.AllowV02MetadataKey) } repo, err := gittuf.LoadRepository() if err != nil { return err } signer, err := gittuf.LoadSigner(repo, o.p.SigningKey) if err != nil { return err } publicKeys := map[string]*tufv02.Key{} for _, key := range o.publicKeys { key, err := gittuf.LoadPublicKey(key) if err != nil { return err } publicKeys[key.ID()] = key.(*tufv02.Key) } associatedIdentities := map[string]string{} for _, associatedIdentity := range o.associatedIdentities { split := strings.Split(associatedIdentity, "::") if len(split) != 2 { return fmt.Errorf("invalid format for associated identity '%s'", associatedIdentity) } associatedIdentities[split[0]] = split[1] } custom := map[string]string{} for _, customEntry := range o.customMetadata { split := strings.Split(customEntry, "=") if len(split) != 2 { return fmt.Errorf("invalid format for custom metadata '%s'", customEntry) } custom[split[0]] = split[1] } person := &tufv02.Person{ PersonID: o.personID, PublicKeys: publicKeys, AssociatedIdentities: associatedIdentities, Custom: custom, } return repo.AddPrincipalToTargets(cmd.Context(), signer, o.policyName, []tuf.Principal{person}, true) } func New(persistent *persistent.Options) *cobra.Command { o := &options{p: persistent} cmd := &cobra.Command{ Use: "add-person", Short: fmt.Sprintf("Add a trusted person to a policy file (requires developer mode and v0.2 policy metadata to be enabled, set %s=1 and %s=1)", dev.DevModeKey, tufv02.AllowV02MetadataKey), Long: `This command allows users to add a trusted person to the specified policy file. By default, the main policy file is selected. Note that the person's keys can be specified from disk, from the GPG keyring using the "gpg:" format, or as a Sigstore identity as "fulcio:::".`, PreRunE: common.CheckForSigningKeyFlag, RunE: o.Run, DisableAutoGenTag: true, } o.AddFlags(cmd) return cmd } gittuf-0.9.0/internal/cmd/policy/addrule/000077500000000000000000000000001475150141000203165ustar00rootroot00000000000000gittuf-0.9.0/internal/cmd/policy/addrule/addrule.go000066400000000000000000000055761475150141000223020ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package addrule import ( "github.com/gittuf/gittuf/experimental/gittuf" "github.com/gittuf/gittuf/internal/cmd/common" "github.com/gittuf/gittuf/internal/cmd/policy/persistent" "github.com/gittuf/gittuf/internal/policy" "github.com/spf13/cobra" ) type options struct { p *persistent.Options policyName string ruleName string authorizedKeys []string authorizedPrincipalIDs []string rulePatterns []string threshold int } func (o *options) AddFlags(cmd *cobra.Command) { cmd.Flags().StringVar( &o.policyName, "policy-name", policy.TargetsRoleName, "name of policy file to add rule to", ) cmd.Flags().StringVar( &o.ruleName, "rule-name", "", "name of rule", ) cmd.MarkFlagRequired("rule-name") //nolint:errcheck cmd.Flags().StringArrayVar( &o.authorizedKeys, "authorize-key", []string{}, "authorized public key for rule", ) cmd.Flags().MarkDeprecated("authorize-key", "use --authorize instead") //nolint:errcheck cmd.Flags().StringArrayVar( &o.authorizedPrincipalIDs, "authorize", []string{}, "authorize the principal IDs for the rule", ) cmd.MarkFlagsOneRequired("authorize", "authorize-key") cmd.Flags().StringArrayVar( &o.rulePatterns, "rule-pattern", []string{}, "patterns used to identify namespaces rule applies to", ) cmd.MarkFlagRequired("rule-pattern") //nolint:errcheck cmd.Flags().IntVar( &o.threshold, "threshold", 1, "threshold of required valid 
signatures", ) } func (o *options) Run(cmd *cobra.Command, _ []string) error { repo, err := gittuf.LoadRepository() if err != nil { return err } signer, err := gittuf.LoadSigner(repo, o.p.SigningKey) if err != nil { return err } authorizedPrincipalIDs := []string{} for _, key := range o.authorizedKeys { key, err := gittuf.LoadPublicKey(key) if err != nil { return err } authorizedPrincipalIDs = append(authorizedPrincipalIDs, key.ID()) } authorizedPrincipalIDs = append(authorizedPrincipalIDs, o.authorizedPrincipalIDs...) return repo.AddDelegation(cmd.Context(), signer, o.policyName, o.ruleName, authorizedPrincipalIDs, o.rulePatterns, o.threshold, true) } func New(persistent *persistent.Options) *cobra.Command { o := &options{p: persistent} cmd := &cobra.Command{ Use: "add-rule", Short: "Add a new rule to a policy file", Long: `This command allows users to add a new rule to the specified policy file. By default, the main policy file is selected. Note that authorized keys can be specified from disk, from the GPG keyring using the "gpg:" format, or as a Sigstore identity as "fulcio:::".`, PreRunE: common.CheckForSigningKeyFlag, RunE: o.Run, DisableAutoGenTag: true, } o.AddFlags(cmd) return cmd } gittuf-0.9.0/internal/cmd/policy/init/000077500000000000000000000000001475150141000176415ustar00rootroot00000000000000gittuf-0.9.0/internal/cmd/policy/init/init.go000066400000000000000000000022641475150141000211370ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package init import ( "github.com/gittuf/gittuf/experimental/gittuf" "github.com/gittuf/gittuf/internal/cmd/common" "github.com/gittuf/gittuf/internal/cmd/policy/persistent" "github.com/gittuf/gittuf/internal/policy" "github.com/spf13/cobra" ) type options struct { p *persistent.Options policyName string } func (o *options) AddFlags(cmd *cobra.Command) { cmd.Flags().StringVar( &o.policyName, "policy-name", policy.TargetsRoleName, "name of policy file to create", ) } func (o *options) Run(cmd *cobra.Command, _ []string) error { repo, err := gittuf.LoadRepository() if err != nil { return err } signer, err := gittuf.LoadSigner(repo, o.p.SigningKey) if err != nil { return err } return repo.InitializeTargets(cmd.Context(), signer, o.policyName, true) } func New(persistent *persistent.Options) *cobra.Command { o := &options{p: persistent} cmd := &cobra.Command{ Use: "init", Short: "Initialize policy file", PreRunE: common.CheckForSigningKeyFlag, RunE: o.Run, DisableAutoGenTag: true, } o.AddFlags(cmd) return cmd } gittuf-0.9.0/internal/cmd/policy/listprincipals/000077500000000000000000000000001475150141000217365ustar00rootroot00000000000000gittuf-0.9.0/internal/cmd/policy/listprincipals/listprincipals.go000066400000000000000000000032651475150141000253330ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package listprincipals import ( "fmt" "strings" "github.com/gittuf/gittuf/experimental/gittuf" "github.com/gittuf/gittuf/internal/tuf" "github.com/spf13/cobra" ) const indentString = " " type options struct { policyRef string policyName string } func (o *options) AddFlags(cmd *cobra.Command) { cmd.Flags().StringVar( &o.policyRef, "policy-ref", "policy", "specify which policy ref should be inspected", ) cmd.Flags().StringVar( &o.policyName, "policy-name", tuf.TargetsRoleName, "specify rule file to list principals for", ) } func (o *options) Run(cmd *cobra.Command, _ []string) error { repo, err := gittuf.LoadRepository() if err != nil { return err } 
principals, err := repo.ListPrincipals(cmd.Context(), o.policyRef, o.policyName) if err != nil { return err } for _, principal := range principals { fmt.Printf("Principal %s:\n", principal.ID()) fmt.Printf(indentString + "Keys:\n") for _, key := range principal.Keys() { fmt.Printf(strings.Repeat(indentString, 2)+"%s (%s)\n", key.KeyID, key.KeyType) } customMetadata := principal.CustomMetadata() if len(customMetadata) > 0 { fmt.Printf(indentString + "Custom Metadata:\n") for key, value := range principal.CustomMetadata() { fmt.Printf(strings.Repeat(indentString, 2)+"%s: %s\n", key, value) } } } return nil } func New() *cobra.Command { o := &options{} cmd := &cobra.Command{ Use: "list-principals", Short: "List principals for the current policy in the specified rule file", RunE: o.Run, DisableAutoGenTag: true, } o.AddFlags(cmd) return cmd } gittuf-0.9.0/internal/cmd/policy/listrules/000077500000000000000000000000001475150141000207245ustar00rootroot00000000000000gittuf-0.9.0/internal/cmd/policy/listrules/listrules.go000066400000000000000000000043371475150141000233100ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package listrules import ( "fmt" "strings" "github.com/gittuf/gittuf/experimental/gittuf" "github.com/spf13/cobra" ) type options struct { targetRef string } func (o *options) AddFlags(cmd *cobra.Command) { cmd.Flags().StringVar( &o.targetRef, "target-ref", "policy", "specify which policy ref should be inspected", ) } func (o *options) Run(cmd *cobra.Command, _ []string) error { repo, err := gittuf.LoadRepository() if err != nil { return err } rules, err := repo.ListRules(cmd.Context(), o.targetRef) if err != nil { return err } // Iterate through the rules, they are already in order, and the depth tells us how to indent. // The order is a pre-order traversal of the delegation tree, so that the parent is always before the children. 
for _, curRule := range rules { fmt.Printf(strings.Repeat(" ", curRule.Depth)+"Rule %s:\n", curRule.Delegation.ID()) gitpaths, filepaths := []string{}, []string{} for _, path := range curRule.Delegation.GetProtectedNamespaces() { if strings.HasPrefix(path, "git:") { gitpaths = append(gitpaths, path) } else { filepaths = append(filepaths, path) } } if len(filepaths) > 0 { fmt.Println(strings.Repeat(" ", curRule.Depth+1) + "Paths affected:") for _, v := range filepaths { fmt.Printf(strings.Repeat(" ", curRule.Depth+2)+"%s\n", v) } } if len(gitpaths) > 0 { fmt.Println(strings.Repeat(" ", curRule.Depth+1) + "Refs affected:") for _, v := range gitpaths { fmt.Printf(strings.Repeat(" ", curRule.Depth+2)+"%s\n", v) } } fmt.Println(strings.Repeat(" ", curRule.Depth+1) + "Authorized keys:") for _, key := range curRule.Delegation.GetPrincipalIDs().Contents() { fmt.Printf(strings.Repeat(" ", curRule.Depth+2)+"%s\n", key) } fmt.Println(strings.Repeat(" ", curRule.Depth+1) + fmt.Sprintf("Required valid signatures: %d", curRule.Delegation.GetThreshold())) } return nil } func New() *cobra.Command { o := &options{} cmd := &cobra.Command{ Use: "list-rules", Short: "List rules for the current state", RunE: o.Run, DisableAutoGenTag: true, } o.AddFlags(cmd) return cmd } gittuf-0.9.0/internal/cmd/policy/persistent/000077500000000000000000000000001475150141000210765ustar00rootroot00000000000000gittuf-0.9.0/internal/cmd/policy/persistent/persistent.go000066400000000000000000000005471475150141000236330ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package persistent import "github.com/spf13/cobra" type Options struct { SigningKey string } func (o *Options) AddPersistentFlags(cmd *cobra.Command) { cmd.PersistentFlags().StringVarP( &o.SigningKey, "signing-key", "k", "", "signing key to use to sign policy file", ) } gittuf-0.9.0/internal/cmd/policy/policy.go000066400000000000000000000035241475150141000205300ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package policy import ( "github.com/gittuf/gittuf/internal/cmd/policy/addkey" "github.com/gittuf/gittuf/internal/cmd/policy/addperson" "github.com/gittuf/gittuf/internal/cmd/policy/addrule" i "github.com/gittuf/gittuf/internal/cmd/policy/init" "github.com/gittuf/gittuf/internal/cmd/policy/listprincipals" "github.com/gittuf/gittuf/internal/cmd/policy/listrules" "github.com/gittuf/gittuf/internal/cmd/policy/persistent" "github.com/gittuf/gittuf/internal/cmd/policy/removekey" "github.com/gittuf/gittuf/internal/cmd/policy/removeperson" "github.com/gittuf/gittuf/internal/cmd/policy/removerule" "github.com/gittuf/gittuf/internal/cmd/policy/reorderrules" "github.com/gittuf/gittuf/internal/cmd/policy/sign" tui "github.com/gittuf/gittuf/internal/cmd/policy/tui" "github.com/gittuf/gittuf/internal/cmd/policy/updaterule" "github.com/gittuf/gittuf/internal/cmd/trustpolicy/apply" "github.com/gittuf/gittuf/internal/cmd/trustpolicy/discard" "github.com/gittuf/gittuf/internal/cmd/trustpolicy/remote" "github.com/spf13/cobra" ) func New() *cobra.Command { o := &persistent.Options{} cmd := &cobra.Command{ Use: "policy", Short: "Tools to manage gittuf policies", DisableAutoGenTag: true, } o.AddPersistentFlags(cmd) cmd.AddCommand(i.New(o)) cmd.AddCommand(addkey.New(o)) cmd.AddCommand(removekey.New(o)) cmd.AddCommand(addperson.New(o)) cmd.AddCommand(removeperson.New(o)) cmd.AddCommand(apply.New()) cmd.AddCommand(discard.New()) cmd.AddCommand(addrule.New(o)) 
cmd.AddCommand(listprincipals.New()) cmd.AddCommand(listrules.New()) cmd.AddCommand(remote.New()) cmd.AddCommand(removerule.New(o)) cmd.AddCommand(reorderrules.New(o)) cmd.AddCommand(sign.New(o)) cmd.AddCommand(updaterule.New(o)) cmd.AddCommand(tui.New(o)) return cmd } gittuf-0.9.0/internal/cmd/policy/removekey/000077500000000000000000000000001475150141000207045ustar00rootroot00000000000000gittuf-0.9.0/internal/cmd/policy/removekey/removekey.go000066400000000000000000000031241475150141000232410ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package removekey import ( "github.com/gittuf/gittuf/experimental/gittuf" "github.com/gittuf/gittuf/internal/cmd/common" "github.com/gittuf/gittuf/internal/cmd/policy/persistent" "github.com/gittuf/gittuf/internal/policy" "github.com/spf13/cobra" ) type options struct { p *persistent.Options policyName string keyToRemove string } func (o *options) AddFlags(cmd *cobra.Command) { cmd.Flags().StringVar( &o.policyName, "policy-name", policy.TargetsRoleName, "name of policy file to remove key from", ) cmd.Flags().StringVar( &o.keyToRemove, "public-key", "", "public key ID to remove from the policy", ) cmd.MarkFlagRequired("public-key") //nolint:errcheck } func (o *options) Run(cmd *cobra.Command, _ []string) error { repo, err := gittuf.LoadRepository() if err != nil { return err } signer, err := gittuf.LoadSigner(repo, o.p.SigningKey) if err != nil { return err } return repo.RemovePrincipalFromTargets(cmd.Context(), signer, o.policyName, o.keyToRemove, true) } func New(persistent *persistent.Options) *cobra.Command { o := &options{p: persistent} cmd := &cobra.Command{ Use: "remove-key", Short: "Remove a key from a policy file", Long: `This command allows users to remove keys from the specified policy file. The public key ID is required. 
By default, the main policy file is selected.`, PreRunE: common.CheckForSigningKeyFlag, RunE: o.Run, DisableAutoGenTag: true, } o.AddFlags(cmd) return cmd } gittuf-0.9.0/internal/cmd/policy/removeperson/000077500000000000000000000000001475150141000214225ustar00rootroot00000000000000gittuf-0.9.0/internal/cmd/policy/removeperson/removeperson.go000066400000000000000000000037361475150141000245060ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package removeperson import ( "fmt" "github.com/gittuf/gittuf/experimental/gittuf" "github.com/gittuf/gittuf/internal/cmd/common" "github.com/gittuf/gittuf/internal/cmd/policy/persistent" "github.com/gittuf/gittuf/internal/dev" "github.com/gittuf/gittuf/internal/policy" tufv02 "github.com/gittuf/gittuf/internal/tuf/v02" "github.com/spf13/cobra" ) type options struct { p *persistent.Options policyName string personID string } func (o *options) AddFlags(cmd *cobra.Command) { cmd.Flags().StringVar( &o.policyName, "policy-name", policy.TargetsRoleName, "name of policy file to remove person from", ) cmd.Flags().StringVar( &o.personID, "person-ID", "", "person ID", ) cmd.MarkFlagRequired("person-ID") //nolint:errcheck } func (o *options) Run(cmd *cobra.Command, _ []string) error { if !tufv02.AllowV02Metadata() { return fmt.Errorf("developer mode and v0.2 policy metadata must be enabled, set %s=1 and %s=1", dev.DevModeKey, tufv02.AllowV02MetadataKey) } repo, err := gittuf.LoadRepository() if err != nil { return err } signer, err := gittuf.LoadSigner(repo, o.p.SigningKey) if err != nil { return err } return repo.RemovePrincipalFromTargets(cmd.Context(), signer, o.policyName, o.personID, true) } func New(persistent *persistent.Options) *cobra.Command { o := &options{p: persistent} cmd := &cobra.Command{ Use: "remove-person", Short: fmt.Sprintf("Remove a person from a policy file (requires developer mode and v0.2 policy metadata to be enabled, set %s=1 and %s=1)", dev.DevModeKey, tufv02.AllowV02MetadataKey), Long: `This command allows users to remove a person from the specified policy file. The person's ID is required. 
By default, the main policy file is selected.`, PreRunE: common.CheckForSigningKeyFlag, RunE: o.Run, DisableAutoGenTag: true, } o.AddFlags(cmd) return cmd } gittuf-0.9.0/internal/cmd/policy/removerule/000077500000000000000000000000001475150141000210635ustar00rootroot00000000000000gittuf-0.9.0/internal/cmd/policy/removerule/removerule.go000066400000000000000000000025701475150141000236030ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package removerule import ( "github.com/gittuf/gittuf/experimental/gittuf" "github.com/gittuf/gittuf/internal/cmd/common" "github.com/gittuf/gittuf/internal/cmd/policy/persistent" "github.com/gittuf/gittuf/internal/policy" "github.com/spf13/cobra" ) type options struct { p *persistent.Options policyName string ruleName string } func (o *options) AddFlags(cmd *cobra.Command) { cmd.Flags().StringVar( &o.policyName, "policy-name", policy.TargetsRoleName, "name of policy file to remove rule from", ) cmd.Flags().StringVar( &o.ruleName, "rule-name", "", "name of rule", ) cmd.MarkFlagRequired("rule-name") //nolint:errcheck } func (o *options) Run(cmd *cobra.Command, _ []string) error { repo, err := gittuf.LoadRepository() if err != nil { return err } signer, err := gittuf.LoadSigner(repo, o.p.SigningKey) if err != nil { return err } return repo.RemoveDelegation(cmd.Context(), signer, o.policyName, o.ruleName, true) } func New(persistent *persistent.Options) *cobra.Command { o := &options{p: persistent} cmd := &cobra.Command{ Use: "remove-rule", Short: "Remove rule from a policy file", PreRunE: common.CheckForSigningKeyFlag, RunE: o.Run, DisableAutoGenTag: true, } o.AddFlags(cmd) return cmd } gittuf-0.9.0/internal/cmd/policy/reorderrules/000077500000000000000000000000001475150141000214135ustar00rootroot00000000000000gittuf-0.9.0/internal/cmd/policy/reorderrules/reorderrules.go000066400000000000000000000031511475150141000244570ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package reorderrules import ( "github.com/gittuf/gittuf/experimental/gittuf" "github.com/gittuf/gittuf/internal/cmd/common" "github.com/gittuf/gittuf/internal/cmd/policy/persistent" "github.com/spf13/cobra" ) type options struct { p *persistent.Options policyName string ruleNames []string } func (o *options) AddFlags(cmd *cobra.Command) { cmd.Flags().StringVar( &o.policyName, "policy-name", "targets", "name of policy file to reorder rules in", ) } func (o *options) Run(cmd *cobra.Command, args []string) error { o.ruleNames = append(o.ruleNames, args...) repo, err := gittuf.LoadRepository() if err != nil { return err } signer, err := gittuf.LoadSigner(repo, o.p.SigningKey) if err != nil { return err } err = repo.ReorderDelegations(cmd.Context(), signer, o.policyName, o.ruleNames, true) return err } func New(persistent *persistent.Options) *cobra.Command { o := &options{p: persistent} cmd := &cobra.Command{ Use: "reorder-rules", Short: "Reorder rules in the specified policy file", Long: "This command allows users to reorder rules in the specified policy file. By default, the main policy file is selected. The rule names must be passed as arguments in their new order, from the first rule to the last. 
Rule names may contain spaces, so they should be enclosed in quotes if necessary.", PreRunE: common.CheckForSigningKeyFlag, RunE: o.Run, DisableAutoGenTag: true, } o.AddFlags(cmd) return cmd } gittuf-0.9.0/internal/cmd/policy/sign/000077500000000000000000000000001475150141000176365ustar00rootroot00000000000000gittuf-0.9.0/internal/cmd/policy/sign/sign.go000066400000000000000000000024151475150141000211270ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package sign import ( "github.com/gittuf/gittuf/experimental/gittuf" "github.com/gittuf/gittuf/internal/cmd/common" "github.com/gittuf/gittuf/internal/cmd/policy/persistent" "github.com/gittuf/gittuf/internal/policy" "github.com/spf13/cobra" ) type options struct { p *persistent.Options policyName string } func (o *options) AddFlags(cmd *cobra.Command) { cmd.Flags().StringVar( &o.policyName, "policy-name", policy.TargetsRoleName, "name of policy file to sign", ) } func (o *options) Run(cmd *cobra.Command, _ []string) error { repo, err := gittuf.LoadRepository() if err != nil { return err } signer, err := gittuf.LoadSigner(repo, o.p.SigningKey) if err != nil { return err } return repo.SignTargets(cmd.Context(), signer, o.policyName, true) } func New(persistent *persistent.Options) *cobra.Command { o := &options{p: persistent} cmd := &cobra.Command{ Use: "sign", Short: "Sign policy file", Long: "This command allows users to add their signature to the specified policy file.", PreRunE: common.CheckForSigningKeyFlag, RunE: o.Run, DisableAutoGenTag: true, } o.AddFlags(cmd) return cmd } gittuf-0.9.0/internal/cmd/policy/tui/000077500000000000000000000000001475150141000174775ustar00rootroot00000000000000gittuf-0.9.0/internal/cmd/policy/tui/tui.go000066400000000000000000000335231475150141000206350ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package tui import ( "context" "fmt" "strings" "github.com/charmbracelet/bubbles/cursor" "github.com/charmbracelet/bubbles/list" "github.com/charmbracelet/bubbles/textinput" tea "github.com/charmbracelet/bubbletea" "github.com/charmbracelet/lipgloss" "github.com/gittuf/gittuf/experimental/gittuf" "github.com/gittuf/gittuf/internal/cmd/common" "github.com/gittuf/gittuf/internal/cmd/policy/persistent" "github.com/gittuf/gittuf/internal/policy" "github.com/secure-systems-lab/go-securesystemslib/dsse" "github.com/spf13/cobra" ) const ( colorRegularText = "#FFFFFF" colorFocus = "#007AFF" colorBlur = "#A0A0A0" colorFooter = "#FF0000" colorSubtext = "#555555" ) var ( titleStyle = lipgloss.NewStyle(). Foreground(lipgloss.Color(colorRegularText)). Padding(0, 2). MarginTop(1). Bold(true) itemStyle = lipgloss.NewStyle(). PaddingLeft(4). Foreground(lipgloss.Color(colorRegularText)) selectedItemStyle = lipgloss.NewStyle(). PaddingLeft(4). Foreground(lipgloss.Color(colorRegularText)). Background(lipgloss.Color(colorFocus)) focusedStyle = lipgloss.NewStyle(). PaddingLeft(4) blurredStyle = lipgloss.NewStyle(). Foreground(lipgloss.Color(colorBlur)) cursorStyle = lipgloss.NewStyle(). 
Foreground(lipgloss.Color(colorRegularText)) ) type screen int const ( screenMain screen = iota screenAddRule screenRemoveRule screenListRules screenReorderRules ) type rule struct { name string pattern string key string } type model struct { screen screen mainList list.Model rules []rule ruleList list.Model inputs []textinput.Model focusIndex int cursorMode cursor.Mode repo *gittuf.Repository signer dsse.SignerVerifier policyName string options *options footer string } // initialModel returns the initial model for the Terminal UI func initialModel(o *options) model { repo, err := gittuf.LoadRepository() if err != nil { return model{} } signer, err := gittuf.LoadSigner(repo, o.p.SigningKey) if err != nil { return model{} } // Initialize the model m := model{ screen: screenMain, cursorMode: cursor.CursorBlink, repo: repo, signer: signer, policyName: o.policyName, rules: getCurrRules(o), options: o, } // Set up the main list items mainItems := []list.Item{ item{title: "Add Rule", desc: "Add a new policy rule"}, item{title: "Remove Rule", desc: "Remove an existing policy rule"}, item{title: "List Rules", desc: "View all current policy rules"}, item{title: "Reorder Rules", desc: "Change the order of policy rules"}, } // Set up the list delegate delegate := list.NewDefaultDelegate() delegate.Styles.SelectedTitle = selectedItemStyle delegate.Styles.SelectedDesc = selectedItemStyle delegate.Styles.NormalTitle = itemStyle delegate.Styles.NormalDesc = itemStyle // Set up the main list m.mainList = list.New(mainItems, delegate, 0, 0) m.mainList.Title = "gittuf Policy Operations" m.mainList.SetShowStatusBar(false) m.mainList.SetFilteringEnabled(false) m.mainList.Styles.Title = titleStyle m.mainList.SetShowHelp(false) // Set up the rule list m.ruleList = list.New([]list.Item{}, delegate, 0, 0) m.ruleList.Title = "Current Rules" m.ruleList.SetShowStatusBar(false) m.ruleList.SetFilteringEnabled(false) m.ruleList.Styles.Title = titleStyle m.ruleList.SetShowHelp(false) // Set up the input fields m.inputs = make([]textinput.Model, 3) for i := range m.inputs { t := textinput.New() t.Cursor.Style = cursorStyle t.CharLimit = 64 switch i { case 0: t.Placeholder = "Enter Rule Name Here" t.Focus() t.PromptStyle = focusedStyle t.Prompt = "Rule Name:" t.TextStyle = focusedStyle case 1: t.Placeholder = "Enter Pattern Here" t.Prompt = "Pattern:" t.PromptStyle = blurredStyle t.TextStyle = blurredStyle case 2: t.Placeholder = "Enter Path to Key Here" t.Prompt = "Authorize Key:" t.PromptStyle = blurredStyle t.TextStyle = blurredStyle } m.inputs[i] = t } return m } type item struct { title, desc string } // virtual methods must be implemented for the item struct // Title returns the title of the item func (i item) Title() string { return i.title } // Description returns the description of the item func (i item) Description() string { return i.desc } // FilterValue returns the value to filter on func (i item) FilterValue() string { return i.title } // Init initializes the input field func (m model) Init() tea.Cmd { return textinput.Blink } // Update updates the model based on the message received func (m model) Update(msg tea.Msg) (tea.Model, tea.Cmd) { var cmd tea.Cmd switch msg := msg.(type) { case tea.WindowSizeMsg: h, v := lipgloss.NewStyle().Margin(1, 2).GetFrameSize() m.mainList.SetSize(msg.Width-h, msg.Height-v) m.ruleList.SetSize(msg.Width-h, msg.Height-v) case tea.KeyMsg: switch msg.String() { case "ctrl+c", "q": return m, tea.Quit case "left": if m.screen != screenMain { m.footer = "" // Clear footer on 
navigation m.screen = screenMain return m, nil } case "enter": switch m.screen { case screenMain: i, ok := m.mainList.SelectedItem().(item) if ok { switch i.title { case "Add Rule": m.screen = screenAddRule m.focusIndex = 0 m.inputs[0].Focus() case "Remove Rule": m.screen = screenRemoveRule m.updateRuleList() case "List Rules": m.screen = screenListRules case "Reorder Rules": m.screen = screenReorderRules m.updateRuleList() } } case screenAddRule: if m.focusIndex == len(m.inputs)-1 { newRule := rule{ name: m.inputs[0].Value(), pattern: m.inputs[1].Value(), key: m.inputs[2].Value(), } authorizedKeys := []string{m.inputs[2].Value()} err := repoAddRule(m.options, newRule, authorizedKeys) if err != nil { m.footer = fmt.Sprintf("Error adding rule: %v", err) return m, nil } m.rules = append(m.rules, newRule) m.updateRuleList() m.footer = "Rule added successfully!" m.screen = screenMain } case screenRemoveRule: if i, ok := m.ruleList.SelectedItem().(item); ok { err := repoRemoveRule(m.options, rule{name: i.title}) if err != nil { m.footer = fmt.Sprintf("Error removing rule: %v", err) return m, nil } for idx, rule := range m.rules { if rule.name == i.title { m.rules = append(m.rules[:idx], m.rules[idx+1:]...) break } } m.updateRuleList() m.footer = "Rule removed successfully!" m.screen = screenMain } } case "u": if m.screen == screenReorderRules { if i := m.ruleList.Index(); i > 0 { m.rules[i], m.rules[i-1] = m.rules[i-1], m.rules[i] if err := repoReorderRules(m.options, m.rules); err != nil { m.footer = fmt.Sprintf("Error reordering rules: %v", err) return m, nil } m.updateRuleList() m.footer = "Rules reordered successfully!" } } case "d": if m.screen == screenReorderRules { if i := m.ruleList.Index(); i < len(m.rules)-1 { m.rules[i], m.rules[i+1] = m.rules[i+1], m.rules[i] if err := repoReorderRules(m.options, m.rules); err != nil { m.footer = fmt.Sprintf("Error reordering rules: %v", err) return m, nil } m.updateRuleList() m.footer = "Rules reordered successfully!" 
} } case "tab", "shift+tab", "up", "down": if m.screen == screenAddRule { s := msg.String() if s == "up" || s == "shift+tab" { if m.focusIndex > 0 { m.focusIndex-- m.footer = "" } else { m.focusIndex = len(m.inputs) - 1 } } else { if m.focusIndex < len(m.inputs)-1 { m.focusIndex++ } else { m.focusIndex = 0 } } for i := 0; i <= len(m.inputs)-1; i++ { if i == m.focusIndex { m.inputs[i].Focus() m.inputs[i].PromptStyle = focusedStyle m.inputs[i].TextStyle = focusedStyle continue } m.inputs[i].Blur() m.inputs[i].PromptStyle = blurredStyle m.inputs[i].TextStyle = blurredStyle } return m, nil } } } switch m.screen { case screenMain: m.mainList, cmd = m.mainList.Update(msg) case screenAddRule: m.inputs[m.focusIndex], cmd = m.inputs[m.focusIndex].Update(msg) case screenRemoveRule, screenReorderRules: m.ruleList, cmd = m.ruleList.Update(msg) } return m, cmd } // updateRuleList updates the rule list within TUI func (m *model) updateRuleList() { items := make([]list.Item, len(m.rules)) for i, rule := range m.rules { items[i] = item{title: rule.name, desc: fmt.Sprintf("Pattern: %s, Key: %s", rule.pattern, rule.key)} } m.ruleList.SetItems(items) } // View renders the TUI func (m model) View() string { switch m.screen { case screenMain: return lipgloss.NewStyle().Margin(1, 2).Render( m.mainList.View() + "\n" + lipgloss.NewStyle().Foreground(lipgloss.Color(colorFooter)).Render(m.footer) + "\nRun `gittuf policy apply` to apply staged changes to the selected policy file", ) case screenAddRule: var b strings.Builder b.WriteString(titleStyle.Render("Add Rule") + "\n\n") for _, input := range m.inputs { b.WriteString(input.View() + "\n") } b.WriteString("\nPress Enter to add, Left Arrow to go back\n") b.WriteString(lipgloss.NewStyle().Foreground(lipgloss.Color(colorFooter)).Render(m.footer)) return lipgloss.NewStyle().Margin(1, 2).Render(b.String()) case screenRemoveRule: return lipgloss.NewStyle().Margin(1, 2).Render( m.ruleList.View() + "\n\n" + lipgloss.NewStyle().Foreground(lipgloss.Color(colorFooter)).Render(m.footer) + "\nPress Enter to remove selected rule, Left Arrow to go back", ) case screenListRules: var sb strings.Builder sb.WriteString(titleStyle.Render("Current Rules") + "\n\n") for _, rule := range m.rules { sb.WriteString(fmt.Sprintf("- %s\n Pattern: %s\n Key: %s\n\n", lipgloss.NewStyle().Foreground(lipgloss.Color(colorRegularText)).Bold(true).Render(rule.name), lipgloss.NewStyle().Foreground(lipgloss.Color(colorSubtext)).Render(rule.pattern), lipgloss.NewStyle().Foreground(lipgloss.Color(colorSubtext)).Render(rule.key))) } sb.WriteString("\nPress Left Arrow to go back") return lipgloss.NewStyle().Margin(1, 2).Render(sb.String()) case screenReorderRules: return lipgloss.NewStyle().Margin(1, 2).Render( m.ruleList.View() + "\n\n" + lipgloss.NewStyle().Foreground(lipgloss.Color(colorFooter)).Render(m.footer) + "\nUse 'u' to move up, 'd' to move down, Left Arrow to go back", ) default: return "Unknown screen" } } // getCurrRules returns the current rules from the policy file func getCurrRules(o *options) []rule { repo, err := gittuf.LoadRepository() if err != nil { return nil } rules, err := repo.ListRules(context.Background(), o.targetRef) if err != nil { return nil } var currRules = make([]rule, len(rules)) for i, r := range rules { currRules[i] = rule{ name: r.Delegation.ID(), pattern: strings.Join(r.Delegation.GetProtectedNamespaces(), ", "), key: strings.Join(r.Delegation.GetPrincipalIDs().Contents(), ", "), } } return currRules } // repoAddRule adds a rule to the policy file func 
repoAddRule(o *options, rule rule, keyPath []string) error { repo, err := gittuf.LoadRepository() if err != nil { return err } signer, err := gittuf.LoadSigner(repo, o.p.SigningKey) if err != nil { return err } authorizedPrincipalIDs := []string{} for _, key := range keyPath { key, err := gittuf.LoadPublicKey(key) if err != nil { return err } authorizedPrincipalIDs = append(authorizedPrincipalIDs, key.ID()) } res := repo.AddDelegation(context.Background(), signer, o.policyName, rule.name, authorizedPrincipalIDs, []string{rule.pattern}, 1, true) return res } // repoRemoveRule removes a rule from the policy file func repoRemoveRule(o *options, rule rule) error { repo, err := gittuf.LoadRepository() if err != nil { return err } signer, err := gittuf.LoadSigner(repo, o.p.SigningKey) if err != nil { return err } return repo.RemoveDelegation(context.Background(), signer, o.policyName, rule.name, true) } // repoReorderRules reorders the rules in the policy file func repoReorderRules(o *options, rules []rule) error { repo, err := gittuf.LoadRepository() if err != nil { return err } signer, err := gittuf.LoadSigner(repo, o.p.SigningKey) if err != nil { return err } ruleNames := make([]string, len(rules)) for i, r := range rules { ruleNames[i] = r. name } return repo.ReorderDelegations(context.Background(), signer, o.policyName, ruleNames, true) } // startTUI starts the TUI func startTUI(o *options) error { p := tea.NewProgram(initialModel(o), tea.WithAltScreen()) _, err := p.Run() return err } type options struct { p *persistent.Options policyName string targetRef string } func (o *options) AddFlags(cmd *cobra.Command) { cmd.Flags().StringVar( &o.targetRef, "target-ref", "policy", "specify which policy ref should be inspected", ) cmd.Flags().StringVar( &o.policyName, "policy-name", policy.TargetsRoleName, "name of policy file to make changes to", ) } func (o *options) Run(_ *cobra.Command, _ []string) error { return startTUI(o) } func New(persistent *persistent.Options) *cobra.Command { o := &options{p: persistent} cmd := &cobra.Command{ Use: "tui", Short: "Start the TUI for managing policies", Long: "This command allows users to start a terminal-based interface to manage policies. The signing key specified will be used to sign all operations while in the TUI. 
Changes to the policy files in the TUI are staged immediately without further confirmation and users are required to run `gittuf policy apply` to commit the changes", PreRunE: common.CheckForSigningKeyFlag, RunE: o.Run, DisableAutoGenTag: true, } o.AddFlags(cmd) return cmd } gittuf-0.9.0/internal/cmd/policy/updaterule/000077500000000000000000000000001475150141000210505ustar00rootroot00000000000000gittuf-0.9.0/internal/cmd/policy/updaterule/updaterule.go000066400000000000000000000056311475150141000235560ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package updaterule import ( "github.com/gittuf/gittuf/experimental/gittuf" "github.com/gittuf/gittuf/internal/cmd/common" "github.com/gittuf/gittuf/internal/cmd/policy/persistent" "github.com/gittuf/gittuf/internal/policy" "github.com/spf13/cobra" ) type options struct { p *persistent.Options policyName string ruleName string authorizedKeys []string authorizedPrincipalIDs []string rulePatterns []string threshold int } func (o *options) AddFlags(cmd *cobra.Command) { cmd.Flags().StringVar( &o.policyName, "policy-name", policy.TargetsRoleName, "name of policy file to add rule to", ) cmd.Flags().StringVar( &o.ruleName, "rule-name", "", "name of rule", ) cmd.MarkFlagRequired("rule-name") //nolint:errcheck cmd.Flags().StringArrayVar( &o.authorizedKeys, "authorize-key", []string{}, "authorized public key for rule", ) cmd.Flags().MarkDeprecated("authorize-key", "use --authorize instead") //nolint:errcheck cmd.Flags().StringArrayVar( &o.authorizedPrincipalIDs, "authorize", []string{}, "authorize the principal IDs for the rule", ) cmd.MarkFlagsOneRequired("authorize", "authorize-key") cmd.Flags().StringArrayVar( &o.rulePatterns, "rule-pattern", []string{}, "patterns used to identify namespaces rule applies to", ) cmd.MarkFlagRequired("rule-pattern") //nolint:errcheck cmd.Flags().IntVar( &o.threshold, "threshold", 1, "threshold of required valid signatures", ) } func (o *options) Run(cmd *cobra.Command, _ []string) error { repo, err := gittuf.LoadRepository() if err != nil { return err } signer, err := gittuf.LoadSigner(repo, o.p.SigningKey) if err != nil { return err } authorizedPrincipalIDs := []string{} for _, key := range o.authorizedKeys { key, err := gittuf.LoadPublicKey(key) if err != nil { return err } authorizedPrincipalIDs = append(authorizedPrincipalIDs, key.ID()) } authorizedPrincipalIDs = append(authorizedPrincipalIDs, o.authorizedPrincipalIDs...) return repo.UpdateDelegation(cmd.Context(), signer, o.policyName, o.ruleName, authorizedPrincipalIDs, o.rulePatterns, o.threshold, true) } func New(persistent *persistent.Options) *cobra.Command { o := &options{p: persistent} cmd := &cobra.Command{ Use: "update-rule", Short: "Update an existing rule in a policy file", Long: `This command allows users to update an existing rule in the specified policy file. By default, the main policy file is selected. 
Note that authorized keys can be specified from disk, from the GPG keyring using the "gpg:<fingerprint>" format, or as a Sigstore identity as "fulcio:<identity>::<issuer>".`, PreRunE: common.CheckForSigningKeyFlag, RunE: o.Run, DisableAutoGenTag: true, } o.AddFlags(cmd) return cmd } gittuf-0.9.0/internal/cmd/profile/000077500000000000000000000000001475150141000170375ustar00rootroot00000000000000gittuf-0.9.0/internal/cmd/profile/profile.go000066400000000000000000000015741475150141000210330ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package profile import ( "os" "runtime/pprof" ) var stopProfilingQueue = []func() error{} func StartProfiling(cpuFile, memoryFile string) error { cpuF, err := os.Create(cpuFile) if err != nil { return err } if err := pprof.StartCPUProfile(cpuF); err != nil { return err } stopProfilingQueue = append(stopProfilingQueue, func() error { pprof.StopCPUProfile() return cpuF.Close() }) memoryF, err := os.Create(memoryFile) if err != nil { return err } stopProfilingQueue = append(stopProfilingQueue, func() error { if err := pprof.WriteHeapProfile(memoryF); err != nil { return err } return memoryF.Close() }) return nil } func StopProfiling() error { for _, f := range stopProfilingQueue { if f != nil { if err := f(); err != nil { return err } } } return nil } gittuf-0.9.0/internal/cmd/root/000077500000000000000000000000001475150141000163625ustar00rootroot00000000000000gittuf-0.9.0/internal/cmd/root/root.go000066400000000000000000000052701475150141000177000ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package root import ( "log/slog" "os" "github.com/gittuf/gittuf/internal/cmd/addhooks" "github.com/gittuf/gittuf/internal/cmd/attest" "github.com/gittuf/gittuf/internal/cmd/clone" "github.com/gittuf/gittuf/internal/cmd/dev" "github.com/gittuf/gittuf/internal/cmd/policy" "github.com/gittuf/gittuf/internal/cmd/profile" "github.com/gittuf/gittuf/internal/cmd/rsl" "github.com/gittuf/gittuf/internal/cmd/trust" "github.com/gittuf/gittuf/internal/cmd/verifymergeable" "github.com/gittuf/gittuf/internal/cmd/verifyref" "github.com/gittuf/gittuf/internal/cmd/version" "github.com/gittuf/gittuf/internal/display" "github.com/mattn/go-isatty" "github.com/spf13/cobra" ) type options struct { noColor bool verbose bool profile bool cpuProfileFile string memoryProfileFile string } func (o *options) AddFlags(cmd *cobra.Command) { cmd.PersistentFlags().BoolVar( &o.noColor, "no-color", false, "turn off colored output", ) cmd.PersistentFlags().BoolVar( &o.verbose, "verbose", false, "enable verbose logging", ) cmd.PersistentFlags().BoolVar( &o.profile, "profile", false, "enable CPU and memory profiling", ) cmd.PersistentFlags().StringVar( &o.cpuProfileFile, "profile-CPU-file", "cpu.prof", "file to store CPU profile", ) cmd.PersistentFlags().StringVar( &o.memoryProfileFile, "profile-memory-file", "memory.prof", "file to store memory profile", ) } func (o *options) PreRunE(_ *cobra.Command, _ []string) error { // Check if colored output must be disabled output := os.Stdout isTerminal := isatty.IsTerminal(output.Fd()) || isatty.IsCygwinTerminal(output.Fd()) if o.noColor || !isTerminal { display.DisableColor() } // Setup logging level := slog.LevelInfo if o.verbose { level = slog.LevelDebug } slog.SetDefault(slog.New(slog.NewTextHandler(os.Stderr, &slog.HandlerOptions{ Level: level, }))) // Start profiling if flag is set if o.profile { return profile.StartProfiling(o.cpuProfileFile, o.memoryProfileFile) } return nil } func 
New() *cobra.Command { o := &options{} cmd := &cobra.Command{ Use: "gittuf", Short: "A security layer for Git repositories, powered by TUF", SilenceUsage: true, DisableAutoGenTag: true, PersistentPreRunE: o.PreRunE, } o.AddFlags(cmd) cmd.AddCommand(addhooks.New()) cmd.AddCommand(attest.New()) cmd.AddCommand(clone.New()) cmd.AddCommand(dev.New()) cmd.AddCommand(trust.New()) cmd.AddCommand(policy.New()) cmd.AddCommand(rsl.New()) cmd.AddCommand(verifymergeable.New()) cmd.AddCommand(verifyref.New()) cmd.AddCommand(version.New()) return cmd } gittuf-0.9.0/internal/cmd/rsl/000077500000000000000000000000001475150141000161775ustar00rootroot00000000000000gittuf-0.9.0/internal/cmd/rsl/annotate/000077500000000000000000000000001475150141000200105ustar00rootroot00000000000000gittuf-0.9.0/internal/cmd/rsl/annotate/annotate.go000066400000000000000000000020211475150141000221430ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package annotate import ( "github.com/gittuf/gittuf/experimental/gittuf" "github.com/spf13/cobra" ) type options struct { skip bool message string } func (o *options) AddFlags(cmd *cobra.Command) { cmd.Flags().BoolVarP( &o.skip, "skip", "s", false, "mark annotated entries as to be skipped", ) cmd.Flags().StringVarP( &o.message, "message", "m", "", "annotation message", ) cmd.MarkFlagRequired("message") //nolint:errcheck } func (o *options) Run(cmd *cobra.Command, args []string) error { repo, err := gittuf.LoadRepository() if err != nil { return err } return repo.RecordRSLAnnotation(cmd.Context(), args, o.skip, o.message, true) } func New() *cobra.Command { o := &options{} cmd := &cobra.Command{ Use: "annotate", Short: "Annotate prior RSL entries", Args: cobra.MinimumNArgs(1), RunE: o.Run, DisableAutoGenTag: true, } o.AddFlags(cmd) return cmd } gittuf-0.9.0/internal/cmd/rsl/log/000077500000000000000000000000001475150141000167605ustar00rootroot00000000000000gittuf-0.9.0/internal/cmd/rsl/log/log.go000066400000000000000000000014031475150141000200660ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package log import ( "os" "github.com/gittuf/gittuf/experimental/gittuf" "github.com/gittuf/gittuf/internal/display" "github.com/spf13/cobra" ) type options struct{} func (o *options) AddFlags(_ *cobra.Command) {} func (o *options) Run(_ *cobra.Command, _ []string) error { repo, err := gittuf.LoadRepository() if err != nil { return err } return display.RSLLog(repo.GetGitRepository(), display.NewDisplayWriter(os.Stdout)) } func New() *cobra.Command { o := &options{} cmd := &cobra.Command{ Use: "log", Short: "Display the repository's Reference State Log", RunE: o.Run, DisableAutoGenTag: true, } o.AddFlags(cmd) return cmd } gittuf-0.9.0/internal/cmd/rsl/propagate/000077500000000000000000000000001475150141000201615ustar00rootroot00000000000000gittuf-0.9.0/internal/cmd/rsl/propagate/propagate.go000066400000000000000000000016141475150141000224740ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package propagate import ( "fmt" "github.com/gittuf/gittuf/experimental/gittuf" "github.com/gittuf/gittuf/internal/dev" "github.com/spf13/cobra" ) type options struct{} func (o *options) AddFlags(_ *cobra.Command) {} func (o *options) Run(cmd *cobra.Command, _ []string) error { if !dev.InDevMode() { return dev.ErrNotInDevMode } repo, err := gittuf.LoadRepository() if err != nil { return err } return repo.PropagateChangesFromUpstreamRepositories(cmd.Context(), 
true) } func New() *cobra.Command { o := &options{} cmd := &cobra.Command{ Use: "propagate", Short: fmt.Sprintf("Propagate contents of remote repositories into local repository (developer mode only, set %s=1)", dev.DevModeKey), RunE: o.Run, DisableAutoGenTag: true, } o.AddFlags(cmd) return cmd } gittuf-0.9.0/internal/cmd/rsl/record/000077500000000000000000000000001475150141000174555ustar00rootroot00000000000000gittuf-0.9.0/internal/cmd/rsl/record/record.go000066400000000000000000000030241475150141000212610ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package record import ( "github.com/gittuf/gittuf/experimental/gittuf" rslopts "github.com/gittuf/gittuf/experimental/gittuf/options/rsl" "github.com/spf13/cobra" ) type options struct { dstRef string skipDuplicateCheck bool skipPropagation bool } func (o *options) AddFlags(cmd *cobra.Command) { cmd.Flags().StringVar( &o.dstRef, "dst-ref", "", "name of destination reference, if it differs from source reference", ) cmd.Flags().BoolVar( &o.skipDuplicateCheck, "skip-duplicate-check", false, "skip check to see if latest entry for reference has same target", ) cmd.Flags().BoolVar( &o.skipPropagation, "skip-propagation", false, "skip propagation workflow", ) } func (o *options) Run(cmd *cobra.Command, args []string) error { repo, err := gittuf.LoadRepository() if err != nil { return err } opts := []rslopts.Option{rslopts.WithOverrideRefName(o.dstRef)} if o.skipDuplicateCheck { opts = append(opts, rslopts.WithSkipCheckForDuplicateEntry()) } if o.skipPropagation { opts = append(opts, rslopts.WithSkipPropagation()) } return repo.RecordRSLEntryForReference(cmd.Context(), args[0], true, opts...) } func New() *cobra.Command { o := &options{} cmd := &cobra.Command{ Use: "record", Short: "Record latest state of a Git reference in the RSL", Args: cobra.ExactArgs(1), RunE: o.Run, DisableAutoGenTag: true, } o.AddFlags(cmd) return cmd } gittuf-0.9.0/internal/cmd/rsl/remote/000077500000000000000000000000001475150141000174725ustar00rootroot00000000000000gittuf-0.9.0/internal/cmd/rsl/remote/check/000077500000000000000000000000001475150141000205475ustar00rootroot00000000000000gittuf-0.9.0/internal/cmd/rsl/remote/check/check.go000066400000000000000000000021741475150141000221570ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package check import ( "fmt" "github.com/gittuf/gittuf/experimental/gittuf" "github.com/spf13/cobra" ) type options struct { } func (o *options) Run(cmd *cobra.Command, args []string) error { repo, err := gittuf.LoadRepository() if err != nil { return err } hasUpdates, hasDiverged, err := repo.CheckRemoteRSLForUpdates(cmd.Context(), args[0]) //nolint:staticcheck if err != nil { return err } if hasUpdates { fmt.Printf("RSL at remote %s has updates", args[0]) if hasDiverged { fmt.Printf(" and has diverged from local RSL") } } else { fmt.Printf("RSL at remote %s has no updates", args[0]) } fmt.Println() // Trailing newline return nil } func New() *cobra.Command { o := &options{} cmd := &cobra.Command{ Use: "check ", Short: "Check remote RSL for updates, for development use only", Args: cobra.ExactArgs(1), RunE: o.Run, Deprecated: "This command will be replaced soon with the new reconciliation workflow", DisableAutoGenTag: true, } return cmd } 
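A minimal sketch of driving the same check from the experimental gittuf Go API that the command above wraps, assuming the LoadRepository and CheckRemoteRSLForUpdates signatures shown in check.go; the remote name "origin" and the standalone program layout are illustrative assumptions only, and check.go itself marks this workflow as deprecated in favour of reconciliation.

// Sketch (assumed usage, not part of gittuf): standalone use of the API behind `rsl remote check <remote>`.
package main

import (
	"context"
	"fmt"
	"log"

	"github.com/gittuf/gittuf/experimental/gittuf"
)

func main() {
	// Load the repository from the current working directory, as the CLI wrappers above do.
	repo, err := gittuf.LoadRepository()
	if err != nil {
		log.Fatal(err)
	}

	// Report whether the remote's RSL has new entries and whether it has
	// diverged from the local RSL; "origin" is an assumed remote name.
	hasUpdates, hasDiverged, err := repo.CheckRemoteRSLForUpdates(context.Background(), "origin")
	if err != nil {
		log.Fatal(err)
	}

	fmt.Printf("RSL updates: %t, diverged: %t\n", hasUpdates, hasDiverged)
}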
gittuf-0.9.0/internal/cmd/rsl/remote/pull/000077500000000000000000000000001475150141000204465ustar00rootroot00000000000000gittuf-0.9.0/internal/cmd/rsl/remote/pull/pull.go000066400000000000000000000012051475150141000217470ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package pull import ( "github.com/gittuf/gittuf/experimental/gittuf" "github.com/spf13/cobra" ) type options struct { } func (o *options) Run(_ *cobra.Command, args []string) error { repo, err := gittuf.LoadRepository() if err != nil { return err } return repo.PullRSL(args[0]) } func New() *cobra.Command { o := &options{} cmd := &cobra.Command{ Use: "pull ", Short: "Pull RSL from the specified remote", Args: cobra.ExactArgs(1), RunE: o.Run, DisableAutoGenTag: true, } return cmd } gittuf-0.9.0/internal/cmd/rsl/remote/push/000077500000000000000000000000001475150141000204515ustar00rootroot00000000000000gittuf-0.9.0/internal/cmd/rsl/remote/push/push.go000066400000000000000000000012031475150141000217530ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package push import ( "github.com/gittuf/gittuf/experimental/gittuf" "github.com/spf13/cobra" ) type options struct { } func (o *options) Run(_ *cobra.Command, args []string) error { repo, err := gittuf.LoadRepository() if err != nil { return err } return repo.PushRSL(args[0]) } func New() *cobra.Command { o := &options{} cmd := &cobra.Command{ Use: "push ", Short: "Push RSL to the specified remote", Args: cobra.ExactArgs(1), RunE: o.Run, DisableAutoGenTag: true, } return cmd } gittuf-0.9.0/internal/cmd/rsl/remote/reconcile/000077500000000000000000000000001475150141000214355ustar00rootroot00000000000000gittuf-0.9.0/internal/cmd/rsl/remote/reconcile/reconcile.go000066400000000000000000000023261475150141000237320ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package reconcile import ( "github.com/gittuf/gittuf/experimental/gittuf" "github.com/spf13/cobra" ) type options struct{} func (o *options) Run(cmd *cobra.Command, args []string) error { repo, err := gittuf.LoadRepository() if err != nil { return err } return repo.ReconcileLocalRSLWithRemote(cmd.Context(), args[0], true) } func New() *cobra.Command { o := &options{} cmd := &cobra.Command{ Use: "reconcile ", Short: "Reconcile local RSL with remote RSL", Long: `This command checks the local RSL against the specified remote and reconciles the local RSL if needed. If the local RSL doesn't exist or is strictly behind the remote RSL, then the local RSL is updated to match the remote RSL. If the local RSL is ahead of the remote RSL, nothing is updated. 
Finally, if the local and remote RSLs have diverged, then the local only RSL entries are reapplied over the latest entries in the remote if the local only RSL entries and remote only entries are for different Git references.`, Args: cobra.ExactArgs(1), RunE: o.Run, DisableAutoGenTag: true, } return cmd } gittuf-0.9.0/internal/cmd/rsl/remote/remote.go000066400000000000000000000012261475150141000213150ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package remote import ( "github.com/gittuf/gittuf/internal/cmd/rsl/remote/check" "github.com/gittuf/gittuf/internal/cmd/rsl/remote/pull" "github.com/gittuf/gittuf/internal/cmd/rsl/remote/push" "github.com/gittuf/gittuf/internal/cmd/rsl/remote/reconcile" "github.com/spf13/cobra" ) func New() *cobra.Command { cmd := &cobra.Command{ Use: "remote", Short: "Tools for managing remote RSLs", DisableAutoGenTag: true, } cmd.AddCommand(check.New()) cmd.AddCommand(pull.New()) cmd.AddCommand(push.New()) cmd.AddCommand(reconcile.New()) return cmd } gittuf-0.9.0/internal/cmd/rsl/rsl.go000066400000000000000000000015041475150141000173260ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package rsl import ( "github.com/gittuf/gittuf/internal/cmd/rsl/annotate" "github.com/gittuf/gittuf/internal/cmd/rsl/log" "github.com/gittuf/gittuf/internal/cmd/rsl/propagate" "github.com/gittuf/gittuf/internal/cmd/rsl/record" "github.com/gittuf/gittuf/internal/cmd/rsl/remote" "github.com/gittuf/gittuf/internal/cmd/rsl/skiprewritten" "github.com/spf13/cobra" ) func New() *cobra.Command { cmd := &cobra.Command{ Use: "rsl", Short: "Tools to manage the repository's reference state log", DisableAutoGenTag: true, } cmd.AddCommand(annotate.New()) cmd.AddCommand(log.New()) cmd.AddCommand(propagate.New()) cmd.AddCommand(record.New()) cmd.AddCommand(remote.New()) cmd.AddCommand(skiprewritten.New()) return cmd } gittuf-0.9.0/internal/cmd/rsl/skiprewritten/000077500000000000000000000000001475150141000211115ustar00rootroot00000000000000gittuf-0.9.0/internal/cmd/rsl/skiprewritten/skiprewritten.go000066400000000000000000000015041475150141000243520ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package skiprewritten import ( "github.com/gittuf/gittuf/experimental/gittuf" "github.com/spf13/cobra" ) type options struct{} func (o *options) AddFlags(_ *cobra.Command) {} func (o *options) Run(_ *cobra.Command, args []string) error { repo, err := gittuf.LoadRepository() if err != nil { return err } return repo.SkipAllInvalidReferenceEntriesForRef(args[0], true) } func New() *cobra.Command { o := &options{} cmd := &cobra.Command{ Use: "skip-rewritten", Short: "Creates an RSL annotation to skip RSL reference entries that point to commits that do not exist in the specified ref", Args: cobra.ExactArgs(1), RunE: o.Run, DisableAutoGenTag: true, } o.AddFlags(cmd) return cmd } gittuf-0.9.0/internal/cmd/trust/000077500000000000000000000000001475150141000165605ustar00rootroot00000000000000gittuf-0.9.0/internal/cmd/trust/addgithubapp/000077500000000000000000000000001475150141000212145ustar00rootroot00000000000000gittuf-0.9.0/internal/cmd/trust/addgithubapp/addgithubapp.go000066400000000000000000000031511475150141000241770ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package addgithubapp import ( "github.com/gittuf/gittuf/experimental/gittuf" "github.com/gittuf/gittuf/internal/cmd/common" 
"github.com/gittuf/gittuf/internal/cmd/trust/persistent" "github.com/spf13/cobra" ) type options struct { p *persistent.Options appKey string } func (o *options) AddFlags(cmd *cobra.Command) { cmd.Flags().StringVar( &o.appKey, "app-key", "", "app key to add to root of trust", ) cmd.MarkFlagRequired("app-key") //nolint:errcheck } func (o *options) Run(cmd *cobra.Command, _ []string) error { repo, err := gittuf.LoadRepository() if err != nil { return err } signer, err := gittuf.LoadSigner(repo, o.p.SigningKey) if err != nil { return err } appKey, err := gittuf.LoadPublicKey(o.appKey) if err != nil { return err } return repo.AddGitHubApp(cmd.Context(), signer, appKey, true) } func New(persistent *persistent.Options) *cobra.Command { o := &options{p: persistent} cmd := &cobra.Command{ Use: "add-github-app", Short: "Add GitHub app to gittuf root of trust", Long: `This command allows users to add a trusted key for the special GitHub app role. This key is used to verify signatures on GitHub pull request approval attestations. Note that authorized keys can be specified from disk, from the GPG keyring using the "gpg:" format, or as a Sigstore identity as "fulcio:::".`, PreRunE: common.CheckForSigningKeyFlag, RunE: o.Run, DisableAutoGenTag: true, } o.AddFlags(cmd) return cmd } gittuf-0.9.0/internal/cmd/trust/addglobalrule/000077500000000000000000000000001475150141000213615ustar00rootroot00000000000000gittuf-0.9.0/internal/cmd/trust/addglobalrule/addglobalrule.go000066400000000000000000000047331475150141000245200ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package addglobalrule import ( "fmt" "github.com/gittuf/gittuf/experimental/gittuf" "github.com/gittuf/gittuf/internal/cmd/common" "github.com/gittuf/gittuf/internal/cmd/trust/persistent" "github.com/gittuf/gittuf/internal/dev" "github.com/gittuf/gittuf/internal/tuf" "github.com/spf13/cobra" ) type options struct { p *persistent.Options ruleName string ruleType string rulePatterns []string threshold int } func (o *options) AddFlags(cmd *cobra.Command) { cmd.Flags().StringVar( &o.ruleName, "rule-name", "", "name of rule", ) cmd.MarkFlagRequired("rule-name") //nolint:errcheck cmd.Flags().StringVar( &o.ruleType, "type", "", fmt.Sprintf("type of rule (%s|%s)", tuf.GlobalRuleThresholdType, tuf.GlobalRuleBlockForcePushesType), ) cmd.MarkFlagRequired("type") //nolint:errcheck cmd.Flags().StringArrayVar( &o.rulePatterns, "rule-pattern", []string{}, "patterns used to identify namespaces rule applies to", ) cmd.Flags().IntVar( &o.threshold, "threshold", 1, "threshold of required valid signatures", ) } func (o *options) Run(cmd *cobra.Command, _ []string) error { if !dev.InDevMode() { return dev.ErrNotInDevMode } repo, err := gittuf.LoadRepository() if err != nil { return err } signer, err := gittuf.LoadSigner(repo, o.p.SigningKey) if err != nil { return err } switch o.ruleType { case tuf.GlobalRuleThresholdType: if len(o.rulePatterns) == 0 { return fmt.Errorf("required flag --rule-pattern not set for global rule type '%s'", tuf.GlobalRuleThresholdType) } return repo.AddGlobalRuleThreshold(cmd.Context(), signer, o.ruleName, o.rulePatterns, o.threshold, true) case tuf.GlobalRuleBlockForcePushesType: if len(o.rulePatterns) == 0 { return fmt.Errorf("required flag --rule-pattern not set for global rule type '%s'", tuf.GlobalRuleBlockForcePushesType) } return repo.AddGlobalRuleBlockForcePushes(cmd.Context(), signer, o.ruleName, o.rulePatterns, true) default: return tuf.ErrUnknownGlobalRuleType } } func 
New(persistent *persistent.Options) *cobra.Command { o := &options{p: persistent} cmd := &cobra.Command{ Use: "add-global-rule", Short: fmt.Sprintf("Add a new global rule to root of trust (developer mode only, set %s=1)", dev.DevModeKey), PreRunE: common.CheckForSigningKeyFlag, RunE: o.Run, DisableAutoGenTag: true, } o.AddFlags(cmd) return cmd } gittuf-0.9.0/internal/cmd/trust/addpolicykey/000077500000000000000000000000001475150141000212415ustar00rootroot00000000000000gittuf-0.9.0/internal/cmd/trust/addpolicykey/addpolicykey.go000066400000000000000000000030741475150141000242550ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package addpolicykey import ( "github.com/gittuf/gittuf/experimental/gittuf" "github.com/gittuf/gittuf/internal/cmd/common" "github.com/gittuf/gittuf/internal/cmd/trust/persistent" "github.com/spf13/cobra" ) type options struct { p *persistent.Options targetsKey string } func (o *options) AddFlags(cmd *cobra.Command) { cmd.Flags().StringVar( &o.targetsKey, "policy-key", "", "policy key to add to root of trust", ) cmd.MarkFlagRequired("policy-key") //nolint:errcheck } func (o *options) Run(cmd *cobra.Command, _ []string) error { repo, err := gittuf.LoadRepository() if err != nil { return err } signer, err := gittuf.LoadSigner(repo, o.p.SigningKey) if err != nil { return err } targetsKey, err := gittuf.LoadPublicKey(o.targetsKey) if err != nil { return err } return repo.AddTopLevelTargetsKey(cmd.Context(), signer, targetsKey, true) } func New(persistent *persistent.Options) *cobra.Command { o := &options{p: persistent} cmd := &cobra.Command{ Use: "add-policy-key", Short: "Add Policy key to gittuf root of trust", Long: `This command allows users to add a new trusted key for the main policy file. 
Note that authorized keys can be specified from disk, from the GPG keyring using the "gpg:<fingerprint>" format, or as a Sigstore identity as "fulcio:<identity>::<issuer>".`, PreRunE: common.CheckForSigningKeyFlag, RunE: o.Run, DisableAutoGenTag: true, } o.AddFlags(cmd) return cmd } gittuf-0.9.0/internal/cmd/trust/addpropagationdirective/000077500000000000000000000000001475150141000234535ustar00rootroot00000000000000gittuf-0.9.0/internal/cmd/trust/addpropagationdirective/addpropagationdirective.go000066400000000000000000000045141475150141000307010ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package addpropagationdirective import ( "fmt" "github.com/gittuf/gittuf/experimental/gittuf" "github.com/gittuf/gittuf/internal/cmd/common" "github.com/gittuf/gittuf/internal/cmd/trust/persistent" "github.com/gittuf/gittuf/internal/dev" "github.com/spf13/cobra" ) type options struct { p *persistent.Options name string upstreamRepository string upstreamReference string downstreamReference string downstreamPath string } func (o *options) AddFlags(cmd *cobra.Command) { cmd.Flags().StringVar( &o.name, "name", "", "name of propagation directive", ) cmd.MarkFlagRequired("name") //nolint:errcheck cmd.Flags().StringVar( &o.upstreamRepository, "from-repository", "", "location of upstream repository", ) cmd.MarkFlagRequired("from-repository") //nolint:errcheck cmd.Flags().StringVar( &o.upstreamReference, "from-reference", "", "reference to propagate from in upstream repository", ) cmd.MarkFlagRequired("from-reference") //nolint:errcheck cmd.Flags().StringVar( &o.downstreamReference, "into-reference", "", "reference to propagate into in downstream repository", ) cmd.MarkFlagRequired("into-reference") //nolint:errcheck cmd.Flags().StringVar( &o.downstreamPath, "into-path", "", "path to propagate upstream contents into in downstream reference", ) cmd.MarkFlagRequired("into-path") //nolint:errcheck } func (o *options) Run(cmd *cobra.Command, _ []string) error { if !dev.InDevMode() { return dev.ErrNotInDevMode } repo, err := gittuf.LoadRepository() if err != nil { return err } signer, err := gittuf.LoadSigner(repo, o.p.SigningKey) if err != nil { return err } return repo.AddPropagationDirective(cmd.Context(), signer, o.name, o.upstreamRepository, o.upstreamReference, o.downstreamReference, o.downstreamPath, true) } func New(persistent *persistent.Options) *cobra.Command { o := &options{p: persistent} cmd := &cobra.Command{ Use: "add-propagation-directive", Short: fmt.Sprintf("Add propagation directive into gittuf root of trust (developer mode only, set %s=1)", dev.DevModeKey), PreRunE: common.CheckForSigningKeyFlag, RunE: o.Run, DisableAutoGenTag: true, } o.AddFlags(cmd) return cmd } gittuf-0.9.0/internal/cmd/trust/addrootkey/000077500000000000000000000000001475150141000207255ustar00rootroot00000000000000gittuf-0.9.0/internal/cmd/trust/addrootkey/addrootkey.go000066400000000000000000000024231475150141000234220ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package addrootkey import ( "github.com/gittuf/gittuf/experimental/gittuf" "github.com/gittuf/gittuf/internal/cmd/common" "github.com/gittuf/gittuf/internal/cmd/trust/persistent" "github.com/spf13/cobra" ) type options struct { p *persistent.Options newRootKey string } func (o *options) AddFlags(cmd *cobra.Command) { cmd.Flags().StringVar( &o.newRootKey, "root-key", "", "root key to add to root of trust", ) cmd.MarkFlagRequired("root-key") //nolint:errcheck } func (o *options) Run(cmd 
*cobra.Command, _ []string) error { repo, err := gittuf.LoadRepository() if err != nil { return err } signer, err := gittuf.LoadSigner(repo, o.p.SigningKey) if err != nil { return err } newRootKey, err := gittuf.LoadPublicKey(o.newRootKey) if err != nil { return err } return repo.AddRootKey(cmd.Context(), signer, newRootKey, true) } func New(persistent *persistent.Options) *cobra.Command { o := &options{p: persistent} cmd := &cobra.Command{ Use: "add-root-key", Short: "Add Root key to gittuf root of trust", PreRunE: common.CheckForSigningKeyFlag, RunE: o.Run, DisableAutoGenTag: true, } o.AddFlags(cmd) return cmd } gittuf-0.9.0/internal/cmd/trust/disablegithubappapprovals/000077500000000000000000000000001475150141000240175ustar00rootroot00000000000000gittuf-0.9.0/internal/cmd/trust/disablegithubappapprovals/disablegithubappapprovals.go000066400000000000000000000020471475150141000316100ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package disablegithubappapprovals import ( "github.com/gittuf/gittuf/experimental/gittuf" "github.com/gittuf/gittuf/internal/cmd/common" "github.com/gittuf/gittuf/internal/cmd/trust/persistent" "github.com/spf13/cobra" ) type options struct { p *persistent.Options } func (o *options) AddFlags(_ *cobra.Command) {} func (o *options) Run(cmd *cobra.Command, _ []string) error { repo, err := gittuf.LoadRepository() if err != nil { return err } signer, err := gittuf.LoadSigner(repo, o.p.SigningKey) if err != nil { return err } return repo.UntrustGitHubApp(cmd.Context(), signer, true) } func New(persistent *persistent.Options) *cobra.Command { o := &options{p: persistent} cmd := &cobra.Command{ Use: "disable-github-app-approvals", Short: "Mark GitHub app approvals as untrusted henceforth", PreRunE: common.CheckForSigningKeyFlag, RunE: o.Run, DisableAutoGenTag: true, } o.AddFlags(cmd) return cmd } gittuf-0.9.0/internal/cmd/trust/enablegithubappapprovals/000077500000000000000000000000001475150141000236425ustar00rootroot00000000000000gittuf-0.9.0/internal/cmd/trust/enablegithubappapprovals/enablegithubappapprovals.go000066400000000000000000000020411475150141000312500ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package enablegithubappapprovals import ( "github.com/gittuf/gittuf/experimental/gittuf" "github.com/gittuf/gittuf/internal/cmd/common" "github.com/gittuf/gittuf/internal/cmd/trust/persistent" "github.com/spf13/cobra" ) type options struct { p *persistent.Options } func (o *options) AddFlags(_ *cobra.Command) {} func (o *options) Run(cmd *cobra.Command, _ []string) error { repo, err := gittuf.LoadRepository() if err != nil { return err } signer, err := gittuf.LoadSigner(repo, o.p.SigningKey) if err != nil { return err } return repo.TrustGitHubApp(cmd.Context(), signer, true) } func New(persistent *persistent.Options) *cobra.Command { o := &options{p: persistent} cmd := &cobra.Command{ Use: "enable-github-app-approvals", Short: "Mark GitHub app approvals as trusted henceforth", PreRunE: common.CheckForSigningKeyFlag, RunE: o.Run, DisableAutoGenTag: true, } o.AddFlags(cmd) return cmd } gittuf-0.9.0/internal/cmd/trust/init/000077500000000000000000000000001475150141000175235ustar00rootroot00000000000000gittuf-0.9.0/internal/cmd/trust/init/init.go000066400000000000000000000023351475150141000210200ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package init import ( "github.com/gittuf/gittuf/experimental/gittuf" 
rootopts "github.com/gittuf/gittuf/experimental/gittuf/options/root" "github.com/gittuf/gittuf/internal/cmd/common" "github.com/gittuf/gittuf/internal/cmd/trust/persistent" "github.com/spf13/cobra" ) type options struct { p *persistent.Options location string } func (o *options) AddFlags(cmd *cobra.Command) { cmd.Flags().StringVar( &o.location, "location", "", "location of repository", ) } func (o *options) Run(cmd *cobra.Command, _ []string) error { repo, err := gittuf.LoadRepository() if err != nil { return err } signer, err := gittuf.LoadSigner(repo, o.p.SigningKey) if err != nil { return err } return repo.InitializeRoot(cmd.Context(), signer, true, rootopts.WithRepositoryLocation(o.location)) } func New(persistent *persistent.Options) *cobra.Command { o := &options{p: persistent} cmd := &cobra.Command{ Use: "init", Short: "Initialize gittuf root of trust for repository", PreRunE: common.CheckForSigningKeyFlag, RunE: o.Run, DisableAutoGenTag: true, } o.AddFlags(cmd) return cmd } gittuf-0.9.0/internal/cmd/trust/persistent/000077500000000000000000000000001475150141000207605ustar00rootroot00000000000000gittuf-0.9.0/internal/cmd/trust/persistent/persistent.go000066400000000000000000000005511475150141000235100ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package persistent import "github.com/spf13/cobra" type Options struct { SigningKey string } func (o *Options) AddPersistentFlags(cmd *cobra.Command) { cmd.PersistentFlags().StringVarP( &o.SigningKey, "signing-key", "k", "", "signing key to use to sign root of trust", ) } gittuf-0.9.0/internal/cmd/trust/removegithubapp/000077500000000000000000000000001475150141000217615ustar00rootroot00000000000000gittuf-0.9.0/internal/cmd/trust/removegithubapp/removegithubapp.go000066400000000000000000000020131475150141000255050ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package removegithubapp import ( "github.com/gittuf/gittuf/experimental/gittuf" "github.com/gittuf/gittuf/internal/cmd/common" "github.com/gittuf/gittuf/internal/cmd/trust/persistent" "github.com/spf13/cobra" ) type options struct { p *persistent.Options } func (o *options) AddFlags(_ *cobra.Command) {} func (o *options) Run(cmd *cobra.Command, _ []string) error { repo, err := gittuf.LoadRepository() if err != nil { return err } signer, err := gittuf.LoadSigner(repo, o.p.SigningKey) if err != nil { return err } return repo.RemoveGitHubApp(cmd.Context(), signer, true) } func New(persistent *persistent.Options) *cobra.Command { o := &options{p: persistent} cmd := &cobra.Command{ Use: "remove-github-app", Short: "Remove GitHub app from gittuf root of trust", PreRunE: common.CheckForSigningKeyFlag, RunE: o.Run, DisableAutoGenTag: true, } o.AddFlags(cmd) return cmd } gittuf-0.9.0/internal/cmd/trust/removeglobalrule/000077500000000000000000000000001475150141000221265ustar00rootroot00000000000000gittuf-0.9.0/internal/cmd/trust/removeglobalrule/removeglobalrule.go000066400000000000000000000025361475150141000260310ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package removeglobalrule import ( "fmt" "github.com/gittuf/gittuf/experimental/gittuf" "github.com/gittuf/gittuf/internal/cmd/common" "github.com/gittuf/gittuf/internal/cmd/trust/persistent" "github.com/gittuf/gittuf/internal/dev" "github.com/spf13/cobra" ) type options struct { p *persistent.Options ruleName string } func (o *options) AddFlags(cmd *cobra.Command) { 
cmd.Flags().StringVar( &o.ruleName, "rule-name", "", "name of rule", ) cmd.MarkFlagRequired("rule-name") //nolint:errcheck } func (o *options) Run(cmd *cobra.Command, _ []string) error { if !dev.InDevMode() { return dev.ErrNotInDevMode } repo, err := gittuf.LoadRepository() if err != nil { return err } signer, err := gittuf.LoadSigner(repo, o.p.SigningKey) if err != nil { return err } return repo.RemoveGlobalRule(cmd.Context(), signer, o.ruleName, true) } func New(persistent *persistent.Options) *cobra.Command { o := &options{p: persistent} cmd := &cobra.Command{ Use: "remove-global-rule", Short: fmt.Sprintf("Remove a global rule from root of trust (developer mode only, set %s=1)", dev.DevModeKey), PreRunE: common.CheckForSigningKeyFlag, RunE: o.Run, DisableAutoGenTag: true, } o.AddFlags(cmd) return cmd } gittuf-0.9.0/internal/cmd/trust/removepolicykey/000077500000000000000000000000001475150141000220065ustar00rootroot00000000000000gittuf-0.9.0/internal/cmd/trust/removepolicykey/removepolicykey.go000066400000000000000000000024331475150141000255650ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package removepolicykey import ( "strings" "github.com/gittuf/gittuf/experimental/gittuf" "github.com/gittuf/gittuf/internal/cmd/common" "github.com/gittuf/gittuf/internal/cmd/trust/persistent" "github.com/spf13/cobra" ) type options struct { p *persistent.Options targetsKeyID string } func (o *options) AddFlags(cmd *cobra.Command) { cmd.Flags().StringVar( &o.targetsKeyID, "policy-key-ID", "", "ID of Policy key to be removed from root of trust", ) cmd.MarkFlagRequired("policy-key-ID") //nolint:errcheck } func (o *options) Run(cmd *cobra.Command, _ []string) error { repo, err := gittuf.LoadRepository() if err != nil { return err } signer, err := gittuf.LoadSigner(repo, o.p.SigningKey) if err != nil { return err } return repo.RemoveTopLevelTargetsKey(cmd.Context(), signer, strings.ToLower(o.targetsKeyID), true) } func New(persistent *persistent.Options) *cobra.Command { o := &options{p: persistent} cmd := &cobra.Command{ Use: "remove-policy-key", Short: "Remove Policy key from gittuf root of trust", PreRunE: common.CheckForSigningKeyFlag, RunE: o.Run, DisableAutoGenTag: true, } o.AddFlags(cmd) return cmd } gittuf-0.9.0/internal/cmd/trust/removepropagationdirective/000077500000000000000000000000001475150141000242205ustar00rootroot00000000000000gittuf-0.9.0/internal/cmd/trust/removepropagationdirective/removepropagationdirective.go000066400000000000000000000026011475150141000322060ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package removepropagationdirective import ( "fmt" "github.com/gittuf/gittuf/experimental/gittuf" "github.com/gittuf/gittuf/internal/cmd/common" "github.com/gittuf/gittuf/internal/cmd/trust/persistent" "github.com/gittuf/gittuf/internal/dev" "github.com/spf13/cobra" ) type options struct { p *persistent.Options name string } func (o *options) AddFlags(cmd *cobra.Command) { cmd.Flags().StringVar( &o.name, "name", "", "name of propagation directive", ) cmd.MarkFlagRequired("name") //nolint:errcheck } func (o *options) Run(cmd *cobra.Command, _ []string) error { if !dev.InDevMode() { return dev.ErrNotInDevMode } repo, err := gittuf.LoadRepository() if err != nil { return err } signer, err := gittuf.LoadSigner(repo, o.p.SigningKey) if err != nil { return err } return repo.RemovePropagationDirective(cmd.Context(), signer, o.name, true) } func New(persistent *persistent.Options) 
*cobra.Command { o := &options{p: persistent} cmd := &cobra.Command{ Use: "remove-propagation-directive", Short: fmt.Sprintf("Remove propagation directive from gittuf root of trust (developer mode only, set %s=1)", dev.DevModeKey), PreRunE: common.CheckForSigningKeyFlag, RunE: o.Run, DisableAutoGenTag: true, } o.AddFlags(cmd) return cmd } gittuf-0.9.0/internal/cmd/trust/removerootkey/000077500000000000000000000000001475150141000214725ustar00rootroot00000000000000gittuf-0.9.0/internal/cmd/trust/removerootkey/removerootkey.go000066400000000000000000000023701475150141000247350ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package removerootkey import ( "strings" "github.com/gittuf/gittuf/experimental/gittuf" "github.com/gittuf/gittuf/internal/cmd/common" "github.com/gittuf/gittuf/internal/cmd/trust/persistent" "github.com/spf13/cobra" ) type options struct { p *persistent.Options rootKeyID string } func (o *options) AddFlags(cmd *cobra.Command) { cmd.Flags().StringVar( &o.rootKeyID, "root-key-ID", "", "ID of Root key to be removed from root of trust", ) cmd.MarkFlagRequired("root-key-ID") //nolint:errcheck } func (o *options) Run(cmd *cobra.Command, _ []string) error { repo, err := gittuf.LoadRepository() if err != nil { return err } signer, err := gittuf.LoadSigner(repo, o.p.SigningKey) if err != nil { return err } return repo.RemoveRootKey(cmd.Context(), signer, strings.ToLower(o.rootKeyID), true) } func New(persistent *persistent.Options) *cobra.Command { o := &options{p: persistent} cmd := &cobra.Command{ Use: "remove-root-key", Short: "Remove Root key from gittuf root of trust", PreRunE: common.CheckForSigningKeyFlag, RunE: o.Run, DisableAutoGenTag: true, } o.AddFlags(cmd) return cmd } gittuf-0.9.0/internal/cmd/trust/setrepositorylocation/000077500000000000000000000000001475150141000232445ustar00rootroot00000000000000gittuf-0.9.0/internal/cmd/trust/setrepositorylocation/setrepositorylocation.go000066400000000000000000000022761475150141000302660ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package setrepositorylocation import ( "github.com/gittuf/gittuf/experimental/gittuf" "github.com/gittuf/gittuf/internal/cmd/common" "github.com/gittuf/gittuf/internal/cmd/trust/persistent" "github.com/spf13/cobra" ) type options struct { p *persistent.Options location string } func (o *options) AddFlags(cmd *cobra.Command) { cmd.Flags().StringVar( &o.location, "location", "", "location of repository", ) cmd.MarkFlagRequired("location") //nolint:errcheck } func (o *options) Run(cmd *cobra.Command, _ []string) error { repo, err := gittuf.LoadRepository() if err != nil { return err } signer, err := gittuf.LoadSigner(repo, o.p.SigningKey) if err != nil { return err } return repo.SetRepositoryLocation(cmd.Context(), signer, o.location, true) } func New(persistent *persistent.Options) *cobra.Command { o := &options{p: persistent} cmd := &cobra.Command{ Use: "set-repository-location", Short: "Set repository location", PreRunE: common.CheckForSigningKeyFlag, RunE: o.Run, DisableAutoGenTag: true, } o.AddFlags(cmd) return cmd } gittuf-0.9.0/internal/cmd/trust/sign/000077500000000000000000000000001475150141000175205ustar00rootroot00000000000000gittuf-0.9.0/internal/cmd/trust/sign/sign.go000066400000000000000000000020671475150141000210140ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package sign import ( "github.com/gittuf/gittuf/experimental/gittuf" 
"github.com/gittuf/gittuf/internal/cmd/common" "github.com/gittuf/gittuf/internal/cmd/trust/persistent" "github.com/spf13/cobra" ) type options struct { p *persistent.Options } func (o *options) AddFlags(_ *cobra.Command) {} func (o *options) Run(cmd *cobra.Command, _ []string) error { repo, err := gittuf.LoadRepository() if err != nil { return err } signer, err := gittuf.LoadSigner(repo, o.p.SigningKey) if err != nil { return err } return repo.SignRoot(cmd.Context(), signer, true) } func New(persistent *persistent.Options) *cobra.Command { o := &options{p: persistent} cmd := &cobra.Command{ Use: "sign", Short: "Sign root of trust", Long: "This command allows users to add their signature to the root of trust file.", PreRunE: common.CheckForSigningKeyFlag, RunE: o.Run, DisableAutoGenTag: true, } o.AddFlags(cmd) return cmd } gittuf-0.9.0/internal/cmd/trust/trust.go000066400000000000000000000045231475150141000202740ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package trust import ( "github.com/gittuf/gittuf/internal/cmd/trust/addgithubapp" "github.com/gittuf/gittuf/internal/cmd/trust/addglobalrule" "github.com/gittuf/gittuf/internal/cmd/trust/addpolicykey" "github.com/gittuf/gittuf/internal/cmd/trust/addpropagationdirective" "github.com/gittuf/gittuf/internal/cmd/trust/addrootkey" "github.com/gittuf/gittuf/internal/cmd/trust/disablegithubappapprovals" "github.com/gittuf/gittuf/internal/cmd/trust/enablegithubappapprovals" i "github.com/gittuf/gittuf/internal/cmd/trust/init" "github.com/gittuf/gittuf/internal/cmd/trust/persistent" "github.com/gittuf/gittuf/internal/cmd/trust/removegithubapp" "github.com/gittuf/gittuf/internal/cmd/trust/removeglobalrule" "github.com/gittuf/gittuf/internal/cmd/trust/removepolicykey" "github.com/gittuf/gittuf/internal/cmd/trust/removepropagationdirective" "github.com/gittuf/gittuf/internal/cmd/trust/removerootkey" "github.com/gittuf/gittuf/internal/cmd/trust/setrepositorylocation" "github.com/gittuf/gittuf/internal/cmd/trust/sign" "github.com/gittuf/gittuf/internal/cmd/trust/updatepolicythreshold" "github.com/gittuf/gittuf/internal/cmd/trust/updaterootthreshold" "github.com/gittuf/gittuf/internal/cmd/trustpolicy/apply" "github.com/gittuf/gittuf/internal/cmd/trustpolicy/remote" "github.com/spf13/cobra" ) func New() *cobra.Command { o := &persistent.Options{} cmd := &cobra.Command{ Use: "trust", Short: "Tools for gittuf's root of trust", DisableAutoGenTag: true, } o.AddPersistentFlags(cmd) cmd.AddCommand(i.New(o)) cmd.AddCommand(addgithubapp.New(o)) cmd.AddCommand(addglobalrule.New(o)) cmd.AddCommand(addpolicykey.New(o)) cmd.AddCommand(addpropagationdirective.New(o)) cmd.AddCommand(addrootkey.New(o)) cmd.AddCommand(apply.New()) cmd.AddCommand(disablegithubappapprovals.New(o)) cmd.AddCommand(enablegithubappapprovals.New(o)) cmd.AddCommand(remote.New()) cmd.AddCommand(removegithubapp.New(o)) cmd.AddCommand(removeglobalrule.New(o)) cmd.AddCommand(removepolicykey.New(o)) cmd.AddCommand(removepropagationdirective.New(o)) cmd.AddCommand(removerootkey.New(o)) cmd.AddCommand(setrepositorylocation.New(o)) cmd.AddCommand(sign.New(o)) cmd.AddCommand(updatepolicythreshold.New(o)) cmd.AddCommand(updaterootthreshold.New(o)) return cmd } gittuf-0.9.0/internal/cmd/trust/updatepolicythreshold/000077500000000000000000000000001475150141000231775ustar00rootroot00000000000000gittuf-0.9.0/internal/cmd/trust/updatepolicythreshold/updatepolicythreshold.go000066400000000000000000000024551475150141000301530ustar00rootroot00000000000000// 
Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package updatepolicythreshold import ( "github.com/gittuf/gittuf/experimental/gittuf" "github.com/gittuf/gittuf/internal/cmd/common" "github.com/gittuf/gittuf/internal/cmd/trust/persistent" "github.com/spf13/cobra" ) type options struct { p *persistent.Options threshold int } func (o *options) AddFlags(cmd *cobra.Command) { cmd.Flags().IntVar( &o.threshold, "threshold", -1, "threshold of valid signatures required for main policy", ) cmd.MarkFlagRequired("threshold") //nolint:errcheck } func (o *options) Run(cmd *cobra.Command, _ []string) error { repo, err := gittuf.LoadRepository() if err != nil { return err } signer, err := gittuf.LoadSigner(repo, o.p.SigningKey) if err != nil { return err } return repo.UpdateTopLevelTargetsThreshold(cmd.Context(), signer, o.threshold, true) } func New(persistent *persistent.Options) *cobra.Command { o := &options{p: persistent} cmd := &cobra.Command{ Use: "update-policy-threshold", Short: "Update Policy threshold in the gittuf root of trust", Long: "This command allows users to update the threshold of valid signatures required for the policy.", PreRunE: common.CheckForSigningKeyFlag, RunE: o.Run, } o.AddFlags(cmd) return cmd } gittuf-0.9.0/internal/cmd/trust/updaterootthreshold/000077500000000000000000000000001475150141000226635ustar00rootroot00000000000000gittuf-0.9.0/internal/cmd/trust/updaterootthreshold/updaterootthreshold.go000066400000000000000000000024341475150141000273200ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package updaterootthreshold import ( "github.com/gittuf/gittuf/experimental/gittuf" "github.com/gittuf/gittuf/internal/cmd/common" "github.com/gittuf/gittuf/internal/cmd/trust/persistent" "github.com/spf13/cobra" ) type options struct { p *persistent.Options threshold int } func (o *options) AddFlags(cmd *cobra.Command) { cmd.Flags().IntVar( &o.threshold, "threshold", -1, "threshold of valid signatures required for root", ) cmd.MarkFlagRequired("threshold") //nolint:errcheck } func (o *options) Run(cmd *cobra.Command, _ []string) error { repo, err := gittuf.LoadRepository() if err != nil { return err } signer, err := gittuf.LoadSigner(repo, o.p.SigningKey) if err != nil { return err } return repo.UpdateRootThreshold(cmd.Context(), signer, o.threshold, true) } func New(persistent *persistent.Options) *cobra.Command { o := &options{p: persistent} cmd := &cobra.Command{ Use: "update-root-threshold", Short: "Update Root threshold in the gittuf root of trust", Long: "This command allows users to update the threshold of valid signatures required for the root of trust.", PreRunE: common.CheckForSigningKeyFlag, RunE: o.Run, } o.AddFlags(cmd) return cmd } gittuf-0.9.0/internal/cmd/trustpolicy/000077500000000000000000000000001475150141000200005ustar00rootroot00000000000000gittuf-0.9.0/internal/cmd/trustpolicy/apply/000077500000000000000000000000001475150141000211255ustar00rootroot00000000000000gittuf-0.9.0/internal/cmd/trustpolicy/apply/apply.go000066400000000000000000000011731475150141000226030ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package apply import ( "github.com/gittuf/gittuf/experimental/gittuf" "github.com/spf13/cobra" ) type options struct{} func (o *options) AddFlags(_ *cobra.Command) {} func (o *options) Run(cmd *cobra.Command, _ []string) error { repo, err := gittuf.LoadRepository() if err != nil { return err } return repo.ApplyPolicy(cmd.Context(), 
true) } func New() *cobra.Command { o := &options{} cmd := &cobra.Command{ Use: "apply", Short: "Validate and apply changes from policy-staging to policy", RunE: o.Run, } o.AddFlags(cmd) return cmd } gittuf-0.9.0/internal/cmd/trustpolicy/discard/000077500000000000000000000000001475150141000214115ustar00rootroot00000000000000gittuf-0.9.0/internal/cmd/trustpolicy/discard/discard.go000066400000000000000000000012411475150141000233470ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package discard import ( "github.com/gittuf/gittuf/experimental/gittuf" "github.com/spf13/cobra" ) type options struct{} func (o *options) AddFlags(_ *cobra.Command) {} func (o *options) Run(_ *cobra.Command, _ []string) error { repo, err := gittuf.LoadRepository() if err != nil { return err } return repo.DiscardPolicy() } func New() *cobra.Command { o := &options{} cmd := &cobra.Command{ Use: "discard", Short: "Discard the currently staged changes to policy", RunE: o.Run, DisableAutoGenTag: true, } o.AddFlags(cmd) return cmd } gittuf-0.9.0/internal/cmd/trustpolicy/remote/000077500000000000000000000000001475150141000212735ustar00rootroot00000000000000gittuf-0.9.0/internal/cmd/trustpolicy/remote/pull/000077500000000000000000000000001475150141000222475ustar00rootroot00000000000000gittuf-0.9.0/internal/cmd/trustpolicy/remote/pull/pull.go000066400000000000000000000012131475150141000235470ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package pull import ( "github.com/gittuf/gittuf/experimental/gittuf" "github.com/spf13/cobra" ) type options struct { } func (o *options) Run(_ *cobra.Command, args []string) error { repo, err := gittuf.LoadRepository() if err != nil { return err } return repo.PullPolicy(args[0]) } func New() *cobra.Command { o := &options{} cmd := &cobra.Command{ Use: "pull ", Short: "Pull policy from the specified remote", Args: cobra.ExactArgs(1), RunE: o.Run, DisableAutoGenTag: true, } return cmd } gittuf-0.9.0/internal/cmd/trustpolicy/remote/push/000077500000000000000000000000001475150141000222525ustar00rootroot00000000000000gittuf-0.9.0/internal/cmd/trustpolicy/remote/push/push.go000066400000000000000000000012111475150141000235530ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package push import ( "github.com/gittuf/gittuf/experimental/gittuf" "github.com/spf13/cobra" ) type options struct { } func (o *options) Run(_ *cobra.Command, args []string) error { repo, err := gittuf.LoadRepository() if err != nil { return err } return repo.PushPolicy(args[0]) } func New() *cobra.Command { o := &options{} cmd := &cobra.Command{ Use: "push ", Short: "Push policy to the specified remote", Args: cobra.ExactArgs(1), RunE: o.Run, DisableAutoGenTag: true, } return cmd } gittuf-0.9.0/internal/cmd/trustpolicy/remote/remote.go000066400000000000000000000007641475150141000231240ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package remote import ( "github.com/gittuf/gittuf/internal/cmd/trustpolicy/remote/pull" "github.com/gittuf/gittuf/internal/cmd/trustpolicy/remote/push" "github.com/spf13/cobra" ) func New() *cobra.Command { cmd := &cobra.Command{ Use: "remote", Short: "Tools for managing remote policies", DisableAutoGenTag: true, } cmd.AddCommand(pull.New()) cmd.AddCommand(push.New()) return cmd } 
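A minimal sketch, assuming the PullPolicy and PushPolicy signatures shown in pull.go and push.go above, of synchronizing policy metadata through the experimental Go API rather than through the CLI wrappers; the remote name "origin" and the standalone program layout are illustrative assumptions only.

// Sketch (assumed usage, not part of gittuf): syncing policy metadata via the API behind the pull and push subcommands.
package main

import (
	"log"

	"github.com/gittuf/gittuf/experimental/gittuf"
)

func main() {
	// Load the repository from the current working directory, as the CLI wrappers above do.
	repo, err := gittuf.LoadRepository()
	if err != nil {
		log.Fatal(err)
	}

	// Fetch policy references from the remote, mirroring `pull <remote>`.
	if err := repo.PullPolicy("origin"); err != nil {
		log.Fatal(err)
	}

	// Publish local policy references to the remote, mirroring `push <remote>`.
	if err := repo.PushPolicy("origin"); err != nil {
		log.Fatal(err)
	}
}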
gittuf-0.9.0/internal/cmd/verifymergeable/000077500000000000000000000000001475150141000205475ustar00rootroot00000000000000gittuf-0.9.0/internal/cmd/verifymergeable/verifymergeable.go000066400000000000000000000027771475150141000242630ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package verifymergeable import ( "github.com/gittuf/gittuf/experimental/gittuf" "github.com/gittuf/gittuf/experimental/gittuf/options/verifymergeable" "github.com/spf13/cobra" ) type options struct { baseBranch string featureBranch string bypassRSL bool } func (o *options) AddFlags(cmd *cobra.Command) { cmd.Flags().StringVar( &o.baseBranch, "base-branch", "", "base branch for proposed merge", ) cmd.MarkFlagRequired("base-branch") //nolint:errcheck cmd.Flags().StringVar( &o.featureBranch, "feature-branch", "", "feature branch for proposed merge", ) cmd.MarkFlagRequired("feature-branch") //nolint:errcheck cmd.Flags().BoolVar( &o.bypassRSL, "bypass-RSL", false, "bypass RSL when identifying current state of feature ref", ) } func (o *options) Run(cmd *cobra.Command, _ []string) error { repo, err := gittuf.LoadRepository() if err != nil { return err } opts := []verifymergeable.Option{} if o.bypassRSL { opts = append(opts, verifymergeable.WithBypassRSLForFeatureRef()) } _, err = repo.VerifyMergeable(cmd.Context(), o.baseBranch, o.featureBranch, opts...) return err } func New() *cobra.Command { o := &options{} cmd := &cobra.Command{ Use: "verify-mergeable", Short: "Tools for verifying mergeability using gittuf policies", Args: cobra.ExactArgs(0), RunE: o.Run, DisableAutoGenTag: true, } o.AddFlags(cmd) return cmd } gittuf-0.9.0/internal/cmd/verifyref/000077500000000000000000000000001475150141000174005ustar00rootroot00000000000000gittuf-0.9.0/internal/cmd/verifyref/verifyref.go000066400000000000000000000034031475150141000217300ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package verifyref import ( "fmt" "github.com/gittuf/gittuf/experimental/gittuf" verifyopts "github.com/gittuf/gittuf/experimental/gittuf/options/verify" "github.com/gittuf/gittuf/internal/dev" "github.com/spf13/cobra" ) type options struct { latestOnly bool fromEntry string remoteRefName string } func (o *options) AddFlags(cmd *cobra.Command) { cmd.Flags().BoolVar( &o.latestOnly, "latest-only", false, "perform verification against latest entry in the RSL", ) cmd.Flags().StringVar( &o.fromEntry, "from-entry", "", fmt.Sprintf("perform verification from specified RSL entry (developer mode only, set %s=1)", dev.DevModeKey), ) cmd.MarkFlagsMutuallyExclusive("latest-only", "from-entry") cmd.Flags().StringVar( &o.remoteRefName, "remote-ref-name", "", "name of remote reference, if it differs from the local name", ) } func (o *options) Run(cmd *cobra.Command, args []string) error { repo, err := gittuf.LoadRepository() if err != nil { return err } if o.fromEntry != "" { if !dev.InDevMode() { return dev.ErrNotInDevMode } return repo.VerifyRefFromEntry(cmd.Context(), args[0], o.fromEntry, verifyopts.WithOverrideRefName(o.remoteRefName)) } opts := []verifyopts.Option{verifyopts.WithOverrideRefName(o.remoteRefName)} if o.latestOnly { opts = append(opts, verifyopts.WithLatestOnly()) } return repo.VerifyRef(cmd.Context(), args[0], opts...) 
} func New() *cobra.Command { o := &options{} cmd := &cobra.Command{ Use: "verify-ref", Short: "Tools for verifying gittuf policies", Args: cobra.ExactArgs(1), RunE: o.Run, DisableAutoGenTag: true, } o.AddFlags(cmd) return cmd } gittuf-0.9.0/internal/cmd/version/000077500000000000000000000000001475150141000170645ustar00rootroot00000000000000gittuf-0.9.0/internal/cmd/version/version.go000066400000000000000000000014711475150141000211030ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package version import ( "fmt" "github.com/gittuf/gittuf/internal/dev" "github.com/gittuf/gittuf/internal/version" "github.com/spf13/cobra" ) type options struct{} func (o *options) AddFlags(_ *cobra.Command) {} func (o *options) Run(_ *cobra.Command, _ []string) error { v := version.GetVersion() if v[0] == 'v' { v = v[1:] } fmt.Printf("gittuf version %s\n", v) if dev.InDevMode() { fmt.Printf("gittuf is operating in developer mode. Override by setting %s=0.\n", dev.DevModeKey) } return nil } func New() *cobra.Command { o := &options{} cmd := &cobra.Command{ Use: "version", Short: "Version of gittuf", RunE: o.Run, DisableAutoGenTag: true, } o.AddFlags(cmd) return cmd } gittuf-0.9.0/internal/common/000077500000000000000000000000001475150141000161245ustar00rootroot00000000000000gittuf-0.9.0/internal/common/common.go000066400000000000000000000077301475150141000177520ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package common import ( "fmt" "testing" "time" "github.com/gittuf/gittuf/internal/gitinterface" "github.com/gittuf/gittuf/internal/rsl" "github.com/go-git/go-git/v5/config" "github.com/jonboulle/clockwork" ) const ( testName = "Jane Doe" testEmail = "jane.doe@example.com" ) var ( TestGitConfig = &config.Config{ User: struct { Name string Email string }{ Name: testName, Email: testEmail, }, } TestClock = clockwork.NewFakeClockAt(time.Date(1995, time.October, 26, 9, 0, 0, 0, time.UTC)) ) // CreateTestRSLReferenceEntryCommit is a test helper used to create a // **signed** reference entry using the specified GPG key. It is used to // substitute for the default RSL entry creation and signing mechanism which // relies on the user's Git config. // // Update: This helper just wraps around CommitUsingSpecificKey in the rsl // package. We can probably get rid of it, but it's a pretty big delta. func CreateTestRSLReferenceEntryCommit(t *testing.T, repo *gitinterface.Repository, entry *rsl.ReferenceEntry, signingKeyBytes []byte) gitinterface.Hash { t.Helper() if err := entry.CommitUsingSpecificKey(repo, signingKeyBytes); err != nil { t.Fatal(err) } entryID, err := repo.GetReference(rsl.Ref) if err != nil { t.Fatal(err) } return entryID } // CreateTestRSLAnnotationEntryCommit is a test helper used to create a // **signed** RSL annotation using the specified GPG key. It is used to // substitute for the default RSL annotation creation and signing mechanism // which relies on the user's Git config. // // Update: This helper just wraps around CommitUsingSpecificKey in the rsl // package. We can probably get rid of it, but it's a pretty big delta. 
func CreateTestRSLAnnotationEntryCommit(t *testing.T, repo *gitinterface.Repository, annotation *rsl.AnnotationEntry, signingKeyBytes []byte) gitinterface.Hash { t.Helper() if err := annotation.CommitUsingSpecificKey(repo, signingKeyBytes); err != nil { t.Fatal(err) } entryID, err := repo.GetReference(rsl.Ref) if err != nil { t.Fatal(err) } return entryID } // AddNTestCommitsToSpecifiedRef is a test helper that adds test commits to the // specified Git ref in the provided repository. Parameter `n` determines how // many commits are added. Each commit is associated with a distinct tree. The // first commit contains a tree with one object (an empty blob), the second with // two objects (both empty blobs), and so on. Each commit is signed using the // specified key. func AddNTestCommitsToSpecifiedRef(t *testing.T, repo *gitinterface.Repository, refName string, n int, signingKeyBytes []byte) []gitinterface.Hash { t.Helper() emptyBlobHash, err := repo.WriteBlob(nil) if err != nil { t.Fatal(err) } treeBuilder := gitinterface.NewTreeBuilder(repo) // Create N trees with 1...N artifacts treeHashes := make([]gitinterface.Hash, 0, n) for i := 1; i <= n; i++ { objects := []gitinterface.TreeEntry{} for j := 0; j < i; j++ { objects = append(objects, gitinterface.NewEntryBlob(fmt.Sprintf("%d", j+1), emptyBlobHash)) } treeHash, err := treeBuilder.WriteTreeFromEntries(objects) if err != nil { t.Fatal(err) } treeHashes = append(treeHashes, treeHash) } commitIDs := []gitinterface.Hash{} for i := 0; i < n; i++ { commitID, err := repo.CommitUsingSpecificKey(treeHashes[i], refName, "Test commit\n", signingKeyBytes) if err != nil { t.Fatal(err) } commitIDs = append(commitIDs, commitID) } return commitIDs } // CreateTestSignedTag creates a signed tag in the repository pointing to the // target object. The tag is signed using the specified key. func CreateTestSignedTag(t *testing.T, repo *gitinterface.Repository, tagName string, target gitinterface.Hash, signingKeyBytes []byte) gitinterface.Hash { t.Helper() tagMessage := fmt.Sprintf("%s\n", tagName) tagID, err := repo.TagUsingSpecificKey(target, tagName, tagMessage, signingKeyBytes) if err != nil { t.Fatal(err) } return tagID } gittuf-0.9.0/internal/common/set/000077500000000000000000000000001475150141000167175ustar00rootroot00000000000000gittuf-0.9.0/internal/common/set/set.go000066400000000000000000000056061475150141000200500ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package set import ( "cmp" "encoding/json" "slices" ) // Set implements a generic set data structure for use in gittuf metadata and // workflows. type Set[T cmp.Ordered] struct { contents map[T]bool } // NewSet creates a new instance of a set for the specified type that fulfils // the cmp.Ordered constraint. func NewSet[T cmp.Ordered]() *Set[T] { return &Set[T]{contents: map[T]bool{}} } // NewSetFromItems creates a new instance of a set and populates it with the // items provided. func NewSetFromItems[T cmp.Ordered](items ...T) *Set[T] { set := NewSet[T]() for _, item := range items { set.Add(item) } return set } // MarshalJSON is used to serialize the instance of the set into JSON. func (s *Set[T]) MarshalJSON() ([]byte, error) { contents := s.Contents() slices.Sort(contents) return json.Marshal(contents) } // UnmarshalJSON is used to load a set from the JSON representation. 
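// As a minimal usage sketch (assuming the standard library's encoding/json
// package, which this file already imports):
//
//	s := NewSet[string]()
//	_ = json.Unmarshal([]byte(`["a", "b"]`), s) // s now contains "a" and "b"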
func (s *Set[T]) UnmarshalJSON(jsonBytes []byte) error { items := []T{} if err := json.Unmarshal(jsonBytes, &items); err != nil { return err } s.contents = map[T]bool{} for _, item := range items { s.Add(item) } return nil } // Contents returns the objects present in the set. func (s *Set[T]) Contents() []T { if s.contents == nil { return nil } items := []T{} for item := range s.contents { items = append(items, item) } return items } // Add inserts an item into the set. func (s *Set[T]) Add(item T) { s.contents[item] = true } // Remove deletes the item from the set. func (s *Set[T]) Remove(item T) { delete(s.contents, item) } // Extend adds all of the items in the passed set, resulting in a union // operation. func (s *Set[T]) Extend(set *Set[T]) { if set == nil { return } for item := range set.contents { s.Add(item) } } // Has returns true if the set has the corresponding item. func (s *Set[T]) Has(item T) bool { return s.contents[item] } // Len returns the number of objects in the set. func (s *Set[T]) Len() int { return len(s.contents) } // Intersection returns a new set consisting of the items present in both sets. func (s *Set[T]) Intersection(set *Set[T]) *Set[T] { intersection := NewSet[T]() rangeOver := s other := set if set.Len() < s.Len() { rangeOver = set other = s } for item := range rangeOver.contents { if other.Has(item) { intersection.Add(item) } } return intersection } // Minus returns a new set consisting of the items present only in the current // set. func (s *Set[T]) Minus(set *Set[T]) *Set[T] { minus := NewSet[T]() for item := range s.contents { if !set.Has(item) { minus.Add(item) } } return minus } // Equal returns true if both sets have the same items. func (s *Set[T]) Equal(set *Set[T]) bool { if s.Len() != set.Len() { return false } for item := range s.contents { if !set.Has(item) { return false } } return true } gittuf-0.9.0/internal/dev/000077500000000000000000000000001475150141000154125ustar00rootroot00000000000000gittuf-0.9.0/internal/dev/dev.go000066400000000000000000000007011475150141000165150ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package dev import ( "fmt" "os" ) const DevModeKey = "GITTUF_DEV" var ErrNotInDevMode = fmt.Errorf("this feature is only available in developer mode, and can potentially UNDERMINE repository security; override by setting %s=1", DevModeKey) // InDevMode returns true if gittuf is currently in developer mode. 
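// As an illustrative example, developer mode is enabled by exporting
// GITTUF_DEV=1 before invoking gittuf (e.g. `GITTUF_DEV=1 gittuf ...`); any
// other value, or leaving the variable unset, keeps it disabled.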
func InDevMode() bool { return os.Getenv(DevModeKey) == "1" } gittuf-0.9.0/internal/display/000077500000000000000000000000001475150141000163015ustar00rootroot00000000000000gittuf-0.9.0/internal/display/color.go000066400000000000000000000016311475150141000177470ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package display import "fmt" type color uint func (c color) Code() string { switch c { case reset: return "\033[0m" case red: return "\033[31m" case green: return "\033[32m" case yellow: return "\033[33m" case blue: return "\033[34m" case magenta: return "\033[35m" case cyan: return "\033[36m" case gray: return "\033[37m" case white: return "\033[97m" default: return "" } } const ( reset color = iota red green yellow blue magenta cyan gray white ) type colorerFunc = func(string, color) string var colorer colorerFunc = colorerOn //nolint:revive func colorerOn(s string, c color) string { return fmt.Sprintf("%s%s%s", c.Code(), s, reset.Code()) } func colorerOff(s string, _ color) string { return s } func EnableColor() { colorer = colorerOn } func DisableColor() { colorer = colorerOff } gittuf-0.9.0/internal/display/color_test.go000066400000000000000000000020151475150141000210030ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package display import ( "fmt" "testing" "github.com/stretchr/testify/assert" ) func TestColorer(t *testing.T) { tests := map[string]struct { c color }{ "red": {c: red}, "green": {c: green}, "yellow": {c: yellow}, "blue": {c: blue}, "magenta": {c: magenta}, "cyan": {c: cyan}, "gray": {c: gray}, "white": {c: white}, } testString := "gittuf" t.Run("colorer on", func(t *testing.T) { for name, test := range tests { coloredString := colorer(testString, test.c) assert.Equal(t, fmt.Sprintf("%s%s%s", test.c.Code(), testString, reset.Code()), coloredString, fmt.Sprintf("unexpected colored string sequence in test '%s'", name)) } }) t.Run("colorer off", func(t *testing.T) { colorer = colorerOff for name, test := range tests { coloredString := colorer(testString, test.c) assert.Equal(t, testString, coloredString, fmt.Sprintf("unexpected colored string sequence in test '%s'", name)) } }) } gittuf-0.9.0/internal/display/display.go000066400000000000000000000052431475150141000203010ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package display import ( "fmt" "io" "log/slog" "os" "os/exec" ) type pagerGetter = func() string var getPager pagerGetter = getPagerReal //nolint:revive func getPagerReal() string { var pagerPrograms = []string{ os.Getenv("PAGER"), // look at what the user has configured "less", // default on unix-like systems "more", // default on Windows } for _, bin := range pagerPrograms { if _, err := exec.LookPath(bin); err == nil { return bin } } return "" } func NewDisplayWriter(output io.Writer) io.WriteCloser { slog.Debug("Finding pager program...") pagerBin := getPager() if pagerBin != "" { slog.Debug(fmt.Sprintf("Found pager program %s", pagerBin)) cmd := exec.Command(pagerBin) cmd.Stdout = output cmd.Stderr = os.Stderr return &pager{command: cmd} } slog.Debug("Pager program not found, writing to output directly...") switch output := output.(type) { // adityasaky: os.Stdout is an io.WriteCloser and we hardcode that as our // output medium. So, do we even need this check and noopwritecloser? // Possibly not, but I suggest we keep it until we can sufficiently evaluate // across multiple environments. 
noopwritecloser is handy for test writers // as well, so. case io.WriteCloser: return output default: return &noopwritecloser{ writer: output, } } } // pager implements the io.WriteCloser while supporting writing buffered // contents displayed using a pager program like less or more. type pager struct { command *exec.Cmd stdInWriter io.WriteCloser started bool } func (p *pager) Write(contents []byte) (int, error) { if !p.started { // Load the page program's stdin pipe so we can feed it content to // display stdInWriter, err := p.command.StdinPipe() if err != nil { return -1, err } p.stdInWriter = stdInWriter // Start the page cmd if err := p.command.Start(); err != nil { return -1, err } p.started = true } return p.stdInWriter.Write(contents) } func (p *pager) Close() error { // Close the stdin pipe first as the cmd will wait indefinitely otherwise if p.stdInWriter != nil { if err := p.stdInWriter.Close(); err != nil { return err } } if p.started { if err := p.command.Wait(); err != nil { return err } } return nil } // noopwritecloser is a fallback to convert an io.Writer into io.WriteCloser. It // adds a Close method which does nothing (i.e., it's a noop). type noopwritecloser struct { writer io.Writer } func (n *noopwritecloser) Write(contents []byte) (int, error) { return n.writer.Write(contents) } func (n *noopwritecloser) Close() error { return nil } gittuf-0.9.0/internal/display/display_test.go000066400000000000000000000023231475150141000213340ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package display import ( "bytes" "fmt" "runtime" "strings" "testing" "github.com/stretchr/testify/assert" ) func getPagerTestCat() string { return "cat" } func getPagerTestNone() string { return "" } func TestNewDisplayWriter(t *testing.T) { tests := map[string]struct { contents []byte page bool }{ "without paging": { contents: []byte("Hello, world!"), page: false, }, "with paging": { contents: []byte("Hello, world!"), page: true, }, } for name, test := range tests { t.Run(name, func(t *testing.T) { if test.page { getPager = getPagerTestCat } else { getPager = getPagerTestNone } output := &bytes.Buffer{} writer := NewDisplayWriter(output) _, err := writer.Write(test.contents) if err != nil { t.Fatal(err) } if err := writer.Close(); err != nil { t.Fatal(err) } gotOutput := output.String() if runtime.GOOS == "windows" { gotOutput = strings.TrimSpace(gotOutput) } assert.Equal(t, string(test.contents), gotOutput, fmt.Sprintf("unexpected result in test '%s', got '%s', want '%s'", name, gotOutput, string(test.contents))) }) } } gittuf-0.9.0/internal/display/rsl.go000066400000000000000000000121371475150141000174340ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package display import ( "errors" "fmt" "io" "log/slog" "strings" "github.com/gittuf/gittuf/internal/gitinterface" "github.com/gittuf/gittuf/internal/rsl" ) // RSLLog implements the display function for `gittuf rsl log`. 
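// A minimal invocation sketch (assuming repo is an already-loaded
// *gitinterface.Repository whose RSL has at least one entry):
//
//	writer := NewDisplayWriter(os.Stdout)
//	err := RSLLog(repo, writer)
//
// RSLLog closes the writer itself via defer, so callers do not need to close
// it again.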
func RSLLog(repo *gitinterface.Repository, writer io.WriteCloser) error { defer writer.Close() //nolint:errcheck annotationsMap := make(map[string][]*rsl.AnnotationEntry) iteratorEntry, err := rsl.GetLatestEntry(repo) if err != nil { return err } for { hasParent := true // assume an entry has a parent parentEntry, err := rsl.GetParentForEntry(repo, iteratorEntry) if err != nil { if !errors.Is(err, rsl.ErrRSLEntryNotFound) { return err } // only reachable when err is ErrRSLEntryNotFound // Now we know the iteratorEntry does not have a parent hasParent = false } switch iteratorEntry := iteratorEntry.(type) { case *rsl.ReferenceEntry: slog.Debug(fmt.Sprintf("Writing reference entry '%s'...", iteratorEntry.ID.String())) if err := writeRSLReferenceEntry(writer, iteratorEntry, annotationsMap[iteratorEntry.ID.String()], hasParent); err != nil { // We return nil here to avoid noisy output when the writer is // unexpectedly closed, such as by killing the pager return nil } case *rsl.AnnotationEntry: slog.Debug(fmt.Sprintf("Tracking annotation entry '%s'...", iteratorEntry.ID.String())) for _, targetID := range iteratorEntry.RSLEntryIDs { targetIDString := targetID.String() if _, has := annotationsMap[targetIDString]; !has { annotationsMap[targetIDString] = []*rsl.AnnotationEntry{} } annotationsMap[targetIDString] = append(annotationsMap[targetIDString], iteratorEntry) } case *rsl.PropagationEntry: slog.Debug(fmt.Sprintf("Writing propagation entry '%s'...", iteratorEntry.ID.String())) if err := writeRSLPropagationEntry(writer, iteratorEntry, hasParent); err != nil { // We return nil here to avoid noisy output when // the writer is unexpectedly closed, such as by // killing the pager return nil } } if !hasParent { // We're done return nil } iteratorEntry = parentEntry } } // writeRSLReferenceEntry prepares the output for the given entry and its // annotations. It then writes the output to the provided writer. If hasParent // is false, then the prepared output for the entry has a single trailing // newline. Otherwise, an additional newline is added to separate entries from // one another. 
func writeRSLReferenceEntry(writer io.WriteCloser, entry *rsl.ReferenceEntry, annotations []*rsl.AnnotationEntry, hasParent bool) error { /* Output format: entry (skipped) Ref: Target: Number: Annotation ID: Skip: Number: Message: Annotation ID: Skip: Number: Message: */ text := colorer(fmt.Sprintf("entry %s", entry.ID.String()), yellow) for _, annotation := range annotations { if annotation.Skip { text += fmt.Sprintf(" %s", colorer("(skipped)", red)) break } } text += "\n" text += fmt.Sprintf("\n Ref: %s", entry.RefName) text += fmt.Sprintf("\n Target: %s", entry.TargetID.String()) if entry.Number != 0 { text += fmt.Sprintf("\n Number: %d", entry.Number) } for _, annotation := range annotations { text += "\n\n" text += colorer(fmt.Sprintf(" Annotation ID: %s", annotation.ID.String()), green) text += "\n" if annotation.Skip { text += colorer(" Skip: yes", red) } else { text += " Skip: no" } if annotation.Number != 0 { text += fmt.Sprintf("\n Number: %d", annotation.Number) } text += fmt.Sprintf("\n Message:\n %s", strings.TrimSpace(annotation.Message)) } text += "\n" // single trailing newline by default if hasParent { text += "\n" // extra newline for all intermediate (i.e., not last) entries } _, err := writer.Write([]byte(text)) return err } func writeRSLPropagationEntry(writer io.WriteCloser, entry *rsl.PropagationEntry, hasParent bool) error { /* Output format: propagation entry Ref: Target: UpstreamRepo: UpstreamEntry: Number: */ text := colorer(fmt.Sprintf("propagation entry %s", entry.ID.String()), yellow) text += "\n" text += fmt.Sprintf("\n Ref: %s", entry.RefName) text += fmt.Sprintf("\n Target: %s", entry.TargetID.String()) text += fmt.Sprintf("\n UpstreamRepo: %s", entry.UpstreamRepository) text += fmt.Sprintf("\n UpstreamEntry: %s", entry.UpstreamEntryID.String()) if entry.Number != 0 { text += fmt.Sprintf("\n Number: %d", entry.Number) } text += "\n" // single trailing newline by default if hasParent { text += "\n" // extra newline for all intermediate (i.e., not last) entries } _, err := writer.Write([]byte(text)) return err } gittuf-0.9.0/internal/display/rsl_test.go000066400000000000000000000361611475150141000204760ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package display import ( "bytes" "fmt" "testing" "github.com/gittuf/gittuf/internal/gitinterface" "github.com/gittuf/gittuf/internal/rsl" "github.com/stretchr/testify/assert" ) func TestRSLLog(t *testing.T) { tmpDir := t.TempDir() repo := gitinterface.CreateTestGitRepository(t, tmpDir, false) // add first entry if err := rsl.NewReferenceEntry("refs/heads/main", gitinterface.ZeroHash).Commit(repo, false); err != nil { t.Fatal(err) } entry, _, err := rsl.GetLatestReferenceUpdaterEntry(repo) if err != nil { t.Fatal(err) } // skip annotation if err := rsl.NewAnnotationEntry([]gitinterface.Hash{entry.GetID()}, true, "msg").Commit(repo, false); err != nil { t.Fatal(err) } // add another entry if err := rsl.NewReferenceEntry("refs/heads/main", gitinterface.ZeroHash).Commit(repo, false); err != nil { t.Fatal(err) } // add another entry if err := rsl.NewReferenceEntry("refs/heads/main", gitinterface.ZeroHash).Commit(repo, false); err != nil { t.Fatal(err) } entry, _, err = rsl.GetLatestReferenceUpdaterEntry(repo) if err != nil { t.Fatal(err) } // skip annotation if err := rsl.NewAnnotationEntry([]gitinterface.Hash{entry.GetID()}, true, "msg").Commit(repo, false); err != nil { t.Fatal(err) } // non-skip annotation if err := 
rsl.NewAnnotationEntry([]gitinterface.Hash{entry.GetID()}, false, "msg").Commit(repo, false); err != nil { t.Fatal(err) } expectedOutput := `entry 2d21a6b9fb1f3e432e0776eac63acdc23a57b538 (skipped) Ref: refs/heads/main Target: 0000000000000000000000000000000000000000 Number: 4 Annotation ID: 630618d8f80714658fb6d88bc352f92189d1d443 Skip: no Number: 6 Message: msg Annotation ID: 15f60db9f339375f709dae8d04e0055ea50ed2b9 Skip: yes Number: 5 Message: msg entry ba2a366ccd85b3a4a636641c3604ce2d1496c08c Ref: refs/heads/main Target: 0000000000000000000000000000000000000000 Number: 3 entry ae4467eaa656782fe9d04eaabfa30db47e9ea24b (skipped) Ref: refs/heads/main Target: 0000000000000000000000000000000000000000 Number: 1 Annotation ID: f79156492abec45bb2e1dbc518999a83b31a069c Skip: yes Number: 2 Message: msg ` output := &bytes.Buffer{} writer := &noopwritecloser{writer: output} err = RSLLog(repo, writer) assert.Nil(t, err) assert.Equal(t, expectedOutput, output.String()) } func TestWriteRSLReferenceEntry(t *testing.T) { // Set colorer to off for tests colorer = colorerOff t.Run("simple without number, no parent", func(t *testing.T) { entry := rsl.NewReferenceEntry("refs/heads/main", gitinterface.ZeroHash) entry.ID = gitinterface.ZeroHash expectedOutput := `entry 0000000000000000000000000000000000000000 Ref: refs/heads/main Target: 0000000000000000000000000000000000000000 ` output := &bytes.Buffer{} testWriter := &noopwritecloser{writer: output} err := writeRSLReferenceEntry(testWriter, entry, nil, false) assert.Nil(t, err) assert.Equal(t, expectedOutput, output.String()) }) t.Run("simple without number, has parent", func(t *testing.T) { entry := rsl.NewReferenceEntry("refs/heads/main", gitinterface.ZeroHash) entry.ID = gitinterface.ZeroHash expectedOutput := `entry 0000000000000000000000000000000000000000 Ref: refs/heads/main Target: 0000000000000000000000000000000000000000 ` output := &bytes.Buffer{} testWriter := &noopwritecloser{writer: output} err := writeRSLReferenceEntry(testWriter, entry, nil, true) assert.Nil(t, err) assert.Equal(t, expectedOutput, output.String()) }) t.Run("simple with number, no parent", func(t *testing.T) { entry := rsl.NewReferenceEntry("refs/heads/main", gitinterface.ZeroHash) entry.ID = gitinterface.ZeroHash entry.Number = 1 expectedOutput := `entry 0000000000000000000000000000000000000000 Ref: refs/heads/main Target: 0000000000000000000000000000000000000000 Number: 1 ` output := &bytes.Buffer{} testWriter := &noopwritecloser{writer: output} err := writeRSLReferenceEntry(testWriter, entry, nil, false) assert.Nil(t, err) assert.Equal(t, expectedOutput, output.String()) }) t.Run("simple with number, has parent", func(t *testing.T) { entry := rsl.NewReferenceEntry("refs/heads/main", gitinterface.ZeroHash) entry.ID = gitinterface.ZeroHash entry.Number = 1 expectedOutput := `entry 0000000000000000000000000000000000000000 Ref: refs/heads/main Target: 0000000000000000000000000000000000000000 Number: 1 ` output := &bytes.Buffer{} testWriter := &noopwritecloser{writer: output} err := writeRSLReferenceEntry(testWriter, entry, nil, true) assert.Nil(t, err) assert.Equal(t, expectedOutput, output.String()) }) t.Run("with skip annotation, no parent", func(t *testing.T) { tmpDir := t.TempDir() repo := gitinterface.CreateTestGitRepository(t, tmpDir, true) if err := rsl.NewReferenceEntry("refs/heads/main", gitinterface.ZeroHash).Commit(repo, false); err != nil { t.Fatal(err) } entry, _, err := rsl.GetLatestReferenceUpdaterEntry(repo) if err != nil { t.Fatal(err) } if err := 
rsl.NewAnnotationEntry([]gitinterface.Hash{entry.GetID()}, true, "msg").Commit(repo, false); err != nil { t.Fatal(err) } annotationEntryT, err := rsl.GetLatestEntry(repo) if err != nil { t.Fatal(err) } annotationEntry := annotationEntryT.(*rsl.AnnotationEntry) expectedOutput := fmt.Sprintf(`entry %s (skipped) Ref: refs/heads/main Target: 0000000000000000000000000000000000000000 Number: 1 Annotation ID: %s Skip: yes Number: 2 Message: msg `, entry.GetID().String(), annotationEntry.GetID().String()) output := &bytes.Buffer{} testWriter := &noopwritecloser{writer: output} err = writeRSLReferenceEntry(testWriter, entry.(*rsl.ReferenceEntry), []*rsl.AnnotationEntry{annotationEntry}, false) assert.Nil(t, err) assert.Equal(t, expectedOutput, output.String()) }) t.Run("with skip annotation, has parent", func(t *testing.T) { tmpDir := t.TempDir() repo := gitinterface.CreateTestGitRepository(t, tmpDir, true) if err := rsl.NewReferenceEntry("refs/heads/main", gitinterface.ZeroHash).Commit(repo, false); err != nil { t.Fatal(err) } entry, _, err := rsl.GetLatestReferenceUpdaterEntry(repo) if err != nil { t.Fatal(err) } if err := rsl.NewAnnotationEntry([]gitinterface.Hash{entry.GetID()}, true, "msg").Commit(repo, false); err != nil { t.Fatal(err) } annotationEntryT, err := rsl.GetLatestEntry(repo) if err != nil { t.Fatal(err) } annotationEntry := annotationEntryT.(*rsl.AnnotationEntry) expectedOutput := fmt.Sprintf(`entry %s (skipped) Ref: refs/heads/main Target: 0000000000000000000000000000000000000000 Number: 1 Annotation ID: %s Skip: yes Number: 2 Message: msg `, entry.GetID().String(), annotationEntry.GetID().String()) output := &bytes.Buffer{} testWriter := &noopwritecloser{writer: output} err = writeRSLReferenceEntry(testWriter, entry.(*rsl.ReferenceEntry), []*rsl.AnnotationEntry{annotationEntry}, true) assert.Nil(t, err) assert.Equal(t, expectedOutput, output.String()) }) t.Run("with non-skip annotation, no parent", func(t *testing.T) { tmpDir := t.TempDir() repo := gitinterface.CreateTestGitRepository(t, tmpDir, true) if err := rsl.NewReferenceEntry("refs/heads/main", gitinterface.ZeroHash).Commit(repo, false); err != nil { t.Fatal(err) } entry, _, err := rsl.GetLatestReferenceUpdaterEntry(repo) if err != nil { t.Fatal(err) } if err := rsl.NewAnnotationEntry([]gitinterface.Hash{entry.GetID()}, false, "msg").Commit(repo, false); err != nil { t.Fatal(err) } annotationEntryT, err := rsl.GetLatestEntry(repo) if err != nil { t.Fatal(err) } annotationEntry := annotationEntryT.(*rsl.AnnotationEntry) expectedOutput := fmt.Sprintf(`entry %s Ref: refs/heads/main Target: 0000000000000000000000000000000000000000 Number: 1 Annotation ID: %s Skip: no Number: 2 Message: msg `, entry.GetID().String(), annotationEntry.GetID().String()) output := &bytes.Buffer{} testWriter := &noopwritecloser{writer: output} err = writeRSLReferenceEntry(testWriter, entry.(*rsl.ReferenceEntry), []*rsl.AnnotationEntry{annotationEntry}, false) assert.Nil(t, err) assert.Equal(t, expectedOutput, output.String()) }) t.Run("with non-skip annotation, has parent", func(t *testing.T) { tmpDir := t.TempDir() repo := gitinterface.CreateTestGitRepository(t, tmpDir, true) if err := rsl.NewReferenceEntry("refs/heads/main", gitinterface.ZeroHash).Commit(repo, false); err != nil { t.Fatal(err) } entry, _, err := rsl.GetLatestReferenceUpdaterEntry(repo) if err != nil { t.Fatal(err) } if err := rsl.NewAnnotationEntry([]gitinterface.Hash{entry.GetID()}, false, "msg").Commit(repo, false); err != nil { t.Fatal(err) } annotationEntryT, err := 
rsl.GetLatestEntry(repo) if err != nil { t.Fatal(err) } annotationEntry := annotationEntryT.(*rsl.AnnotationEntry) expectedOutput := fmt.Sprintf(`entry %s Ref: refs/heads/main Target: 0000000000000000000000000000000000000000 Number: 1 Annotation ID: %s Skip: no Number: 2 Message: msg `, entry.GetID().String(), annotationEntry.GetID().String()) output := &bytes.Buffer{} testWriter := &noopwritecloser{writer: output} err = writeRSLReferenceEntry(testWriter, entry.(*rsl.ReferenceEntry), []*rsl.AnnotationEntry{annotationEntry}, true) assert.Nil(t, err) assert.Equal(t, expectedOutput, output.String()) }) t.Run("with non-skip annotation, no parent, annotation message has trailing newline", func(t *testing.T) { tmpDir := t.TempDir() repo := gitinterface.CreateTestGitRepository(t, tmpDir, true) if err := rsl.NewReferenceEntry("refs/heads/main", gitinterface.ZeroHash).Commit(repo, false); err != nil { t.Fatal(err) } entry, _, err := rsl.GetLatestReferenceUpdaterEntry(repo) if err != nil { t.Fatal(err) } if err := rsl.NewAnnotationEntry([]gitinterface.Hash{entry.GetID()}, false, "msg\n").Commit(repo, false); err != nil { t.Fatal(err) } annotationEntryT, err := rsl.GetLatestEntry(repo) if err != nil { t.Fatal(err) } annotationEntry := annotationEntryT.(*rsl.AnnotationEntry) expectedOutput := fmt.Sprintf(`entry %s Ref: refs/heads/main Target: 0000000000000000000000000000000000000000 Number: 1 Annotation ID: %s Skip: no Number: 2 Message: msg `, entry.GetID().String(), annotationEntry.GetID().String()) output := &bytes.Buffer{} testWriter := &noopwritecloser{writer: output} err = writeRSLReferenceEntry(testWriter, entry.(*rsl.ReferenceEntry), []*rsl.AnnotationEntry{annotationEntry}, false) assert.Nil(t, err) assert.Equal(t, expectedOutput, output.String()) }) t.Run("with non-skip annotation, has parent, annotation message has trailing newline", func(t *testing.T) { tmpDir := t.TempDir() repo := gitinterface.CreateTestGitRepository(t, tmpDir, true) if err := rsl.NewReferenceEntry("refs/heads/main", gitinterface.ZeroHash).Commit(repo, false); err != nil { t.Fatal(err) } entry, _, err := rsl.GetLatestReferenceUpdaterEntry(repo) if err != nil { t.Fatal(err) } if err := rsl.NewAnnotationEntry([]gitinterface.Hash{entry.GetID()}, false, "msg\n").Commit(repo, false); err != nil { t.Fatal(err) } annotationEntryT, err := rsl.GetLatestEntry(repo) if err != nil { t.Fatal(err) } annotationEntry := annotationEntryT.(*rsl.AnnotationEntry) expectedOutput := fmt.Sprintf(`entry %s Ref: refs/heads/main Target: 0000000000000000000000000000000000000000 Number: 1 Annotation ID: %s Skip: no Number: 2 Message: msg `, entry.GetID().String(), annotationEntry.GetID().String()) output := &bytes.Buffer{} testWriter := &noopwritecloser{writer: output} err = writeRSLReferenceEntry(testWriter, entry.(*rsl.ReferenceEntry), []*rsl.AnnotationEntry{annotationEntry}, true) assert.Nil(t, err) assert.Equal(t, expectedOutput, output.String()) }) } func TestWriteRSLPropagationEntry(t *testing.T) { // Set colorer to off for tests colorer = colorerOff t.Run("simple, without number, without parent", func(t *testing.T) { entry := rsl.NewPropagationEntry("refs/heads/main", gitinterface.ZeroHash, "https://git.example.com/repository", gitinterface.ZeroHash) entry.ID = gitinterface.ZeroHash expectedOutput := `propagation entry 0000000000000000000000000000000000000000 Ref: refs/heads/main Target: 0000000000000000000000000000000000000000 UpstreamRepo: https://git.example.com/repository UpstreamEntry: 0000000000000000000000000000000000000000 ` output := 
&bytes.Buffer{} testWriter := &noopwritecloser{writer: output} err := writeRSLPropagationEntry(testWriter, entry, false) assert.Nil(t, err) assert.Equal(t, expectedOutput, output.String()) }) t.Run("simple, with number, without parent", func(t *testing.T) { entry := rsl.NewPropagationEntry("refs/heads/main", gitinterface.ZeroHash, "https://git.example.com/repository", gitinterface.ZeroHash) entry.Number = 1 entry.ID = gitinterface.ZeroHash expectedOutput := `propagation entry 0000000000000000000000000000000000000000 Ref: refs/heads/main Target: 0000000000000000000000000000000000000000 UpstreamRepo: https://git.example.com/repository UpstreamEntry: 0000000000000000000000000000000000000000 Number: 1 ` output := &bytes.Buffer{} testWriter := &noopwritecloser{writer: output} err := writeRSLPropagationEntry(testWriter, entry, false) assert.Nil(t, err) assert.Equal(t, expectedOutput, output.String()) }) t.Run("simple, without number, with parent", func(t *testing.T) { entry := rsl.NewPropagationEntry("refs/heads/main", gitinterface.ZeroHash, "https://git.example.com/repository", gitinterface.ZeroHash) entry.ID = gitinterface.ZeroHash expectedOutput := `propagation entry 0000000000000000000000000000000000000000 Ref: refs/heads/main Target: 0000000000000000000000000000000000000000 UpstreamRepo: https://git.example.com/repository UpstreamEntry: 0000000000000000000000000000000000000000 ` output := &bytes.Buffer{} testWriter := &noopwritecloser{writer: output} err := writeRSLPropagationEntry(testWriter, entry, true) assert.Nil(t, err) assert.Equal(t, expectedOutput, output.String()) }) t.Run("simple, with number, with parent", func(t *testing.T) { entry := rsl.NewPropagationEntry("refs/heads/main", gitinterface.ZeroHash, "https://git.example.com/repository", gitinterface.ZeroHash) entry.Number = 1 entry.ID = gitinterface.ZeroHash expectedOutput := `propagation entry 0000000000000000000000000000000000000000 Ref: refs/heads/main Target: 0000000000000000000000000000000000000000 UpstreamRepo: https://git.example.com/repository UpstreamEntry: 0000000000000000000000000000000000000000 Number: 1 ` output := &bytes.Buffer{} testWriter := &noopwritecloser{writer: output} err := writeRSLPropagationEntry(testWriter, entry, true) assert.Nil(t, err) assert.Equal(t, expectedOutput, output.String()) }) } gittuf-0.9.0/internal/git-remote-gittuf/000077500000000000000000000000001475150141000202105ustar00rootroot00000000000000gittuf-0.9.0/internal/git-remote-gittuf/README.md000066400000000000000000000042011475150141000214640ustar00rootroot00000000000000# git-remote-gittuf Alongside the `gittuf` binary, gittuf ships with a custom remote transfer protocol binary, implementing Git's [remote-helper interface](https://git-scm.com/docs/gitremote-helpers). We call this the **transport** binary, named `git-remote-gittuf`. It's an easy way to get started with using gittuf on your repository, as it takes care of the following common operations for you: - Creating RSL entries upon pushing your changes - Fetching gittuf metadata when pulling changes > [!NOTE] The transport does not perform the steps needed to *initialize* a > gittuf repository (i.e. setting up root of trust, policy, etc.). These steps > must be done manually for new repositories (see the [getting started > guide](/docs/get-started.md)). The gittuf transport supports both HTTPS and SSH remotes. ## How to Install This repository provides pre-built binaries for the transport that are signed and published using [GoReleaser]. 
The signature for these binaries are generated using [Sigstore], using the release workflow's identity. Refer to the instructions in the [get started guide] to verify the signature for the transport binary. Alternatively, the transport can be built from source. Running `go install` will compile the transport and place it in your `GOBIN`. ## How to Use Once it's installed, using the custom transport is simple; you'll need to add the `gittuf::` prefix to the repository URL. How to do this depends on the repository you'd like to use it for. ### Using with a fresh `git clone` When running `git clone`, add `gittuf::` to the beginning of the URL of the repository. For example, - `gittuf::git@github.com:gittuf/gittuf`, if you're using SSH - `gittuf::https://github.com/gittuf/gittuf`, if you're using HTTPS ### Using with an existing repository In this case you'll need to set the remote for your repository (most likely `origin`): ```bash # For SSH git remote set-url origin gittuf::git@github.com:gittuf/gittuf # For HTTPS git remote set-url origin gittuf::https://github.com/gittuf/gittuf ``` [Sigstore]: https://www.sigstore.dev/ [GoReleaser]: https://goreleaser.com/ [get started guide]: /docs/get-started.md gittuf-0.9.0/internal/git-remote-gittuf/curl.go000066400000000000000000000410511475150141000215050ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package main import ( "bufio" "bytes" "context" "errors" "fmt" "os" "os/exec" "strings" "github.com/gittuf/gittuf/experimental/gittuf" rslopts "github.com/gittuf/gittuf/experimental/gittuf/options/rsl" "github.com/gittuf/gittuf/internal/common/set" "github.com/gittuf/gittuf/internal/rsl" ) var ErrFailedAuthentication = errors.New("failed getting remote refs") // handleCurl implements the helper for remotes configured to use the curl // backend. For this transport, we invoke git-remote-http, only interjecting at // specific points to make gittuf specific additions. func handleCurl(ctx context.Context, repo *gittuf.Repository, remoteName, url string) (map[string]string, bool, error) { // Scan git-remote-gittuf stdin for commands from the parent process stdInScanner := &logScanner{name: "git-remote-gittuf stdin", scanner: bufio.NewScanner(os.Stdin)} stdInScanner.Split(splitInput) stdOutWriter := &logWriteCloser{name: "git-remote-gittuf stdout", writeCloser: os.Stdout} // We invoke git-remote-http, itself a Git remote helper helper := exec.Command("git-remote-http", remoteName, url) helper.Stderr = os.Stderr // We want to inspect the helper's stdout for the gittuf ref statuses helperStdOutPipe, err := helper.StdoutPipe() if err != nil { return nil, false, err } helperStdOut := &logReadCloser{name: "git-remote-http stdout", readCloser: helperStdOutPipe} // We want to interpose with the helper's stdin to push and fetch gittuf // specific objects and refs helperStdInPipe, err := helper.StdinPipe() if err != nil { return nil, false, err } helperStdIn := &logWriteCloser{name: "git-remote-http stdin", writeCloser: helperStdInPipe} if err := helper.Start(); err != nil { return nil, false, err } var ( gittufRefsTips = map[string]string{} isPush bool ) for stdInScanner.Scan() { input := stdInScanner.Bytes() switch { case bytes.HasPrefix(input, []byte("stateless-connect")): /* stateless-connect is the new experimental way of communicating with the remote. It implements Git Protocol v2. Here, we don't do much other than recognizing that we're in a fetch, as this protocol doesn't support pushes yet. 
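As a rough sketch of the exchange we relay below (illustrative, not a
verbatim trace): Git first sends a pkt-line encoded command=ls-refs
request, to which we append a ref-prefix for refs/gittuf/, and then a
command=fetch request carrying want/have lines; each request is
terminated by a flush packet (0000) and each response ends with a
response-end packet (0002).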
*/ log("cmd: stateless-connect") // Write to git-remote-http if _, err := helperStdIn.Write(input); err != nil { return nil, false, err } // Receive the initial info sent by the service via // git-remote-http seenFlush := false for { // We wrap this in an extra loop because for // some reason, on Windows, git-remote-http // responds with a buffer that just contains // `\n` followed by the actual response. // However, the initial buffer is taken to be // the end of output, meaning we miss the actual // end of output. helperStdOutScanner := bufio.NewScanner(helperStdOut) helperStdOutScanner.Split(splitOutput) for helperStdOutScanner.Scan() { output := helperStdOutScanner.Bytes() if _, err := stdOutWriter.Write(output); err != nil { return nil, false, err } // If nothing is returned, the user has likely failed to // authenticate with the remote if len(output) == 0 { return nil, false, ErrFailedAuthentication } // flushPkt is used to indicate the end of // output if bytes.Equal(output, flushPkt) { seenFlush = true break } } if seenFlush { break } } // Read in command from parent process -> this should be // command=ls-refs with protocol v2 // ls-refs is a command to upload-pack. Like list and list // for-push, it enumerates the refs and their states on the remote. // Unlike those commands, this must be passed to upload-pack. // Further, ls-refs must be parametrized with ref-prefixes. We add // refs/gittuf/ as a prefix to learn about the gittuf refs on the // remote during fetches. for stdInScanner.Scan() { input = stdInScanner.Bytes() // Add ref-prefix refs/gittuf/ to the ls-refs command before // flush if bytes.Equal(input, flushPkt) { log("adding ref-prefix for refs/gittuf/") gittufRefPrefixCommand := fmt.Sprintf("ref-prefix %s\n", gittufRefPrefix) if _, err := helperStdIn.Write(packetEncode(gittufRefPrefixCommand)); err != nil { return nil, false, err } } if _, err := helperStdIn.Write(input); err != nil { return nil, false, err } // flushPkt is used to indicate the end of input if bytes.Equal(input, flushPkt) { break } } // Read advertised refs from the remote helperStdOutScanner := bufio.NewScanner(helperStdOut) helperStdOutScanner.Split(splitPacket) for helperStdOutScanner.Scan() { output := helperStdOutScanner.Bytes() if !bytes.Equal(output, flushPkt) && !bytes.Equal(output, endOfReadPkt) { refAd := string(output) refAd = refAd[4:] // remove pkt length prefix refAd = strings.TrimSpace(refAd) // If the gittuf ref is the very first, then there will be // additional information in the output after a null byte. // However, this is unlikely as HEAD is typically the first. if i := strings.IndexByte(refAd, '\x00'); i > 0 { refAd = refAd[:i] // drop everything from null byte onwards } refAdSplit := strings.Split(refAd, " ") if strings.HasPrefix(refAdSplit[1], gittufRefPrefix) { gittufRefsTips[refAdSplit[1]] = refAdSplit[0] } } // Write output to parent process if _, err := stdOutWriter.Write(output); err != nil { return nil, false, err } // endOfReadPkt indicates end of response // in stateless connections if bytes.Equal(output, endOfReadPkt) { break } } // At this point, we enter the haves / wants negotiation, which is // followed usually by the remote sending a packfile with the // requested Git objects. // We add the gittuf specific objects as wants. We don't have to // specify haves as Git automatically specifies all the objects it // has regardless of what refs they're reachable via. 
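// For illustration (a sketch of what the code below does, using
// packetEncode from helpers.go; tip is assumed to be a 40-hex object ID):
//
//	wantCmd := fmt.Sprintf("want %s\n", tip)
//	helperStdIn.Write(packetEncode(wantCmd)) // wire form: "0032want <tip>\n"
//
// Locally known gittuf tips are advertised the same way with "have" lines.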
// Read in command from parent process -> this should be // command=fetch with protocol v2 var ( wroteGittufWantsAndHaves = false // track this in case there are multiple rounds of negotiation wroteWants = false allWants = set.NewSet[string]() allHaves = set.NewSet[string]() ) for stdInScanner.Scan() { input = stdInScanner.Bytes() switch { case bytes.Equal(input, flushPkt), bytes.Contains(input, []byte("done")): if !wroteGittufWantsAndHaves { // We only write gittuf specific haves and wants when we // haven't already written them. We track this because // in multiple rounds of negotiations, we only want to // write them the first time. log("adding gittuf wants") wants, haves, err := getGittufWantsAndHaves(repo, gittufRefsTips) if err != nil { wants = gittufRefsTips } for _, tip := range wants { if !allWants.Has(tip) { // indicate we // want the // gittuf obj wantCmd := fmt.Sprintf("want %s\n", tip) if _, err := helperStdIn.Write(packetEncode(wantCmd)); err != nil { return nil, false, err } } } for _, tip := range haves { if !allHaves.Has(tip) { // indicate we // have the // gittuf obj haveCmd := fmt.Sprintf("have %s\n", tip) if _, err := helperStdIn.Write(packetEncode(haveCmd)); err != nil { return nil, false, err } } } wroteGittufWantsAndHaves = true } if bytes.Equal(input, flushPkt) { // On a clone, we see `done` and // then flush. We need to write // our wants before done, but // wroteWants can't be set to // true until the next buffer // with flush is written to the // remote. wroteWants = true } case bytes.Contains(input, []byte("want")): idx := bytes.Index(input, []byte("want ")) sha := string(bytes.TrimSpace(input[idx+len("want "):])) allWants.Add(sha) for ref, tip := range gittufRefsTips { if tip == sha { // Take out this ref as // something for us to // update or add wants // for log("taking out", ref, "as it matches", sha) delete(gittufRefsTips, ref) } } case bytes.Contains(input, []byte("have")): idx := bytes.Index(input, []byte("have ")) sha := string(bytes.TrimSpace(input[idx+len("have "):])) allHaves.Add(sha) } if _, err := helperStdIn.Write(input); err != nil { return nil, false, err } // Read from remote if wants are done // We may need to scan multiple times for inputs, which is // why this flag is used if wroteWants { helperStdOutScanner := bufio.NewScanner(helperStdOut) helperStdOutScanner.Split(splitPacket) // TODO: check multiplexed output for helperStdOutScanner.Scan() { output := helperStdOutScanner.Bytes() // Send along to parent process if _, err := stdOutWriter.Write(output); err != nil { return nil, false, err } if bytes.Equal(output, endOfReadPkt) { // Two things are possible // a) The communication is done // b) The remote indicates another round of // negotiation is required // Instead of parsing the output to find out, // we let the parent process tell us // If the parent process has further input, more // negotiation is needed if !stdInScanner.Scan() { break } input = stdInScanner.Bytes() if len(input) == 0 { break } for ref, tip := range gittufRefsTips { wantCmd := fmt.Sprintf("want %s", tip) if bytes.Contains(input, []byte(wantCmd)) { // Take out this ref as // something for us to // update or add wants // for delete(gittufRefsTips, ref) } } // Having scanned already, we must write prior // to letting the scan continue in the outer // loop // This assumes the very first input isn't just // flush again... 
if _, err := helperStdIn.Write(input); err != nil { return nil, false, err } wroteWants = false break } } } } case bytes.HasPrefix(input, []byte("list for-push")): /* The helper has two commands, in reality: list, list for-push. Both of these are used to list the states of refs on the remote. The for-push variation just formats it in a way that can be used for the push comamnd later. We inspect this to learn we're in a push. We also use the output of this command, implemented by git-remote-https, to learn what the states of the gittuf refs are on the remote. */ log("cmd: list for-push") // Write it to git-remote-http if _, err := helperStdIn.Write(input); err != nil { return nil, false, err } // Read remote refs helperStdOutScanner := bufio.NewScanner(helperStdOut) helperStdOutScanner.Split(splitOutput) for helperStdOutScanner.Scan() { output := helperStdOutScanner.Bytes() // If nothing is returned, the user has likely failed to // authenticate with the remote if len(output) == 0 { return nil, false, ErrFailedAuthentication } refAdSplit := strings.Split(strings.TrimSpace(string(output)), " ") if len(refAdSplit) >= 2 { // Inspect each one to see if it's a gittuf ref if strings.HasPrefix(refAdSplit[1], gittufRefPrefix) { gittufRefsTips[refAdSplit[1]] = refAdSplit[0] } } // Pass remote ref status to parent process if _, err := stdOutWriter.Write(output); err != nil { return nil, false, err } // flushPkt indicates end of message if bytes.Equal(output, flushPkt) { break } } case bytes.HasPrefix(input, []byte("push")): // multiline input log("cmd: push") isPush = true pushCommands := [][]byte{} for { if bytes.Equal(input, []byte("\n")) { break } pushCommands = append(pushCommands, input) if !stdInScanner.Scan() { break } input = stdInScanner.Bytes() } if len(gittufRefsTips) != 0 { if err := repo.ReconcileLocalRSLWithRemote(ctx, remoteName, true); err != nil { return nil, false, err } } // dstRefs tracks the explicitly pushed refs so we know // to pass the response from the server for those refs // back to Git dstRefs := set.NewSet[string]() for _, pushCommand := range pushCommands { // TODO: maybe find another way to determine // whether repo is gittuf enabled // The remote may not have gittuf refs but the // local may, meaning this won't get synced if len(gittufRefsTips) != 0 { pushCommandString := string(pushCommand) pushCommandString = strings.TrimSpace(pushCommandString) refSpec := strings.TrimPrefix(pushCommandString, "push ") refSpecSplit := strings.Split(refSpec, ":") srcRef := refSpecSplit[0] srcRef = strings.TrimPrefix(srcRef, "+") // force push // TODO: during a force push, we want to also revoke prior // pushes dstRef := refSpecSplit[1] dstRefs.Add(dstRef) if !strings.HasPrefix(dstRef, gittufRefPrefix) { // Create RSL entries for the ref as long as it's not a // gittuf ref // A gittuf ref can pop up here when it's explicitly // pushed by the user // TODO: skipping propagation; invoke it once total instead of per ref if err := repo.RecordRSLEntryForReference(ctx, srcRef, true, rslopts.WithOverrideRefName(dstRef), rslopts.WithSkipCheckForDuplicateEntry(), rslopts.WithSkipPropagation()); err != nil { return nil, false, err } } } // Write push command to helper if _, err := helperStdIn.Write(pushCommand); err != nil { return nil, false, err } } if len(gittufRefsTips) != 0 && !dstRefs.Has(rsl.Ref) { // Push RSL if it hasn't been explicitly pushed pushCommand := fmt.Sprintf("push %s:%s\n", rsl.Ref, rsl.Ref) if _, err := helperStdIn.Write([]byte(pushCommand)); err != nil { return nil, false, 
err } } // Indicate end of push statements if _, err := helperStdIn.Write([]byte("\n")); err != nil { return nil, false, err } seenTrailingNewLine := false for { // We wrap this in an extra loop because for // some reason, on Windows, the trailing newline // indicating end of output is sent in a // separate buffer that's otherwise missed. If // we miss that newline, we hang as though the // push isn't complete. helperStdOutScanner := bufio.NewScanner(helperStdOut) helperStdOutScanner.Split(splitOutput) for helperStdOutScanner.Scan() { output := helperStdOutScanner.Bytes() outputSplit := bytes.Split(output, []byte(" ")) // outputSplit has either two items or // three items. It has two when the // response is `ok` and potentially // three when the response is `error`. // Either way, the second item is the // ref in question that we want to // bubble back to our caller. if len(outputSplit) < 2 { // This should never happen but // if it does, just send it back // to the caller if _, err := stdOutWriter.Write(output); err != nil { return nil, false, err } } else { if dstRefs.Has(strings.TrimSpace(string(outputSplit[1]))) { // this was explicitly // pushed by the user if _, err := stdOutWriter.Write(output); err != nil { return nil, false, err } } } if bytes.Equal(output, []byte("\n")) { seenTrailingNewLine = true break } } if seenTrailingNewLine { break } } default: // Pass through other commands we don't want to interpose to the // curl helper if _, err := helperStdIn.Write(input); err != nil { return nil, false, err } // Receive the initial info sent by the service helperStdOutScanner := bufio.NewScanner(helperStdOut) helperStdOutScanner.Split(splitOutput) for helperStdOutScanner.Scan() { output := helperStdOutScanner.Bytes() if _, err := stdOutWriter.Write(output); err != nil { return nil, false, err } // Check for end of message if bytes.Equal(output, flushPkt) { break } } } } if err := helperStdIn.Close(); err != nil { return nil, false, err } if err := helperStdOut.Close(); err != nil { return nil, false, err } if err := helper.Wait(); err != nil { return nil, false, err } return gittufRefsTips, isPush, nil } gittuf-0.9.0/internal/git-remote-gittuf/helpers.go000066400000000000000000000073401475150141000222050ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package main import ( "bufio" "bytes" "fmt" "io" "os" "os/exec" "strings" "github.com/gittuf/gittuf/experimental/gittuf" "github.com/gittuf/gittuf/internal/common/set" ) type logWriteCloser struct { name string writeCloser io.WriteCloser } func (l *logWriteCloser) Write(p []byte) (int, error) { prefix := fmt.Sprintf("writing to %s", l.name) trimmed := bytes.TrimSpace(p) if len(trimmed) != len(p) { prefix += " (space trimmed)" } prefix += ":" log(prefix, string(trimmed)) return l.writeCloser.Write(p) } func (l *logWriteCloser) Close() error { return l.writeCloser.Close() } type logReadCloser struct { name string readCloser io.ReadCloser } func (l *logReadCloser) Read(p []byte) (int, error) { n, err := l.readCloser.Read(p) prefix := fmt.Sprintf("reading from %s", l.name) trimmed := bytes.TrimSpace(p) if len(trimmed) != len(p) { prefix += " (space trimmed)" } prefix += ":" log(prefix, string(trimmed)) return n, err } func (l *logReadCloser) Close() error { return l.readCloser.Close() } type logScanner struct { name string scanner *bufio.Scanner } func (l *logScanner) Buffer(buf []byte, maxN int) { l.scanner.Buffer(buf, maxN) } func (l *logScanner) Bytes() []byte { b := l.scanner.Bytes() 
prefix := fmt.Sprintf("scanner %s returned", l.name) trimmed := bytes.TrimSpace(b) if len(trimmed) != len(b) { prefix += " (space trimmed)" } prefix += ":" log(prefix, b, string(trimmed)) return b } func (l *logScanner) Err() error { return l.scanner.Err() } func (l *logScanner) Scan() bool { return l.scanner.Scan() } func (l *logScanner) Split(split bufio.SplitFunc) { l.scanner.Split(split) } func (l *logScanner) Text() string { t := l.scanner.Text() prefix := fmt.Sprintf("scanner %s returned", l.name) trimmed := strings.TrimSpace(t) if len(trimmed) != len(t) { prefix += " (space trimmed)" } prefix += ":" log(prefix, trimmed) return t } func log(messages ...any) { if len(messages) == 0 { return } fmtStr := "%v" for i := 1; i < len(messages); i++ { fmtStr += " %v" } fmtStr += "\n" if logFile != nil { fmt.Fprintf(logFile, fmtStr, messages...) } } func packetEncode(str string) []byte { return []byte(fmt.Sprintf("%04x%s", 4+len(str), str)) } func getGittufWantsAndHaves(repo *gittuf.Repository, remoteTips map[string]string) (map[string]string, []string, error) { wants := map[string]string{} currentTips := set.NewSet[string]() for remoteRef, tip := range remoteTips { currentTip, err := repo.GetGitRepository().GetReference(remoteRef) if err != nil { return nil, nil, err } if currentTip.String() != tip { wants[remoteRef] = tip } currentTips.Add(currentTip.String()) } return wants, currentTips.Contents(), nil } func getSSHCommand(repo *gittuf.Repository) ([]string, error) { sshCmd := os.Getenv("GIT_SSH_COMMAND") if len(sshCmd) != 0 { return strings.Split(sshCmd, " "), nil } sshCmd = os.Getenv("GIT_SSH") if len(sshCmd) != 0 { return []string{sshCmd}, nil } config, err := repo.GetGitRepository().GetGitConfig() if err != nil { return nil, err } sshCmd, defined := config["core.sshcommand"] if defined { return strings.Split(sshCmd, " "), nil } return []string{"ssh"}, nil } func testSSH(sshCmd []string, host string) error { command := append(sshCmd, "-T", host) //nolint:gocritic cmd := exec.Command(command[0], command[1:]...) //nolint:gosec if output, err := cmd.CombinedOutput(); err != nil { if cmd.ProcessState.ExitCode() == 255 { // with GitHub, we see exit code 1 while with GitLab and BitBucket, // we see exit code 0 return fmt.Errorf("%s: %s", err.Error(), bytes.TrimSpace(output)) } } return nil } gittuf-0.9.0/internal/git-remote-gittuf/main.go000066400000000000000000000164661475150141000215000ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package main import ( "context" "fmt" "io" "os" "os/exec" "path/filepath" "strings" "github.com/gittuf/gittuf/experimental/gittuf" "github.com/gittuf/gittuf/internal/gitinterface" ) /* Git supports the idea of "remote helpers" which can be used to modify interactions with remote repositories. https://git-scm.com/docs/gitremote-helpers Interactions with a remote take two forms: either we're fetching objects (git fetch, pull, clone) or we're sending objects (git push). Git has a custom protocol dictating the interactions for fetches and pushes and how data is communicated using "packfiles". Note: this protocol is now versioned. v0 and v1 are effectively identical except that v1 has an explicit version declaration while v0 has nothing. https://git-scm.com/docs/pack-protocol On the other hand, v2 is a significant departure, and is documented separately. Currently, v2 only supports fetches, with pushes using v0/v1. https://git-scm.com/docs/protocol-v2 In both cases, there's an underlying communication protocol. 
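(As an aside on the version declaration mentioned above: with v1, the very
first pkt-line the server sends is literally "version 1", and with v2 it is
"version 2" followed by a capability advertisement; v0 sends no such line and
begins directly with the ref advertisement.)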
Overwhelmingly, this is ssh in the real world, to the point that it is part of the Git implementation itself. In contrast, the HTTP(s) / FTP(s) protocols are implemented as a remote helper program using curl. Setup: * git-remote-gittuf must be in PATH. * A remote must be configured to use `gittuf::` as the prefix. The result of the remote URL must indicate the underlying transport mechanism. Example: `gittuf::https://github.com/gittuf/gittuf` and `gittuf::git@github.com:gittuf/gittuf` Invocation: During an interaction with a remote configured using the gittuf:: prefix, Git invokes the gittuf helper from the PATH. Initial interaction: Git learns what capabilities the remote helper implements, and chooses the appropriate one for the task at hand. git-remote-gittuf is not consistent in the advertised capabilities. This is because when the underlying transport is HTTP(s) / FTP(s), we just invoke git-remote-http and relay its capabilities back to Git. When the underlying transport is SSH, we advertise a lightweight set of capabilities to ensure Git chooses the one we want it to for both cases. Anatomy of a fetch: * Git invokes stateless-connect (protocol v2) to communicate with git-upload-pack on the remote * Git invokes ls-refs, a command of git-upload-pack, to learn what refs the remote has and what their tips point to Note: we interpose this to learn the status of gittuf refs on the remote * Git negotiates with git-upload-pack the objects it wants based on the refs that must be fetched Note: we interpose this to request gittuf specific objects as well * The remote sends a packfile with the requested objects * git-remote-gittuf uses update-ref to set the local gittuf refs, as Git will not do this for us Anatomy of a push: * Git invokes list for-push (protocol v0/v1) to list the refs available on the remote * Git invokes push (protocol v0/v1) to indicate what refs must be updated to on the remote * A packfile is created and streamed to git-receive-pack on the server */ var ( // logFile is used to debug git-remote-gittuf. It is set using // GITTUF_LOG_FILE. logFile io.Writer // gitVersion contains the version of Git used by the client invoking // git-remote-gittuf. It is used to self-identify with a remote service such // as git-upload-pack and git-receive-pack. 
gitVersion string flushPkt = []byte{'0', '0', '0', '0'} delimiterPkt = []byte{'0', '0', '0', '1'} endOfReadPkt = []byte{'0', '0', '0', '2'} ) const ( gitUploadPack = "git-upload-pack" gitReceivePack = "git-receive-pack" gittufRefPrefix = "refs/gittuf/" ) func run(ctx context.Context) error { if len(os.Args) < 3 { return fmt.Errorf("usage: %s ", os.Args[0]) } gitDir := os.Getenv("GIT_DIR") remoteName := os.Args[1] url := os.Args[2] var handler func(context.Context, *gittuf.Repository, string, string) (map[string]string, bool, error) switch { case strings.HasPrefix(url, "https://"), strings.HasPrefix(url, "http://"), strings.HasPrefix(url, "ftp://"), strings.HasPrefix(url, "ftps://"): log("Prefix indicates curl remote helper must be used") handler = handleCurl case strings.HasPrefix(url, "/"), strings.HasPrefix(url, "file://"): log("Prefix indicates file helper must be used") return nil default: log("Using ssh helper") handler = handleSSH } repo, err := gittuf.LoadRepository() if err != nil { return err } gittufRefsTips, isPush, err := handler(ctx, repo, remoteName, url) if err != nil { return err } for { // When cloning/fetching, we have to hang until Git sets things up // before we can update-ref entries, err := os.ReadDir(filepath.Join(gitDir, "objects", "pack")) if err != nil { return err } if len(entries) == 0 { break } // `entries` is sorted by name. The "regular" entries have pack- as a // prefix. When actually fetching contents, it's stored in a tmp_pack or // temp_rev file. Therefore, if the last entry starts with pack-, we // know we don't have a tmp_ file. lastEntryName := entries[len(entries)-1].Name() if strings.HasPrefix(lastEntryName, "pack-") { // not tmp_pack or tmp_rev break } } if !isPush { // TODO: this breaks when `git fetch` is invoked explicitly for a gittuf // ref because Git separately tries to update-ref. // During wants, check if the latest remote gittuf ref tips are // requested. Use _only_ the latest so as to avoid any unnecessary blob // collisions. for ref, tip := range gittufRefsTips { tipH, err := gitinterface.NewHash(tip) if err != nil { return err } if err := repo.GetGitRepository().SetReference(ref, tipH); err != nil { msg := fmt.Sprintf("Unable to set reference '%s': '%s'", ref, err.Error()) log(msg) os.Stderr.Write([]byte(fmt.Sprintf("git-remote-gittuf: %s\n", msg))) //nolint:errcheck } } // Uncomment after gittuf can accept a git_dir env var; this will happen // with the gitinterface PRs naturally. // TODO: this must either be looped to address each changed ref that // exists locally or gittuf needs another flag for --all. 
// var cmd *exec.Cmd // if rslTip != "" { // log("we have rsl tip") // cmd = exec.Command("gittuf", "verify-ref", "--from-entry", rslTip, "HEAD") // } else { // cwd, _ := os.Getwd() // log("we don't have rsl tip", cwd) // cmd = exec.Command("gittuf", "verify-ref", "HEAD") // } // _, err := cmd.Output() // if err != nil { // log(err.Error()) // if _, nerr := os.Stderr.Write([]byte("gittuf verification failed\n")); nerr != nil { // return errors.Join(err, nerr) // } // return err // } } return nil } func populateGitVersion() error { cmd := exec.Command("git", "--version") output, err := cmd.Output() if err != nil { return err } gitVersion = strings.TrimPrefix(strings.TrimSpace(string(output)), "git version ") return nil } func main() { logFilePath := os.Getenv("GITTUF_LOG_FILE") if logFilePath != "" { file, err := os.Create(logFilePath) if err != nil { panic(err) } logFile = file } if err := populateGitVersion(); err != nil { fmt.Fprintln(os.Stderr, err.Error()) os.Exit(1) } if err := run(context.Background()); err != nil { fmt.Fprintln(os.Stderr, err.Error()) os.Exit(1) } } gittuf-0.9.0/internal/git-remote-gittuf/split.go000066400000000000000000000047241475150141000217010ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package main import ( "bufio" "bytes" "strconv" ) func dropCR(data []byte) []byte { if len(data) > 0 && data[len(data)-1] == '\r' { return data[0 : len(data)-1] } return data } func splitInput(data []byte, atEOF bool) (int, []byte, error) { if atEOF { return len(data), data, bufio.ErrFinalToken } if len(data) == 0 { // Request more data. return 0, nil, nil } if bytes.HasPrefix(data, flushPkt) { // We have the flushPkt that'll otherwise cause it to hang. // This packet isn't followed by a newline a lot of the time, so we just // end up requesting data perennially. return len(flushPkt), flushPkt, nil } if i := bytes.IndexByte(data, '\n'); i >= 0 { // We have more data to process, so we just return the current line. return i + 1, dropCR(data[0 : i+1]), nil } // Request more data. return 0, nil, nil } func splitOutput(data []byte, atEOF bool) (advance int, token []byte, err error) { if atEOF { return len(data), data, bufio.ErrFinalToken } if len(data) == 0 { // Request more data. return 0, nil, nil } if bytes.HasPrefix(data, flushPkt) { // We have the flushPkt that'll otherwise cause it to hang. // This packet isn't followed by a newline a lot of the time, so we just // end up requesting data perennially. return len(flushPkt), flushPkt, nil } if i := bytes.IndexByte(data, '\n'); i >= 0 { if i == len(data)-1 { // This is the very last newline, we need to return ErrFinalToken to // not block the stdin scanner anymore. // This is the fundamental difference between this function and // splitInput, because this can block stdin. return len(data), dropCR(data), bufio.ErrFinalToken } // We have more data to process, so we just return the current line. return i + 1, dropCR(data[0 : i+1]), nil } // Request more data. 
return 0, nil, nil } func splitPacket(data []byte, atEOF bool) (int, []byte, error) { if len(data) < 4 { return 0, nil, nil // request more } lengthB := data[:4] if bytes.Equal(lengthB, flushPkt) || bytes.Equal(lengthB, delimiterPkt) || bytes.Equal(lengthB, endOfReadPkt) { return 4, lengthB, nil } length, err := strconv.ParseInt(string(lengthB), 16, 64) if err != nil { return -1, nil, err } l := int(length) if l > len(data) { return 0, nil, nil // request more in a new buffer } if atEOF { if l == len(data) { return l, data, bufio.ErrFinalToken } } return l, data[:l], nil } gittuf-0.9.0/internal/git-remote-gittuf/ssh.go000066400000000000000000000536441475150141000213500ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package main import ( "bufio" "bytes" "context" "fmt" "io" "os" "os/exec" "strings" "github.com/gittuf/gittuf/experimental/gittuf" rslopts "github.com/gittuf/gittuf/experimental/gittuf/options/rsl" "github.com/gittuf/gittuf/internal/common/set" "github.com/gittuf/gittuf/internal/gitinterface" "github.com/gittuf/gittuf/internal/rsl" ) // handleSSH implements the helper for remotes configured to use SSH. For this // transport, we invoke the installed ssh binary to interact with the remote. func handleSSH(ctx context.Context, repo *gittuf.Repository, remoteName, url string) (map[string]string, bool, error) { url = strings.TrimPrefix(url, "ssh://") url = strings.TrimPrefix(url, "git+ssh://") url = strings.TrimPrefix(url, "ssh+git://") urlSplit := strings.Split(url, ":") // 0 is the connection [user@]host, 1 is the repo host := urlSplit[0] repository := urlSplit[1] // Scan git-remote-gittuf stdin for commands from the parent process stdInScanner := &logScanner{name: "git-remote-gittuf stdin", scanner: bufio.NewScanner(os.Stdin)} stdInScanner.Split(splitInput) stdOutWriter := &logWriteCloser{name: "git-remote-gittuf stdout", writeCloser: os.Stdout} var ( helperStdOut io.ReadCloser helperStdIn io.WriteCloser gittufRefsTips = map[string]string{} remoteRefTips = map[string]string{} ) for stdInScanner.Scan() { input := stdInScanner.Bytes() switch { case bytes.HasPrefix(input, []byte("capabilities")): /* For SSH, we have several options wrt capabilities. First, we could just implement fetch and push. These are v0/v1 protocols. The issue here is that while push is fine, fetch effectively fetches _all_ refs it sees on the remote via list. Additionally, using v2 protocol where possible seems good for efficiency improvements hinted at by the docs. The connect capability sets up a bidirectional connection with the server. It can handle both fetches and pushes; depending on what's happening, either upload-pack or receive-pack must be invoked on the server. This is fine for fetch operations. However, for push, we can tell the server to set refs/gittuf/ to the object. However, we do not control the invocation of git pack-objects --stdout. Git (which invokes us) invokes pack-objects separately, and routes its stdout into the transport's stdin to transmit the packfile bytes. In summary, we cannot use a combination of fetch and push, and we cannot use connect. What about stateless-connect? This is part of the v2 protocol and can only handle fetches at the moment. It's marked as experimental, which is something we want to be wary about with new Git versions. There may well be breaking changes here, given that the only intended user of this command is other Git tooling. stateless-connect is quite easy to work with to handle the fetch aspects. 
In addition, we implement the push capability. Here, Git tells us the refspecs that must be pushed. We are separately responsible for actually sending the packfile(s). So, the solution is that we create RSL entries for each requested ref, and include the gittuf objects in the packfile. Thus, we specify stateless-connect and push as the two capabilities supported by this helper. */ log("cmd: capabilities") if _, err := stdOutWriter.Write([]byte("stateless-connect\npush\n\n")); err != nil { return nil, false, err } case bytes.HasPrefix(input, []byte("stateless-connect")): /* When we see stateless-connect, right now we know this means a fetch is underway. us: ssh -o SendEnv=GIT_PROTOCOL 'git-upload-pack ' ssh: if v2 { server capabilities } else { server capabilities refs and their states } Assuming v2: us (to ssh): ls-refs // add gittuf prefix ssh: refs and their states us (to git): output of ls-refs git: fetch, wants, haves us (to ssh): fetch, wants, haves // add gittuf wants ssh: acks (optionally triggers another round of wants, haves) ssh: packfile us (to git): acks, packfile Assuming v0/v1: git: wants, haves // NO FETCH HERE IIRC us (to ssh): wants, haves // add gittuf wants ssh: acks, packfile us (to git): acks, packfile Notes: * v0/v1 of the pack protocol is only partially supported here. * Once the service is invoked, all messages are wrapped in the packet-line format. * In the v0/v1 format, each line is packet encoded, and the entire message is in turn packet encoded for wants/haves. * The flushPkt is commonly used to signify end of a message. * The endOfReadPkt is sent at the end of the packfile transmission. */ log("cmd: stateless-connect") sshCmd, err := getSSHCommand(repo) if err != nil { return nil, false, err } if err := testSSH(sshCmd, host); err != nil { return nil, false, err } sshCmd = append(sshCmd, "-o", "SendEnv=GIT_PROTOCOL") // This allows us to request GIT_PROTOCOL v2 sshExecCmd := fmt.Sprintf("%s '%s'", gitUploadPack, repository) // with stateless-connect, it's only fetches sshCmd = append(sshCmd, host, sshExecCmd) // Crafting ssh subprocess for fetches helper := exec.Command(sshCmd[0], sshCmd[1:]...) //nolint:gosec // Add env var for GIT_PROTOCOL v2 helper.Env = append(os.Environ(), "GIT_PROTOCOL=version=2") helper.Stderr = os.Stderr // We want to inspect the helper's stdout for gittuf ref statuses helperStdOutPipe, err := helper.StdoutPipe() if err != nil { return nil, false, err } helperStdOut = &logReadCloser{readCloser: helperStdOutPipe, name: "ssh stdout"} // We want to interpose with the helper's stdin by passing in // extra refs etc. 
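// (Illustrative aside, not part of the original sources: the main addition on
// this path is an extra pkt-line argument to Git's ls-refs command, namely
// "ref-prefix refs/gittuf/", written just before the flush packet so that the
// remote also advertises gittuf refs alongside whatever Git asked for.)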
helperStdInPipe, err := helper.StdinPipe() if err != nil { return nil, false, err } helperStdIn = &logWriteCloser{writeCloser: helperStdInPipe, name: "ssh stdin"} if err := helper.Start(); err != nil { return nil, false, err } // Indicate connection established successfully if _, err := stdOutWriter.Write([]byte("\n")); err != nil { return nil, false, err } // Read from remote service // TODO: we may need nested infinite loops here helperStdOutScanner := bufio.NewScanner(helperStdOut) helperStdOutScanner.Split(splitPacket) for helperStdOutScanner.Scan() { output := helperStdOutScanner.Bytes() // TODO: handle git protocol v0/v1 // If server doesn't support v2, as soon as we connect, // it tells us the ref statuses if _, err := stdOutWriter.Write(output); err != nil { return nil, false, err } // check for end of message if bytes.Equal(output, flushPkt) { break } } // In protocol v2, this should now go to our parent process // requesting ls-refs for stdInScanner.Scan() { input = stdInScanner.Bytes() // Add ref-prefix refs/gittuf/ to the ls-refs command before // flush if bytes.Equal(input, flushPkt) { log("adding ref-prefix for refs/gittuf/") gittufRefPrefixCommand := fmt.Sprintf("ref-prefix %s\n", gittufRefPrefix) if _, err := helperStdIn.Write(packetEncode(gittufRefPrefixCommand)); err != nil { return nil, false, err } } if _, err := helperStdIn.Write(input); err != nil { return nil, false, err } // Check for end of message if bytes.Equal(input, flushPkt) { break } } helperStdOutScanner = bufio.NewScanner(helperStdOut) helperStdOutScanner.Split(splitPacket) for helperStdOutScanner.Scan() { output := helperStdOutScanner.Bytes() // In the curl transport, we also look out for endOfReadPkt // However, this has been a bit flakey // So when we see the flushPkt, we'll also write the // endOfReadPkt ourselves if !bytes.Equal(output, flushPkt) { refAd := string(output) refAd = refAd[4:] // remove pkt length prefix refAd = strings.TrimSpace(refAd) // If the gittuf ref is the very first, then there will be // additional information in the output after a null byte. // However, this is unlikely as HEAD is typically the first. if i := strings.IndexByte(refAd, '\x00'); i > 0 { refAd = refAd[:i] // drop everything from null byte onwards } refAdSplit := strings.Split(refAd, " ") if strings.HasPrefix(refAdSplit[1], gittufRefPrefix) { gittufRefsTips[refAdSplit[1]] = refAdSplit[0] } } // Write output to parent process if _, err := stdOutWriter.Write(output); err != nil { return nil, false, err } if bytes.Equal(output, flushPkt) { // For a stateless connection, we must // also add the endOfRead packet // ourselves if _, err := stdOutWriter.Write(endOfReadPkt); err != nil { return nil, false, err } break } } // At this point, we enter the haves / wants negotiation, which is // followed usually by the remote sending a packfile with the // requested Git objects. // We add the gittuf specific objects as wants. We don't have to // specify haves as Git automatically specifies all the objects it // has regardless of what refs they're reachable via. 
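// (Hedged sketch of the negotiation lines this helper injects; the hashes are
// placeholders, and the real values come from gittufRefsTips and the local
// gittuf refs. Each line is pkt-line encoded before being written:
//     want <gittuf ref tip SHA>
//     have <local gittuf SHA>
// )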
// Read in command from parent process -> this should be // command=fetch with protocol v2 var ( wroteGittufWantsAndHaves = false // track this in case there are multiple rounds of negotiation wroteWants = false allWants = set.NewSet[string]() allHaves = set.NewSet[string]() ) for stdInScanner.Scan() { input = stdInScanner.Bytes() if len(input) == 0 { // We're done but we need to exit gracefully if err := helperStdIn.Close(); err != nil { return nil, false, err } if err := helperStdOut.Close(); err != nil { return nil, false, err } if err := helper.Wait(); err != nil { return nil, false, err } return gittufRefsTips, false, nil } if bytes.Equal(input, flushPkt) { if !wroteGittufWantsAndHaves { // We only write gittuf specific haves and wants when we // haven't already written them. We track this because // in multiple rounds of negotiations, we only want to // write them the first time. log("adding gittuf wants") wants, haves, err := getGittufWantsAndHaves(repo, gittufRefsTips) if err != nil { wants = gittufRefsTips } for _, tip := range wants { if !allWants.Has(tip) { // indicate we // want the // gittuf obj wantCmd := fmt.Sprintf("want %s\n", tip) if _, err := helperStdIn.Write(packetEncode(wantCmd)); err != nil { return nil, false, err } } } for _, tip := range haves { if !allHaves.Has(tip) { // indicate we // have the // gittuf obj haveCmd := fmt.Sprintf("have %s\n", tip) if _, err := helperStdIn.Write(packetEncode(haveCmd)); err != nil { return nil, false, err } } } wroteGittufWantsAndHaves = true } wroteWants = true } else { if bytes.Contains(input, []byte("want")) { idx := bytes.Index(input, []byte("want ")) sha := string(bytes.TrimSpace(input[idx+len("want "):])) allWants.Add(sha) for ref, tip := range gittufRefsTips { if tip == sha { // Take out this ref as // something for us to // update or add wants // for log("taking out", ref, "as it matches", sha) delete(gittufRefsTips, ref) } } } else if bytes.Contains(input, []byte("have")) { idx := bytes.Index(input, []byte("have ")) sha := string(bytes.TrimSpace(input[idx+len("have "):])) allHaves.Add(sha) } } if _, err := helperStdIn.Write(input); err != nil { return nil, false, err } // Read from remote if wants are done // We may need to scan multiple times for inputs, which is why // this flag is used if wroteWants { helperStdOutScanner := bufio.NewScanner(helperStdOut) helperStdOutScanner.Split(splitPacket) packReusedSeen := false // TODO: find something cleaner to terminate for helperStdOutScanner.Scan() { output := helperStdOutScanner.Bytes() // Send along to parent process if _, err := stdOutWriter.Write(output); err != nil { return nil, false, err } if len(output) > 4 { line := output[4:] if line[0] == 2 && bytes.Contains(line, []byte("pack-reused")) { packReusedSeen = true // we see this at the end } } else if bytes.Equal(output, flushPkt) { if packReusedSeen { if _, err := stdOutWriter.Write(endOfReadPkt); err != nil { return nil, false, err } break } // Go back for more input wroteWants = false break } } } } case bytes.HasPrefix(input, []byte("list for-push")): /* git: list for-push // wants to know remote ref statuses us: ssh...git-receive-pack ssh: list of refs us (to git): list of refs // trailing newline git: push cmds us: track list of push cmds, create RSL entry for each us (to ssh): push cmds (receive-pack format) // also track oldTip for eachRef us (to ssh): git pack-objects > ssh // include object range desired */ log("cmd: list for-push") sshCmd, err := getSSHCommand(repo) if err != nil { return nil, false, err } if 
err := testSSH(sshCmd, host); err != nil { return nil, false, err } sshCmd = append(sshCmd, "-o", "SendEnv=GIT_PROTOCOL") // This allows us to request GIT_PROTOCOL v2 sshExecCmd := fmt.Sprintf("%s '%s'", gitReceivePack, repository) // for list for-push / push, we talk to git-receive-pack sshCmd = append(sshCmd, host, sshExecCmd) // Crafting ssh subprocess for pushes helper := exec.Command(sshCmd[0], sshCmd[1:]...) //nolint:gosec // Add env var for GIT_PROTOCOL v2 helper.Env = append(os.Environ(), "GIT_PROTOCOL=version=2") helper.Stderr = os.Stderr // We want to inspect the helper's stdout for gittuf ref statuses helperStdOutPipe, err := helper.StdoutPipe() if err != nil { return nil, false, err } helperStdOut = &logReadCloser{readCloser: helperStdOutPipe, name: "ssh stdout"} // We want to interpose with the helper's stdin by passing in // extra refs etc. helperStdInPipe, err := helper.StdinPipe() if err != nil { return nil, false, err } helperStdIn = &logWriteCloser{writeCloser: helperStdInPipe, name: "ssh stdin"} if err := helper.Start(); err != nil { return nil, false, err } helperStdOutScanner := bufio.NewScanner(helperStdOut) helperStdOutScanner.Split(splitPacket) // TODO: does this need a nested loop? for helperStdOutScanner.Scan() { output := helperStdOutScanner.Bytes() // TODO: do we need endOfReadPkt check? if !bytes.Equal(output, flushPkt) { refAd := string(output[4:]) // remove length prefix refAd = strings.TrimSpace(refAd) refAdSplit := strings.Split(refAd, " ") ref := refAdSplit[1] if i := strings.IndexByte(ref, '\x00'); i > 0 { ref = ref[:i] // remove config string passed after null byte } tip := refAdSplit[0] if strings.HasPrefix(ref, gittufRefPrefix) { gittufRefsTips[ref] = tip } remoteRefTips[ref] = tip // We don't use ref, instead we use refAdSplit[1]. This // allows us to propagate remote capabilities to the parent // process if _, err := stdOutWriter.Write([]byte(fmt.Sprintf("%s %s\n", tip, refAdSplit[1]))); err != nil { return nil, false, err } } if bytes.Equal(output, flushPkt) { // Add trailing new line as we're bridging git-receive-pack // output with git remote helper output if _, err := stdOutWriter.Write([]byte("\n")); err != nil { return nil, false, err } break } } case bytes.HasPrefix(input, []byte("push")): log("cmd: push") pushRefSpecs := []string{} for { if bytes.Equal(input, []byte("\n")) { break } line := string(input) line = strings.TrimSpace(line) line = strings.TrimPrefix(line, "push ") pushRefSpecs = append(pushRefSpecs, line) if !stdInScanner.Scan() { break } input = stdInScanner.Bytes() } if len(gittufRefsTips) != 0 { if err := repo.ReconcileLocalRSLWithRemote(ctx, remoteName, true); err != nil { return nil, false, err } } log("adding gittuf RSL entries") pushObjects := set.NewSet[string]() dstRefs := set.NewSet[string]() for i, refSpec := range pushRefSpecs { refSpecSplit := strings.Split(refSpec, ":") srcRef := refSpecSplit[0] srcRef = strings.TrimPrefix(srcRef, "+") dstRef := refSpecSplit[1] dstRefs.Add(dstRef) if dstRef == rsl.Ref { // We explicitly push the RSL ref below // because we need to know what its tip // will be after all other refs are // pushed.
continue } if !strings.HasPrefix(dstRef, gittufRefPrefix) { // TODO: skipping propagation; invoke it once total instead of per ref if err := repo.RecordRSLEntryForReference(ctx, srcRef, true, rslopts.WithOverrideRefName(dstRef), rslopts.WithSkipCheckForDuplicateEntry(), rslopts.WithSkipPropagation()); err != nil { return nil, false, err } } oldTip := remoteRefTips[dstRef] if oldTip == "" { oldTip = gitinterface.ZeroHash.String() } newTipHash, err := repo.GetGitRepository().GetReference(srcRef) if err != nil { return nil, false, err } newTip := newTipHash.String() pushCmd := fmt.Sprintf("%s %s %s", oldTip, newTip, dstRef) if i == 0 { // report-status-v2 indicates we want the result for each pushed ref // atomic indicates either all must be successful or none // object-format indicates SHA-1 vs SHA-256 repo // agent indicates the version of the local git client (most of the time) // Note: we explicitly don't use the sideband here // because of inconsistencies between receive-pack // implementations in sending status messages. // TODO: check that server advertises all of these pushCmd = fmt.Sprintf("%s%s report-status-v2 atomic object-format=sha1 agent=git/%s", pushCmd, string('\x00'), gitVersion) } pushCmd += "\n" if _, err := helperStdIn.Write(packetEncode(pushCmd)); err != nil { return nil, false, err } if newTip != gitinterface.ZeroHash.String() { pushObjects.Add(newTip) } if oldTip != gitinterface.ZeroHash.String() { pushObjects.Add(fmt.Sprintf("^%s", oldTip)) // this is passed on to git rev-list to enumerate objects, and we're saying don't send the old objects } } // TODO: gittuf verify-ref for each dstRef; abort if // verification fails // TODO: find better way to evaluate if gittuf refs must // be pushed if len(gittufRefsTips) != 0 { oldTip, has := remoteRefTips[rsl.Ref] if !has { oldTip = gitinterface.ZeroHash.String() } newTipHash, err := repo.GetGitRepository().GetReference(rsl.Ref) if err != nil { return nil, false, err } newTip := newTipHash.String() log("RSL now has tip", newTip) pushCmd := fmt.Sprintf("%s %s %s\n", oldTip, newTip, rsl.Ref) if _, err := helperStdIn.Write(packetEncode(pushCmd)); err != nil { return nil, false, err } if newTip != gitinterface.ZeroHash.String() { pushObjects.Add(newTip) } if oldTip != gitinterface.ZeroHash.String() { pushObjects.Add(fmt.Sprintf("^%s", oldTip)) // this is passed on to git rev-list to enumerate objects, and we're saying don't send the old objects } } // Write the flush packet as we're done with ref processing if _, err := helperStdIn.Write(flushPkt); err != nil { return nil, false, err } cmd := exec.Command("git", "pack-objects", "--all-progress-implied", "--revs", "--stdout", "--thin", "--delta-base-offset", "--progress") // Write objects that must be pushed to stdin cmd.Stdin = bytes.NewBufferString(strings.Join(pushObjects.Contents(), "\n") + "\n") // the extra \n is used to indicate end of stdin entries // Redirect packfile bytes to remote service stdin cmd.Stdout = helperStdIn // Status updates get sent to parent process cmd.Stderr = os.Stderr if err := cmd.Run(); err != nil { return nil, false, err } helperStdOutScanner := bufio.NewScanner(helperStdOut) helperStdOutScanner.Split(splitPacket) for helperStdOutScanner.Scan() { output := helperStdOutScanner.Bytes() if len(output) == 4 { if _, err := stdOutWriter.Write([]byte("\n")); err != nil { return nil, false, err } if err := helperStdIn.Close(); err != nil { return nil, false, err } if err := helperStdOut.Close(); err != nil { return nil, false, err } return gittufRefsTips, 
true, nil } output = output[4:] // remove length prefix outputSplit := bytes.Split(output, []byte(" ")) pushedRef := strings.TrimSpace(string(outputSplit[1])) if bytes.HasPrefix(output, []byte("ok")) { if dstRefs.Has(pushedRef) { if _, err := stdOutWriter.Write(output); err != nil { return nil, false, err } } } else if bytes.HasPrefix(output, []byte("ng")) { if dstRefs.Has(pushedRef) { output = bytes.TrimPrefix(output, []byte("ng")) output = append([]byte("error"), output...) // replace ng with error if _, err := stdOutWriter.Write(output); err != nil { return nil, false, err } } } } // Trailing newline for end of output if _, err := stdOutWriter.Write([]byte("\n")); err != nil { return nil, false, err } default: c := string(bytes.TrimSpace(input)) if c == "" { return nil, false, nil } return nil, false, fmt.Errorf("unknown command %s to gittuf-ssh helper", c) } } // FIXME: we return in fetch and push when successful, need to assess when // this is reachable return nil, false, nil } gittuf-0.9.0/internal/gitinterface/000077500000000000000000000000001475150141000173005ustar00rootroot00000000000000gittuf-0.9.0/internal/gitinterface/blob.go000066400000000000000000000023761475150141000205550ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package gitinterface import ( "bytes" "fmt" "io" ) // ReadBlob returns the contents of the blob referenced by blobID. func (r *Repository) ReadBlob(blobID Hash) ([]byte, error) { objType, err := r.executor("cat-file", "-t", blobID.String()).executeString() if err != nil { return nil, fmt.Errorf("unable to inspect if object is blob: %w", err) } else if objType != "blob" { return nil, fmt.Errorf("requested Git ID '%s' is not a blob object", blobID.String()) } stdOut, stdErr, err := r.executor("cat-file", "-p", blobID.String()).execute() if err != nil { return nil, fmt.Errorf("unable to read blob: %s", stdErr) } return io.ReadAll(stdOut) } // WriteBlob creates a blob object with the specified contents and returns the // ID of the resultant blob. 
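// As a rough illustration (an assumption about equivalent plumbing, not a
// statement from the original docs), writing a blob this way corresponds to:
//
//	printf '%s' "$CONTENTS" | git hash-object -t blob -w --stdin
//
// where $CONTENTS stands in for the supplied bytes.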
func (r *Repository) WriteBlob(contents []byte) (Hash, error) { stdInBuf := bytes.NewBuffer(contents) objID, err := r.executor("hash-object", "-t", "blob", "-w", "--stdin").withStdIn(stdInBuf).executeString() if err != nil { return ZeroHash, fmt.Errorf("unable to write blob: %w", err) } hash, err := NewHash(objID) if err != nil { return ZeroHash, fmt.Errorf("invalid Git ID for blob: %w", err) } return hash, nil } gittuf-0.9.0/internal/gitinterface/blob_test.go000066400000000000000000000023011475150141000216000ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package gitinterface import ( "testing" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) func TestRepositoryReadBlob(t *testing.T) { tempDir := t.TempDir() repo := CreateTestGitRepository(t, tempDir, false) contents := []byte("test file read") expectedBlobID, err := NewHash("2ecdd330475d93568ed27f717a84a7fe207d1c58") require.Nil(t, err) blobID, err := repo.WriteBlob(contents) if err != nil { t.Fatal(err) } assert.Equal(t, expectedBlobID, blobID) t.Run("read existing blob", func(t *testing.T) { readContents, err := repo.ReadBlob(blobID) assert.Nil(t, err) assert.Equal(t, contents, readContents) }) t.Run("read non-existing blob", func(t *testing.T) { _, err := repo.ReadBlob(ZeroHash) assert.NotNil(t, err) }) } func TestRepositoryWriteBlob(t *testing.T) { tempDir := t.TempDir() repo := CreateTestGitRepository(t, tempDir, false) contents := []byte("test file write") expectedBlobID, err := NewHash("999c05e9578e5d244920306842f516789a2498f7") require.Nil(t, err) blobID, err := repo.WriteBlob(contents) assert.Nil(t, err) assert.Equal(t, expectedBlobID, blobID) } gittuf-0.9.0/internal/gitinterface/changes.go000066400000000000000000000047601475150141000212460ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package gitinterface import ( "fmt" "sort" "strings" ) // GetFilePathsChangedByCommit returns the paths changed by the commit relative // to its parent commit. If the commit is a merge commit, i.e., it has more than // one parent, check if the commit is the same as at least one of its parents. // If there is a matching parent, we return no changes. If there is no matching // parent commit, we return the changes between the commit and each of its parents. 
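// For example (illustrative summary of the logic below): for a merge commit M
// with parents P1 and P2, the method first checks whether M's tree matches its
// last parent's tree using `git diff-tree --no-commit-id --name-only -r`; if it
// does, no paths are reported. Otherwise, the union of paths differing between
// M and each parent is returned, de-duplicated and sorted.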
func (r *Repository) GetFilePathsChangedByCommit(commitID Hash) ([]string, error) { if err := r.ensureIsCommit(commitID); err != nil { return nil, err } parentCommitIDs, err := r.GetCommitParentIDs(commitID) if err != nil { return nil, err } if len(parentCommitIDs) == 0 { filePaths, err := r.executor("ls-tree", "--name-only", "-r", commitID.String()).executeString() if err != nil { return nil, fmt.Errorf("unable to identify all commit file paths: %w", err) } paths := strings.Split(filePaths, "\n") return paths, nil } if len(parentCommitIDs) > 1 { // Check if tree matches last commit stdOut, err := r.executor("diff-tree", "--no-commit-id", "--name-only", "-r", parentCommitIDs[len(parentCommitIDs)-1].String(), commitID.String()).executeString() if err != nil { return nil, fmt.Errorf("unable to diff commit against last parent commit: %w", err) } if stdOut == "" { return nil, nil } pathSet := map[string]bool{} for _, parentCommitID := range parentCommitIDs { stdOut, err := r.executor("diff-tree", "--no-commit-id", "--name-only", "-r", parentCommitID.String(), commitID.String()).executeString() if err != nil { return nil, fmt.Errorf("unable to diff commit against parent: %w", err) } if stdOut == "" { continue } paths := strings.Split(stdOut, "\n") for _, path := range paths { if path == "" { continue } pathSet[path] = true } } paths := make([]string, 0, len(pathSet)) for path := range pathSet { paths = append(paths, path) } sort.Slice(paths, func(i, j int) bool { return paths[i] < paths[j] }) return paths, nil } stdOut, err := r.executor("diff-tree", "--no-commit-id", "--name-only", "-r", fmt.Sprintf("%s~1", commitID.String()), commitID.String()).executeString() if err != nil { return nil, fmt.Errorf("unable to diff commit against parent: %w", err) } if stdOut == "" { return nil, nil } paths := strings.Split(stdOut, "\n") return paths, nil } gittuf-0.9.0/internal/gitinterface/changes_test.go000066400000000000000000000223031475150141000222760ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package gitinterface import ( "fmt" "testing" "github.com/stretchr/testify/assert" ) func TestGetFilePathsChangedByCommitRepository(t *testing.T) { tmpDir := t.TempDir() repo := CreateTestGitRepository(t, tmpDir, false) treeBuilder := NewTreeBuilder(repo) blobIDs := []Hash{} for i := 0; i < 3; i++ { blobID, err := repo.WriteBlob([]byte(fmt.Sprintf("%d", i))) if err != nil { t.Fatal(err) } blobIDs = append(blobIDs, blobID) } emptyTree, err := treeBuilder.WriteTreeFromEntries(nil) if err != nil { t.Fatal(err) } // In each of the tests below, repo.Commit uses the test name as a ref // This allows us to use a single repo in all the tests without interference // For example, if we use a single repo and a single ref (say main), the test that // expects a commit with no parents will have a parent because of a commit created // in a previous test t.Run("modify single file", func(t *testing.T) { treeA, err := treeBuilder.WriteTreeFromEntries([]TreeEntry{NewEntryBlob("a", blobIDs[0])}) if err != nil { t.Fatal(err) } treeB, err := treeBuilder.WriteTreeFromEntries([]TreeEntry{NewEntryBlob("a", blobIDs[1])}) if err != nil { t.Fatal(err) } _, err = repo.Commit(treeA, testNameToRefName(t.Name()), "Test commit\n", false) if err != nil { t.Fatal(err) } cB, err := repo.Commit(treeB, testNameToRefName(t.Name()), "Test commit\n", false) if err != nil { t.Fatal(err) } diffs, err := repo.GetFilePathsChangedByCommit(cB) assert.Nil(t, err) assert.Equal(t, []string{"a"}, diffs) }) 
t.Run("rename single file", func(t *testing.T) { treeA, err := treeBuilder.WriteTreeFromEntries([]TreeEntry{NewEntryBlob("a", blobIDs[0])}) if err != nil { t.Fatal(err) } treeB, err := treeBuilder.WriteTreeFromEntries([]TreeEntry{NewEntryBlob("b", blobIDs[0])}) if err != nil { t.Fatal(err) } _, err = repo.Commit(treeA, testNameToRefName(t.Name()), "Test commit\n", false) if err != nil { t.Fatal(err) } cB, err := repo.Commit(treeB, testNameToRefName(t.Name()), "Test commit\n", false) if err != nil { t.Fatal(err) } diffs, err := repo.GetFilePathsChangedByCommit(cB) assert.Nil(t, err) assert.Equal(t, []string{"a", "b"}, diffs) }) t.Run("swap two files around", func(t *testing.T) { treeA, err := treeBuilder.WriteTreeFromEntries([]TreeEntry{NewEntryBlob("a", blobIDs[0]), NewEntryBlob("b", blobIDs[1])}) if err != nil { t.Fatal(err) } treeB, err := treeBuilder.WriteTreeFromEntries([]TreeEntry{NewEntryBlob("a", blobIDs[1]), NewEntryBlob("b", blobIDs[0])}) if err != nil { t.Fatal(err) } _, err = repo.Commit(treeA, testNameToRefName(t.Name()), "Test commit\n", false) if err != nil { t.Fatal(err) } cB, err := repo.Commit(treeB, testNameToRefName(t.Name()), "Test commit\n", false) if err != nil { t.Fatal(err) } diffs, err := repo.GetFilePathsChangedByCommit(cB) assert.Nil(t, err) assert.Equal(t, []string{"a", "b"}, diffs) }) t.Run("create new file", func(t *testing.T) { treeA, err := treeBuilder.WriteTreeFromEntries([]TreeEntry{NewEntryBlob("a", blobIDs[0])}) if err != nil { t.Fatal(err) } treeB, err := treeBuilder.WriteTreeFromEntries([]TreeEntry{NewEntryBlob("a", blobIDs[0]), NewEntryBlob("b", blobIDs[1])}) if err != nil { t.Fatal(err) } _, err = repo.Commit(treeA, testNameToRefName(t.Name()), "Test commit\n", false) if err != nil { t.Fatal(err) } cB, err := repo.Commit(treeB, testNameToRefName(t.Name()), "Test commit\n", false) if err != nil { t.Fatal(err) } diffs, err := repo.GetFilePathsChangedByCommit(cB) assert.Nil(t, err) assert.Equal(t, []string{"b"}, diffs) }) t.Run("delete file", func(t *testing.T) { treeA, err := treeBuilder.WriteTreeFromEntries([]TreeEntry{NewEntryBlob("a", blobIDs[0]), NewEntryBlob("b", blobIDs[1])}) if err != nil { t.Fatal(err) } treeB, err := treeBuilder.WriteTreeFromEntries([]TreeEntry{NewEntryBlob("a", blobIDs[0])}) if err != nil { t.Fatal(err) } _, err = repo.Commit(treeA, testNameToRefName(t.Name()), "Test commit\n", false) if err != nil { t.Fatal(err) } cB, err := repo.Commit(treeB, testNameToRefName(t.Name()), "Test commit\n", false) if err != nil { t.Fatal(err) } diffs, err := repo.GetFilePathsChangedByCommit(cB) assert.Nil(t, err) assert.Equal(t, []string{"b"}, diffs) }) t.Run("modify file and create new file", func(t *testing.T) { treeA, err := treeBuilder.WriteTreeFromEntries([]TreeEntry{NewEntryBlob("a", blobIDs[0])}) if err != nil { t.Fatal(err) } treeB, err := treeBuilder.WriteTreeFromEntries([]TreeEntry{NewEntryBlob("a", blobIDs[2]), NewEntryBlob("b", blobIDs[1])}) if err != nil { t.Fatal(err) } _, err = repo.Commit(treeA, testNameToRefName(t.Name()), "Test commit\n", false) if err != nil { t.Fatal(err) } cB, err := repo.Commit(treeB, testNameToRefName(t.Name()), "Test commit\n", false) if err != nil { t.Fatal(err) } diffs, err := repo.GetFilePathsChangedByCommit(cB) assert.Nil(t, err) assert.Equal(t, []string{"a", "b"}, diffs) }) t.Run("no parent", func(t *testing.T) { treeA, err := treeBuilder.WriteTreeFromEntries([]TreeEntry{NewEntryBlob("a", blobIDs[0])}) if err != nil { t.Fatal(err) } cA, err := repo.Commit(treeA, testNameToRefName(t.Name()), "Test 
commit\n", false) if err != nil { t.Fatal(err) } diffs, err := repo.GetFilePathsChangedByCommit(cA) assert.Nil(t, err) assert.Equal(t, []string{"a"}, diffs) }) t.Run("merge commit with commit matching parent", func(t *testing.T) { treeA, err := treeBuilder.WriteTreeFromEntries([]TreeEntry{NewEntryBlob("a", blobIDs[0])}) if err != nil { t.Fatal(err) } treeB, err := treeBuilder.WriteTreeFromEntries([]TreeEntry{NewEntryBlob("a", blobIDs[1])}) if err != nil { t.Fatal(err) } mainBranch := testNameToRefName(t.Name()) featureBranch := testNameToRefName(t.Name() + " feature branch") // Write common commit for both branches cCommon, err := repo.Commit(emptyTree, mainBranch, "Initial commit\n", false) if err != nil { t.Fatal(err) } if err := repo.SetReference(featureBranch, cCommon); err != nil { t.Fatal(err) } cA, err := repo.Commit(treeA, mainBranch, "Test commit\n", false) if err != nil { t.Fatal(err) } cB, err := repo.Commit(treeB, featureBranch, "Test commit\n", false) if err != nil { t.Fatal(err) } // Create a merge commit with two parents cM := repo.commitWithParents(t, treeB, []Hash{cA, cB}, "Merge commit\n", false) diffs, err := repo.GetFilePathsChangedByCommit(cM) assert.Nil(t, err) assert.Nil(t, diffs) }) t.Run("merge commit with no matching parent", func(t *testing.T) { treeA, err := treeBuilder.WriteTreeFromEntries([]TreeEntry{NewEntryBlob("a", blobIDs[0])}) if err != nil { t.Fatal(err) } treeB, err := treeBuilder.WriteTreeFromEntries([]TreeEntry{NewEntryBlob("b", blobIDs[1])}) if err != nil { t.Fatal(err) } treeC, err := treeBuilder.WriteTreeFromEntries([]TreeEntry{NewEntryBlob("c", blobIDs[2])}) if err != nil { t.Fatal(err) } mainBranch := testNameToRefName(t.Name()) featureBranch := testNameToRefName(t.Name() + " feature branch") // Write common commit for both branches cCommon, err := repo.Commit(emptyTree, mainBranch, "Initial commit\n", false) if err != nil { t.Fatal(err) } if err := repo.SetReference(featureBranch, cCommon); err != nil { t.Fatal(err) } cA, err := repo.Commit(treeA, mainBranch, "Test commit\n", false) if err != nil { t.Fatal(err) } cB, err := repo.Commit(treeB, featureBranch, "Test commit\n", false) if err != nil { t.Fatal(err) } // Create a merge commit with two parents and a different tree cM := repo.commitWithParents(t, treeC, []Hash{cA, cB}, "Merge commit\n", false) diffs, err := repo.GetFilePathsChangedByCommit(cM) assert.Nil(t, err) assert.Equal(t, []string{"a", "b", "c"}, diffs) }) t.Run("merge commit with overlapping parent trees", func(t *testing.T) { treeA, err := treeBuilder.WriteTreeFromEntries([]TreeEntry{NewEntryBlob("a", blobIDs[0])}) if err != nil { t.Fatal(err) } treeB, err := treeBuilder.WriteTreeFromEntries([]TreeEntry{NewEntryBlob("a", blobIDs[1])}) if err != nil { t.Fatal(err) } treeC, err := treeBuilder.WriteTreeFromEntries([]TreeEntry{NewEntryBlob("a", blobIDs[2])}) if err != nil { t.Fatal(err) } mainBranch := testNameToRefName(t.Name()) featureBranch := testNameToRefName(t.Name() + " feature branch") // Write common commit for both branches cCommon, err := repo.Commit(emptyTree, mainBranch, "Initial commit\n", false) if err != nil { t.Fatal(err) } if err := repo.SetReference(featureBranch, cCommon); err != nil { t.Fatal(err) } cA, err := repo.Commit(treeA, mainBranch, "Test commit\n", false) if err != nil { t.Fatal(err) } cB, err := repo.Commit(treeB, featureBranch, "Test commit\n", false) if err != nil { t.Fatal(err) } // Create a merge commit with two parents and an overlapping tree cM := repo.commitWithParents(t, treeC, []Hash{cA, cB}, 
"Merge commit\n", false) diffs, err := repo.GetFilePathsChangedByCommit(cM) assert.Nil(t, err) assert.Equal(t, []string{"a"}, diffs) }) } gittuf-0.9.0/internal/gitinterface/commit.go000066400000000000000000000231441475150141000211230ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package gitinterface import ( "context" "errors" "fmt" "io" "strings" "testing" "time" "github.com/gittuf/gittuf/internal/signerverifier/gpg" "github.com/gittuf/gittuf/internal/signerverifier/sigstore" "github.com/gittuf/gittuf/internal/signerverifier/ssh" "github.com/go-git/go-git/v5/plumbing" "github.com/go-git/go-git/v5/plumbing/object" "github.com/go-git/go-git/v5/storage/memory" "github.com/secure-systems-lab/go-securesystemslib/signerverifier" ) // Commit creates a new commit in the repo and sets targetRef's to the commit. // This function is meant only for gittuf references, and therefore it does not // mutate repository worktrees. func (r *Repository) Commit(treeID Hash, targetRef, message string, sign bool) (Hash, error) { currentGitID, err := r.GetReference(targetRef) if err != nil { if !errors.Is(err, ErrReferenceNotFound) { return ZeroHash, err } } args := []string{"commit-tree", "-m", message} if !currentGitID.IsZero() { args = append(args, "-p", currentGitID.String()) } if sign { args = append(args, "-S") } args = append(args, treeID.String()) now := r.clock.Now().Format(time.RFC3339) env := []string{fmt.Sprintf("%s=%s", committerTimeKey, now), fmt.Sprintf("%s=%s", authorTimeKey, now)} stdOut, err := r.executor(args...).withEnv(env...).executeString() if err != nil { return ZeroHash, fmt.Errorf("unable to create commit: %w", err) } commitID, err := NewHash(stdOut) if err != nil { return ZeroHash, fmt.Errorf("received invalid commit ID: %w", err) } return commitID, r.CheckAndSetReference(targetRef, commitID, currentGitID) } // CommitUsingSpecificKey creates a new commit in the repository for the // specified parameters. The commit is signed using the PEM encoded SSH or GPG // private key. This function is expected for use in tests and gittuf's // developer mode. In standard workflows, Commit() must be used instead which // infers the signing key from the user's Git config. 
func (r *Repository) CommitUsingSpecificKey(treeID Hash, targetRef, message string, signingKeyPEMBytes []byte) (Hash, error) { gitConfig, err := r.GetGitConfig() if err != nil { return ZeroHash, err } commitMetadata := object.Signature{ Name: gitConfig["user.name"], Email: gitConfig["user.email"], When: r.clock.Now(), } commit := &object.Commit{ Author: commitMetadata, Committer: commitMetadata, TreeHash: plumbing.NewHash(treeID.String()), Message: message, } refTip, err := r.GetReference(targetRef) if err != nil { if !errors.Is(err, ErrReferenceNotFound) { return ZeroHash, err } } if !refTip.IsZero() { commit.ParentHashes = []plumbing.Hash{plumbing.NewHash(refTip.String())} } commitContents, err := getCommitBytesWithoutSignature(commit) if err != nil { return ZeroHash, err } signature, err := signGitObjectUsingKey(commitContents, signingKeyPEMBytes) if err != nil { return ZeroHash, err } commit.PGPSignature = signature goGitRepo, err := r.GetGoGitRepository() if err != nil { return ZeroHash, err } obj := goGitRepo.Storer.NewEncodedObject() if err := commit.Encode(obj); err != nil { return ZeroHash, err } commitID, err := goGitRepo.Storer.SetEncodedObject(obj) if err != nil { return ZeroHash, err } commitIDHash, err := NewHash(commitID.String()) if err != nil { return ZeroHash, err } return commitIDHash, r.CheckAndSetReference(targetRef, commitIDHash, refTip) } // commitWithParents creates a new commit in the repo but does not update any // references. It is only meant to be used for tests, and therefore accepts // specific parent commit IDs. func (r *Repository) commitWithParents(t *testing.T, treeID Hash, parentIDs []Hash, message string, sign bool) Hash { //nolint:unparam args := []string{"commit-tree", "-m", message} for _, commitID := range parentIDs { args = append(args, "-p", commitID.String()) } if sign { args = append(args, "-S") } args = append(args, treeID.String()) now := r.clock.Now().Format(time.RFC3339) env := []string{fmt.Sprintf("%s=%s", committerTimeKey, now), fmt.Sprintf("%s=%s", authorTimeKey, now)} stdOut, err := r.executor(args...).withEnv(env...).executeString() if err != nil { t.Fatal(fmt.Errorf("unable to create commit: %w", err)) } commitID, err := NewHash(stdOut) if err != nil { t.Fatal(fmt.Errorf("received invalid commit ID: %w", err)) } return commitID } // verifyCommitSignature verifies a signature for the specified commit using // the provided public key. 
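// The key's KeyType selects the verification path in the switch below: GPG keys
// use go-git's commit.Verify, while SSH and Sigstore (gitsign) keys are checked
// against the commit bytes encoded without their signature. Any other key type
// results in ErrUnknownSigningMethod.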
func (r *Repository) verifyCommitSignature(ctx context.Context, commitID Hash, key *signerverifier.SSLibKey) error { goGitRepo, err := r.GetGoGitRepository() if err != nil { return fmt.Errorf("error opening repository: %w", err) } commit, err := goGitRepo.CommitObject(plumbing.NewHash(commitID.String())) if err != nil { return fmt.Errorf("unable to load commit object: %w", err) } switch key.KeyType { case gpg.KeyType: if _, err := commit.Verify(key.KeyVal.Public); err != nil { return ErrIncorrectVerificationKey } return nil case ssh.KeyType: commitContents, err := getCommitBytesWithoutSignature(commit) if err != nil { return errors.Join(ErrVerifyingSSHSignature, err) } commitSignature := []byte(commit.PGPSignature) if err := verifySSHKeySignature(ctx, key, commitContents, commitSignature); err != nil { return errors.Join(ErrIncorrectVerificationKey, err) } return nil case sigstore.KeyType: commitContents, err := getCommitBytesWithoutSignature(commit) if err != nil { return errors.Join(ErrVerifyingSigstoreSignature, err) } commitSignature := []byte(commit.PGPSignature) if err := verifyGitsignSignature(ctx, r, key, commitContents, commitSignature); err != nil { return errors.Join(ErrIncorrectVerificationKey, err) } return nil } return ErrUnknownSigningMethod } // GetCommitMessage returns the commit's message. func (r *Repository) GetCommitMessage(commitID Hash) (string, error) { if err := r.ensureIsCommit(commitID); err != nil { return "", err } commitMessage, err := r.executor("show", "-s", "--format=%B", commitID.String()).executeString() if err != nil { return "", fmt.Errorf("unable to identify message for commit '%s': %w", commitID.String(), err) } return commitMessage, nil } // GetCommitTreeID returns the commit's Git tree ID. func (r *Repository) GetCommitTreeID(commitID Hash) (Hash, error) { if err := r.ensureIsCommit(commitID); err != nil { return ZeroHash, err } stdOut, err := r.executor("rev-parse", fmt.Sprintf("%s^{tree}", commitID.String())).executeString() if err != nil { return ZeroHash, fmt.Errorf("unable to identify tree for commit '%s': %w", commitID.String(), err) } hash, err := NewHash(stdOut) if err != nil { return ZeroHash, fmt.Errorf("invalid tree for commit ID '%s': %w", commitID, err) } return hash, nil } // GetCommitParentIDs returns the commit's parent commit IDs. func (r *Repository) GetCommitParentIDs(commitID Hash) ([]Hash, error) { if err := r.ensureIsCommit(commitID); err != nil { return nil, err } stdOut, err := r.executor("rev-parse", fmt.Sprintf("%s^@", commitID.String())).executeString() if err != nil { return nil, fmt.Errorf("unable to identify parents for commit '%s': %w", commitID.String(), err) } commitIDSplit := strings.Split(stdOut, "\n") if len(commitIDSplit) == 0 { return nil, nil } commitIDs := []Hash{} for _, commitID := range commitIDSplit { if commitID == "" { continue } hash, err := NewHash(commitID) if err != nil { return nil, fmt.Errorf("invalid parent commit ID '%s': %w", commitID, err) } commitIDs = append(commitIDs, hash) } if len(commitIDs) == 0 { return nil, nil } return commitIDs, nil } // KnowsCommit returns true if the `testCommit` is a descendant of the // `ancestorCommit`. That is, the testCommit _knows_ the ancestorCommit as it // has a path in the commit graph to the ancestorCommit.
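// Conceptually (an illustrative equivalent, not a quote from the docs), this
// boils down to:
//
//	git merge-base --is-ancestor <ancestorCommitID> <testCommitID>
//
// which succeeds only when ancestorCommitID is reachable from testCommitID.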
func (r *Repository) KnowsCommit(testCommitID, ancestorCommitID Hash) (bool, error) { if err := r.ensureIsCommit(testCommitID); err != nil { return false, err } if err := r.ensureIsCommit(ancestorCommitID); err != nil { return false, err } _, err := r.executor("merge-base", "--is-ancestor", ancestorCommitID.String(), testCommitID.String()).executeString() return err == nil, nil } // GetCommonAncestor finds the common ancestor commit for the two supplied // commits. func (r *Repository) GetCommonAncestor(commitAID, commitBID Hash) (Hash, error) { if err := r.ensureIsCommit(commitAID); err != nil { return nil, err } if err := r.ensureIsCommit(commitBID); err != nil { return nil, err } mergeBase, err := r.executor("merge-base", commitAID.String(), commitBID.String()).executeString() if err != nil { return nil, err } mergeBaseID, err := NewHash(mergeBase) if err != nil { return nil, fmt.Errorf("received invalid commit ID: %w", err) } return mergeBaseID, nil } // ensureIsCommit is a helper to check that the ID represents a Git commit // object. func (r *Repository) ensureIsCommit(commitID Hash) error { objType, err := r.executor("cat-file", "-t", commitID.String()).executeString() if err != nil { return fmt.Errorf("unable to inspect if object is commit: %w", err) } else if objType != "commit" { return fmt.Errorf("requested Git ID '%s' is not a commit object", commitID.String()) } return nil } func getCommitBytesWithoutSignature(commit *object.Commit) ([]byte, error) { commitEncoded := memory.NewStorage().NewEncodedObject() if err := commit.EncodeWithoutSignature(commitEncoded); err != nil { return nil, err } r, err := commitEncoded.Reader() if err != nil { return nil, err } return io.ReadAll(r) } gittuf-0.9.0/internal/gitinterface/commit_test.go000066400000000000000000000404341475150141000221630ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package gitinterface import ( "bytes" "context" "os" "path/filepath" "strings" "testing" "time" "github.com/ProtonMail/go-crypto/openpgp" "github.com/gittuf/gittuf/internal/signerverifier/gpg" "github.com/gittuf/gittuf/internal/signerverifier/ssh" artifacts "github.com/gittuf/gittuf/internal/testartifacts" "github.com/go-git/go-git/v5/plumbing" "github.com/go-git/go-git/v5/plumbing/object" "github.com/stretchr/testify/assert" ) func TestRepositoryCommit(t *testing.T) { tempDir := t.TempDir() repo := CreateTestGitRepository(t, tempDir, false) refName := "refs/heads/main" treeBuilder := NewTreeBuilder(repo) // Write empty tree emptyTreeID, err := treeBuilder.WriteTreeFromEntries(nil) if err != nil { t.Fatal(err) } // Write second tree blobID, err := repo.WriteBlob([]byte("Hello, world!\n")) if err != nil { t.Fatal(err) } treeWithContentsID, err := treeBuilder.WriteTreeFromEntries([]TreeEntry{NewEntryBlob("README.md", blobID)}) if err != nil { t.Fatal(err) } // Create initial commit with no tree expectedInitialCommitID := "648c569f3958b899e832f04750de52cf5d0db2fa" commitID, err := repo.Commit(emptyTreeID, refName, "Initial commit\n", false) assert.Nil(t, err) assert.Equal(t, expectedInitialCommitID, commitID.String()) refHead, err := repo.GetReference(refName) if err != nil { t.Fatal(err) } assert.Equal(t, expectedInitialCommitID, refHead.String()) // Create second commit with tree expectedSecondCommitID := "3d7200c158ccfedf35a68a7d24842d60cac4ec0d" commitID, err = repo.Commit(treeWithContentsID, refName, "Add README\n", false) assert.Nil(t, err) assert.Equal(t, expectedSecondCommitID, commitID.String()) 
refHead, err = repo.GetReference(refName) if err != nil { t.Fatal(err) } assert.Equal(t, expectedSecondCommitID, refHead.String()) // Create third commit with same tree but sign this time expectedThirdCommitID := "eed43c23f781ddc10359ce25e0fc486a000a8c9f" commitID, err = repo.Commit(treeWithContentsID, refName, "Signing this commit\n", true) assert.Nil(t, err) assert.Equal(t, expectedThirdCommitID, commitID.String()) refHead, err = repo.GetReference(refName) if err != nil { t.Fatal(err) } assert.Equal(t, expectedThirdCommitID, refHead.String()) } func TestRepositoryCommitUsingSpecificKey(t *testing.T) { tempDir := t.TempDir() repo := CreateTestGitRepository(t, tempDir, false) refName := "refs/heads/main" treeBuilder := NewTreeBuilder(repo) // Write empty tree emptyTreeID, err := treeBuilder.WriteTreeFromEntries(nil) if err != nil { t.Fatal(err) } // Write second tree blobID, err := repo.WriteBlob([]byte("Hello, world!\n")) if err != nil { t.Fatal(err) } treeWithContentsID, err := treeBuilder.WriteTreeFromEntries([]TreeEntry{NewEntryBlob("README.md", blobID)}) if err != nil { t.Fatal(err) } // Create initial commit with no tree expectedInitialCommitID := "b218890d607cdcea53ebf6c640748b4b1c8015ca" commitID, err := repo.CommitUsingSpecificKey(emptyTreeID, refName, "Initial commit\n", artifacts.SSHED25519Private) assert.Nil(t, err) assert.Equal(t, expectedInitialCommitID, commitID.String()) refHead, err := repo.GetReference(refName) if err != nil { t.Fatal(err) } assert.Equal(t, expectedInitialCommitID, refHead.String()) // Create second commit with tree expectedSecondCommitID := "2b3f8b1f6af0d0d3c37130ba4d054ff4c2e95a3a" commitID, err = repo.CommitUsingSpecificKey(treeWithContentsID, refName, "Add README\n", artifacts.SSHED25519Private) assert.Nil(t, err) assert.Equal(t, expectedSecondCommitID, commitID.String()) refHead, err = repo.GetReference(refName) if err != nil { t.Fatal(err) } assert.Equal(t, expectedSecondCommitID, refHead.String()) } func TestCommitUsingSpecificKey(t *testing.T) { tempDir := t.TempDir() repo := CreateTestGitRepository(t, tempDir, false) refName := "refs/heads/main" treeBuilder := NewTreeBuilder(repo) // Write empty tree emptyTreeID, err := treeBuilder.WriteTreeFromEntries(nil) if err != nil { t.Fatal(err) } // Write second tree blobID, err := repo.WriteBlob([]byte("Hello, world!\n")) if err != nil { t.Fatal(err) } treeWithContentsID, err := treeBuilder.WriteTreeFromEntries([]TreeEntry{NewEntryBlob("README.md", blobID)}) if err != nil { t.Fatal(err) } // Create initial commit with no tree expectedInitialCommitID := "648c569f3958b899e832f04750de52cf5d0db2fa" commitID, err := repo.Commit(emptyTreeID, refName, "Initial commit\n", false) assert.Nil(t, err) assert.Equal(t, expectedInitialCommitID, commitID.String()) refHead, err := repo.GetReference(refName) if err != nil { t.Fatal(err) } assert.Equal(t, expectedInitialCommitID, refHead.String()) privateKey := artifacts.SSHRSAPrivate // Create publicKey keyPath := filepath.Join(tempDir, "ssh-key") if err := os.WriteFile(keyPath, artifacts.SSHRSAPublicSSH, 0o600); err != nil { t.Fatal(err) } publicKey, err := ssh.NewKeyFromFile(keyPath) if err != nil { t.Fatal(err) } // Create second commit with tree expectedSecondCommitID := "11020a7c78c4f903d0592ec2e8f73d00a17ec47e" commitID, err = repo.CommitUsingSpecificKey(treeWithContentsID, refName, "Add README\n", privateKey) assert.Nil(t, err) // Verify commit signature using publicKey err = repo.verifyCommitSignature(context.Background(), commitID, publicKey) assert.Nil(t, err) 
assert.Equal(t, expectedSecondCommitID, commitID.String()) } func TestRepositoryVerifyCommit(t *testing.T) { tempDir := t.TempDir() repo := CreateTestGitRepository(t, tempDir, false) treeBuilder := NewTreeBuilder(repo) // Write empty tree emptyTreeID, err := treeBuilder.WriteTreeFromEntries(nil) if err != nil { t.Fatal(err) } sshSignedCommitID, err := repo.Commit(emptyTreeID, "refs/heads/main", "Initial commit\n", true) if err != nil { t.Fatal(err) } gpgSignedCommitID := createTestGPGSignedCommit(t, repo) // FIXME: fix gitsign testing gitsignSignedCommitID := createTestSigstoreSignedCommit(t, repo) keyDir := t.TempDir() keyPath := filepath.Join(keyDir, "ssh-key") if err := os.WriteFile(keyPath, artifacts.SSHRSAPublicSSH, 0o600); err != nil { t.Fatal(err) } sshKey, err := ssh.NewKeyFromFile(keyPath) if err != nil { t.Fatal(err) } gpgKey, err := gpg.LoadGPGKeyFromBytes(artifacts.GPGKey1Public) if err != nil { t.Fatal(err) } t.Run("ssh signed commit, verify with ssh key", func(t *testing.T) { err = repo.verifyCommitSignature(context.Background(), sshSignedCommitID, sshKey) assert.Nil(t, err) }) t.Run("ssh signed commit, verify with gpg key", func(t *testing.T) { err = repo.verifyCommitSignature(context.Background(), sshSignedCommitID, gpgKey) assert.ErrorIs(t, err, ErrIncorrectVerificationKey) }) t.Run("gpg signed commit, verify with gpg key", func(t *testing.T) { err = repo.verifyCommitSignature(context.Background(), gpgSignedCommitID, gpgKey) assert.Nil(t, err) }) t.Run("gpg signed commit, verify with ssh key", func(t *testing.T) { err = repo.verifyCommitSignature(context.Background(), gpgSignedCommitID, sshKey) assert.ErrorIs(t, err, ErrIncorrectVerificationKey) }) t.Run("gitsign signed commit, verify with ssh key", func(t *testing.T) { err = repo.verifyCommitSignature(context.Background(), gitsignSignedCommitID, sshKey) assert.ErrorIs(t, err, ErrIncorrectVerificationKey) }) } func TestKnowsCommit(t *testing.T) { tmpDir := t.TempDir() repo := CreateTestGitRepository(t, tmpDir, false) refName := "refs/heads/main" treeBuilder := NewTreeBuilder(repo) // Write empty tree emptyTreeID, err := treeBuilder.WriteTreeFromEntries(nil) if err != nil { t.Fatal(err) } firstCommitID, err := repo.Commit(emptyTreeID, refName, "First commit", false) if err != nil { t.Fatal(err) } secondCommitID, err := repo.Commit(emptyTreeID, refName, "Second commit", false) if err != nil { t.Fatal(err) } unknownCommitID, err := repo.Commit(emptyTreeID, "refs/heads/unknown", "Unknown commit", false) if err != nil { t.Fatal(err) } t.Run("check if second commit knows first", func(t *testing.T) { knows, err := repo.KnowsCommit(secondCommitID, firstCommitID) assert.Nil(t, err) assert.True(t, knows) }) t.Run("check that first commit does not know second", func(t *testing.T) { knows, err := repo.KnowsCommit(firstCommitID, secondCommitID) assert.Nil(t, err) assert.False(t, knows) }) t.Run("check that both commits know themselves", func(t *testing.T) { knows, err := repo.KnowsCommit(firstCommitID, firstCommitID) assert.Nil(t, err) assert.True(t, knows) knows, err = repo.KnowsCommit(secondCommitID, secondCommitID) assert.Nil(t, err) assert.True(t, knows) }) t.Run("check that an unknown commit can't know a known commit", func(t *testing.T) { knows, _ := repo.KnowsCommit(unknownCommitID, firstCommitID) assert.False(t, knows) }) } func createTestGPGSignedCommit(t *testing.T, repo *Repository) Hash { t.Helper() goGitRepo, err := repo.GetGoGitRepository() if err != nil { t.Fatal(err) } testCommit := &object.Commit{ Author: 
object.Signature{ Name: testName, Email: testEmail, When: testClock.Now(), }, Committer: object.Signature{ Name: testName, Email: testEmail, When: testClock.Now(), }, Message: "Test commit\n", TreeHash: plumbing.ZeroHash, } commitEncoded := goGitRepo.Storer.NewEncodedObject() if err := testCommit.EncodeWithoutSignature(commitEncoded); err != nil { t.Fatal(err) } r, err := commitEncoded.Reader() if err != nil { t.Fatal(err) } keyring, err := openpgp.ReadArmoredKeyRing(bytes.NewReader(artifacts.GPGKey1Private)) if err != nil { t.Fatal(err) } sig := new(strings.Builder) if err := openpgp.ArmoredDetachSign(sig, keyring[0], r, nil); err != nil { t.Fatal(err) } testCommit.PGPSignature = sig.String() // Re-encode with the signature commitEncoded = goGitRepo.Storer.NewEncodedObject() if err := testCommit.Encode(commitEncoded); err != nil { t.Fatal(err) } commitID, err := goGitRepo.Storer.SetEncodedObject(commitEncoded) if err != nil { t.Fatal(err) } commitHash, err := NewHash(commitID.String()) if err != nil { t.Fatal(err) } return commitHash } func createTestSigstoreSignedCommit(t *testing.T, repo *Repository) Hash { t.Helper() goGitRepo, err := repo.GetGoGitRepository() if err != nil { t.Fatal(err) } testCommit := &object.Commit{ Hash: plumbing.NewHash("d6b230478965e25477263aa65f1ca6d23d0c0d97"), Author: object.Signature{ Name: "Aditya Sirish", Email: "aditya@saky.in", When: time.Date(2023, time.August, 1, 15, 44, 23, 0, time.FixedZone("", -4*3600)), }, Committer: object.Signature{ Name: "Aditya Sirish", Email: "aditya@saky.in", When: time.Date(2023, time.August, 1, 15, 44, 23, 0, time.FixedZone("", -4*3600)), }, PGPSignature: `-----BEGIN SIGNED MESSAGE----- MIIEMAYJKoZIhvcNAQcCoIIEITCCBB0CAQExDTALBglghkgBZQMEAgEwCwYJKoZI hvcNAQcBoIIC0DCCAswwggJToAMCAQICFHIJCrBVHxoHlGos++k1xJxcElGaMAoG CCqGSM49BAMDMDcxFTATBgNVBAoTDHNpZ3N0b3JlLmRldjEeMBwGA1UEAxMVc2ln c3RvcmUtaW50ZXJtZWRpYXRlMB4XDTIzMDgwMTE5NDQzMVoXDTIzMDgwMTE5NTQz MVowADBZMBMGByqGSM49AgEGCCqGSM49AwEHA0IABD8d752TJfGtANVYoiJJn+o6 JPKj5NwEZs1AcVRT2qElikVun5t+bQ07iDFa/Xiun5ytZrEK2YJVgqdntLd6hSOj ggFyMIIBbjAOBgNVHQ8BAf8EBAMCB4AwEwYDVR0lBAwwCgYIKwYBBQUHAwMwHQYD VR0OBBYEFAuYzgyBA01YSSN1v0fYenGo7+PcMB8GA1UdIwQYMBaAFN/T6c9WJBGW +ajY6ShVosYuGGQ/MBwGA1UdEQEB/wQSMBCBDmFkaXR5YUBzYWt5LmluMCwGCisG AQQBg78wAQEEHmh0dHBzOi8vZ2l0aHViLmNvbS9sb2dpbi9vYXV0aDAuBgorBgEE AYO/MAEIBCAMHmh0dHBzOi8vZ2l0aHViLmNvbS9sb2dpbi9vYXV0aDCBigYKKwYB BAHWeQIEAgR8BHoAeAB2AN09MGrGxxEyYxkeHJlnNwKiSl643jyt/4eKcoAvKe6O AAABibKhcJgAAAQDAEcwRQIgcWuz6NhFgdL0fNni6j0SOQnAgFpPEaN8jDH70mbD uPMCIQCX8koEnIX4c9crMT1hfoBBf1Z/CHJ6HLLHpQwWfEUMIzAKBggqhkjOPQQD AwNnADBkAjBozIBaBtEu7JUyYLH7Ly698E0o8DdIOmqcUMUYWNC6zyJVdrL5gAla mQSxfObSQasCMHQuw8youTjmFJXT7pNOYX4DW25knt+6P+W/m6zwcRRe3dMjmUAB gdBJb32+XXJMRDGCASYwggEiAgEBME8wNzEVMBMGA1UEChMMc2lnc3RvcmUuZGV2 MR4wHAYDVQQDExVzaWdzdG9yZS1pbnRlcm1lZGlhdGUCFHIJCrBVHxoHlGos++k1 xJxcElGaMAsGCWCGSAFlAwQCAaBpMBgGCSqGSIb3DQEJAzELBgkqhkiG9w0BBwEw HAYJKoZIhvcNAQkFMQ8XDTIzMDgwMTE5NDQzMlowLwYJKoZIhvcNAQkEMSIEIBe6 VHcVlkO8jRm/fbUipwxwxNaI7UFDAL38Jl8eUj/5MAoGCCqGSM49BAMCBEgwRgIh AIYiRbnVeWjjgX2XwljDryzQN5RhUQaVH/AcUj+tbvWxAiEAhm9l3BU58tQsgyJW oYBpMWLgg6AUzpxx9mITZ2EKr4c= -----END SIGNED MESSAGE----- `, Message: "Test commit\n", TreeHash: plumbing.NewHash("4b825dc642cb6eb9a060e54bf8d69288fbee4904"), } commitEncoded := goGitRepo.Storer.NewEncodedObject() if err := testCommit.EncodeWithoutSignature(commitEncoded); err != nil { t.Fatal(err) } commitID, err := goGitRepo.Storer.SetEncodedObject(commitEncoded) if err != nil { t.Fatal(err) } commitHash, err := 
NewHash(commitID.String()) if err != nil { t.Fatal(err) } return commitHash } func TestRepositoryGetCommitMessage(t *testing.T) { tempDir := t.TempDir() repo := CreateTestGitRepository(t, tempDir, false) refName := "refs/heads/main" treeBuilder := NewTreeBuilder(repo) // Write empty tree emptyTreeID, err := treeBuilder.WriteTreeFromEntries(nil) if err != nil { t.Fatal(err) } message := "Initial commit" commit, err := repo.Commit(emptyTreeID, refName, message, false) if err != nil { t.Fatal(err) } commitMessage, err := repo.GetCommitMessage(commit) assert.Nil(t, err) assert.Equal(t, message, commitMessage) } func TestGetCommitTreeID(t *testing.T) { tempDir := t.TempDir() repo := CreateTestGitRepository(t, tempDir, false) refName := "refs/heads/main" treeBuilder := NewTreeBuilder(repo) // Write empty tree emptyTreeID, err := treeBuilder.WriteTreeFromEntries(nil) if err != nil { t.Fatal(err) } // Write second tree blobID, err := repo.WriteBlob([]byte("Hello, world!\n")) if err != nil { t.Fatal(err) } treeWithContentsID, err := treeBuilder.WriteTreeFromEntries([]TreeEntry{NewEntryBlob("README.md", blobID)}) if err != nil { t.Fatal(err) } // Create initial commit with no tree initialCommitID, err := repo.Commit(emptyTreeID, refName, "Initial commit\n", false) if err != nil { t.Fatal(err) } initialCommitTreeID, err := repo.GetCommitTreeID(initialCommitID) assert.Nil(t, err) assert.Equal(t, emptyTreeID, initialCommitTreeID) // Create second commit with tree secondCommitID, err := repo.Commit(treeWithContentsID, refName, "Add README\n", false) if err != nil { t.Fatal(err) } secondCommitTreeID, err := repo.GetCommitTreeID(secondCommitID) assert.Nil(t, err) assert.Equal(t, treeWithContentsID, secondCommitTreeID) } func TestGetCommitParentIDs(t *testing.T) { // TODO: test with merge commit tempDir := t.TempDir() repo := CreateTestGitRepository(t, tempDir, false) refName := "refs/heads/main" treeBuilder := NewTreeBuilder(repo) // Write empty tree emptyTreeID, err := treeBuilder.WriteTreeFromEntries(nil) if err != nil { t.Fatal(err) } // Create initial commit initialCommitID, err := repo.Commit(emptyTreeID, refName, "Initial commit\n", false) if err != nil { t.Fatal(err) } initialCommitParentIDs, err := repo.GetCommitParentIDs(initialCommitID) assert.Nil(t, err) assert.Empty(t, initialCommitParentIDs) // Create second commit secondCommitID, err := repo.Commit(emptyTreeID, refName, "Add README\n", false) if err != nil { t.Fatal(err) } secondCommitParentIDs, err := repo.GetCommitParentIDs(secondCommitID) assert.Nil(t, err) assert.Equal(t, []Hash{initialCommitID}, secondCommitParentIDs) } func TestGetCommonAncestor(t *testing.T) { tmpDir := t.TempDir() repo := CreateTestGitRepository(t, tmpDir, false) refName := "refs/heads/main" treeBuilder := NewTreeBuilder(repo) // Write empty tree emptyTreeID, err := treeBuilder.WriteTreeFromEntries(nil) if err != nil { t.Fatal(err) } initialCommitID, err := repo.Commit(emptyTreeID, refName, "Initial commit\n", false) if err != nil { t.Fatal(err) } // Add child commit A commitA, err := repo.Commit(emptyTreeID, refName, "Second commit A\n", false) if err != nil { t.Fatal(err) } // Add child commit B commitB := repo.commitWithParents(t, emptyTreeID, []Hash{initialCommitID}, "Second commit B\n", false) // Test commits, ensure we get back initial commit commonAncestor, err := repo.GetCommonAncestor(commitA, commitB) assert.Nil(t, err) assert.Equal(t, initialCommitID, commonAncestor) // Test with disjoint commit histories commitDisconnected := repo.commitWithParents(t, 
emptyTreeID, nil, "Disconnected initial commit\n", false) _, err = repo.GetCommonAncestor(commitDisconnected, commitA) assert.NotNil(t, err) } gittuf-0.9.0/internal/gitinterface/common.go000066400000000000000000000042531475150141000211230ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package gitinterface import ( "os" "os/exec" "path/filepath" "testing" "time" artifacts "github.com/gittuf/gittuf/internal/testartifacts" "github.com/jonboulle/clockwork" ) const ( testName = "Jane Doe" testEmail = "jane.doe@example.com" ) var ( testClock = clockwork.NewFakeClockAt(time.Date(1995, time.October, 26, 9, 0, 0, 0, time.UTC)) ) // CreateTestGitRepository creates a Git repository in the specified directory. // This is meant to be used by tests across gittuf packages. This helper also // sets up an ED25519 signing key that can be used to create reproducible // commits. func CreateTestGitRepository(t *testing.T, dir string, bare bool) *Repository { t.Helper() repo := setupRepository(t, dir, bare) // Set up author / committer identity if err := repo.SetGitConfig("user.name", testName); err != nil { t.Fatal(err) } if err := repo.SetGitConfig("user.email", testEmail); err != nil { t.Fatal(err) } // Set up signing via SSH key keysDir := t.TempDir() setupSigningKeys(t, keysDir) if err := repo.SetGitConfig("user.signingkey", filepath.Join(keysDir, "key.pub")); err != nil { t.Fatal(err) } if err := repo.SetGitConfig("gpg.format", "ssh"); err != nil { t.Fatal(err) } return repo } func setupRepository(t *testing.T, dir string, bare bool) *Repository { t.Helper() var gitDirPath string args := []string{"init"} if bare { args = append(args, "--bare") gitDirPath = dir } else { gitDirPath = filepath.Join(dir, ".git") } args = append(args, "-b", "main") args = append(args, dir) cmd := exec.Command(binary, args...) if err := cmd.Run(); err != nil { t.Fatal(err) } return &Repository{gitDirPath: gitDirPath, clock: testClock} } func setupSigningKeys(t *testing.T, dir string) { t.Helper() sshPrivateKey := artifacts.SSHRSAPrivate sshPublicKey := artifacts.SSHRSAPublicSSH privateKeyPath := filepath.Join(dir, "key") publicKeyPath := filepath.Join(dir, "key.pub") if err := os.WriteFile(privateKeyPath, sshPrivateKey, 0o600); err != nil { t.Fatal(err) } if err := os.WriteFile(publicKeyPath, sshPublicKey, 0o600); err != nil { t.Fatal(err) } } gittuf-0.9.0/internal/gitinterface/config.go000066400000000000000000000021031475150141000210700ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package gitinterface import ( "fmt" "strings" ) // GetGitConfig reads the applicable Git config for a repository and returns // it. The "keys" for each config are normalized to lowercase. func (r *Repository) GetGitConfig() (map[string]string, error) { stdOut, err := r.executor("config", "--get-regexp", `.*`).executeString() if err != nil { return nil, fmt.Errorf("unable to read Git config: %w", err) } config := map[string]string{} lines := strings.Split(strings.TrimSpace(stdOut), "\n") for _, line := range lines { split := strings.Split(line, " ") if len(split) < 2 { continue } config[strings.ToLower(split[0])] = strings.Join(split[1:], " ") } return config, nil } // SetGitConfig sets the specified key to the value locally for a repository. 
func (r *Repository) SetGitConfig(key, value string) error { if _, err := r.executor("config", "--local", key, value).executeString(); err != nil { return fmt.Errorf("unable to set '%s' to '%s': %w", key, value, err) } return nil } gittuf-0.9.0/internal/gitinterface/config_test.go000066400000000000000000000007321475150141000221350ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package gitinterface import ( "testing" "github.com/stretchr/testify/assert" ) func TestGetGitConfig(t *testing.T) { tmpDir := t.TempDir() repo := CreateTestGitRepository(t, tmpDir, false) // CreateTestGitRepository sets our test config config, err := repo.GetGitConfig() assert.Nil(t, err) assert.Equal(t, testName, config["user.name"]) assert.Equal(t, testEmail, config["user.email"]) } gittuf-0.9.0/internal/gitinterface/hash.go000066400000000000000000000030751475150141000205570ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package gitinterface import ( "bytes" "crypto/sha1" //nolint:gosec "crypto/sha256" "encoding/hex" "errors" ) var ( zeroSHA1HashBytes = [sha1.Size]byte{} zeroSHA256HashBytes = [sha256.Size]byte{} ) var ( ErrInvalidHashEncoding = errors.New("hash string is not hex encoded") ErrInvalidHashLength = errors.New("hash string is wrong length") ) // Hash represents a Git object hash. It is a lightweight wrapper around the // standard hex encoded representation of a SHA-1 or SHA-256 hash used by Git. type Hash []byte // String returns the hex encoded hash. func (h Hash) String() string { return hex.EncodeToString(h[:]) } // IsZero compares the hash to see if it's the zero hash for either SHA-1 or // SHA-256. func (h Hash) IsZero() bool { return bytes.Equal(h[:], zeroSHA1HashBytes[:]) || bytes.Equal(h[:], zeroSHA256HashBytes[:]) } // Equal compares the hash to another provided Hash to see if they're equal. func (h Hash) Equal(other Hash) bool { return bytes.Equal(h[:], other[:]) } // ZeroHash represents an empty Hash. // TODO: use SHA-256 zero hash for repositories that have that as the default. var ZeroHash = Hash(zeroSHA1HashBytes[:]) // NewHash returns a Hash object after ensuring the input string is correctly // encoded. 
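//
// A minimal illustrative sketch, not taken from the original sources:
//
//	hash, err := NewHash("e69de29bb2d1d6434b8b29ae775ad8c2e48c5391")
//	if err != nil {
//		// err is ErrInvalidHashLength or ErrInvalidHashEncoding for bad input
//		return err
//	}
//	_ = hash.String() // round-trips to the original hex string
//	_ = hash.IsZero() // false for this non-zero hash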
func NewHash(h string) (Hash, error) { if len(h) != (sha1.Size*2) && len(h) != (sha256.Size*2) { return ZeroHash, ErrInvalidHashLength } hash, err := hex.DecodeString(h) if err != nil { return ZeroHash, ErrInvalidHashEncoding } return Hash(hash), nil } gittuf-0.9.0/internal/gitinterface/hash_test.go000066400000000000000000000035431475150141000216160ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package gitinterface import ( "encoding/hex" "fmt" "testing" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) func TestNewHash(t *testing.T) { tests := map[string]struct { hash string expectedError error }{ "correctly encoded SHA-1 hash": { hash: "e69de29bb2d1d6434b8b29ae775ad8c2e48c5391", }, "correctly encoded SHA-256 hash": { hash: "61658570165bc04af68cef20d72da49b070dc9d8cd7c8a526c950b658f4d3ccf", }, "correctly encoded SHA-1 zero hash": { hash: "0000000000000000000000000000000000000000", }, "correctly encoded SHA-256 zero hash": { hash: "0000000000000000000000000000000000000000000000000000000000000000", }, "incorrect length SHA-1 hash": { hash: "e69de29bb2d1d6434b8", expectedError: ErrInvalidHashLength, }, "incorrect length SHA-256 hash": { hash: "61658570165bc04af68cef20d72da49b070dc9d8cd7c8a526c950b658f4d3ccfabcdef", expectedError: ErrInvalidHashLength, }, "incorrectly encoded SHA-1 hash": { hash: "e69de29bb2d1d6434b8b29ae775ad8c2e48c539g", // last char is 'g' expectedError: ErrInvalidHashEncoding, }, "incorrectly encoded SHA-256 hash": { hash: "61658570165bc04af68cef20d72da49b070dc9d8cd7c8a526c950b658f4d3ccg", // last char is 'g' expectedError: ErrInvalidHashEncoding, }, } for name, test := range tests { hash, err := NewHash(test.hash) if test.expectedError == nil { expectedHash, secErr := hex.DecodeString(test.hash) require.Nil(t, secErr) assert.Equal(t, Hash(expectedHash), hash) assert.Equal(t, test.hash, hash.String()) assert.Nil(t, err, fmt.Sprintf("unexpected error in test '%s'", name)) } else { assert.ErrorIs(t, err, test.expectedError, fmt.Sprintf("unexpected error in test '%s'", name)) } } } gittuf-0.9.0/internal/gitinterface/log.go000066400000000000000000000032011475150141000204040ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package gitinterface import ( "fmt" "sort" "strings" ) // GetCommitsBetweenRange returns the IDs of the commits that exist between the // specified new and old commit identifiers. func (r *Repository) GetCommitsBetweenRange(commitNewID, commitOldID Hash) ([]Hash, error) { var args []string if commitOldID.IsZero() { args = []string{"rev-list", commitNewID.String()} } else { args = []string{"rev-list", fmt.Sprintf("%s..%s", commitOldID.String(), commitNewID.String())} } commitRangeString, err := r.executor(args...).executeString() if err != nil { return nil, fmt.Errorf("unable to enumerate commits in range: %w", err) } commitRangeSplit := strings.Split(commitRangeString, "\n") commitRange := make([]Hash, 0, len(commitRangeSplit)) for _, cID := range commitRangeSplit { if cID == "" { continue } hash, err := NewHash(cID) if err != nil { return nil, err } commitRange = append(commitRange, hash) } // FIXME: we should ideally be sorting this in the order of occurrence // rather than by commit ID. The only reason this is happening is because // the ordering of commitRange by default is not deterministic. Rather than // walking through them and identifying the right order, we're sorting by // commit ID. 
The intended use case of this function is to get a list of // commits that are then checked for the changes they introduce. At that // point, they must be diffed with their parent directly. sort.Slice(commitRange, func(i, j int) bool { return commitRange[i].String() < commitRange[j].String() }) return commitRange, nil } gittuf-0.9.0/internal/gitinterface/log_test.go000066400000000000000000000275371475150141000214650ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package gitinterface import ( "fmt" "sort" "testing" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) func TestGetCommitsBetweenRangeRepository(t *testing.T) { tempDir := t.TempDir() repo := CreateTestGitRepository(t, tempDir, false) refName := "refs/heads/main" treeBuilder := NewTreeBuilder(repo) // Write empty tree emptyTreeID, err := treeBuilder.WriteTreeFromEntries(nil) if err != nil { t.Fatal(err) } allCommits := []Hash{} for i := 0; i < 5; i++ { commitHash, err := repo.Commit(emptyTreeID, refName, "Test commit\n", false) require.Nil(t, err) allCommits = append(allCommits, commitHash) } // Git tree structure with their commit trees and their values: // // Commit1 <- Commit2 <- Commit3 <- Commit4 <- Commit5 t.Run("Check range between commits 1 and 5", func(t *testing.T) { commits, err := repo.GetCommitsBetweenRange(allCommits[4], allCommits[0]) assert.Nil(t, err) expectedCommits := []Hash{allCommits[4], allCommits[3], allCommits[2], allCommits[1]} sort.Slice(expectedCommits, func(i, j int) bool { return expectedCommits[i].String() < expectedCommits[j].String() }) assert.Equal(t, expectedCommits, commits) }) t.Run("Pass in wrong order", func(t *testing.T) { commits, err := repo.GetCommitsBetweenRange(allCommits[0], allCommits[4]) assert.Nil(t, err) assert.Empty(t, commits) }) t.Run("Check range in separate branches", func(t *testing.T) { // 7 // ↙ ↘ // 5 6 // ↓ ↓ // 3 4 // ↓ ↓ // 1 2 // ↘ ↙ // 0 // If we pass in 7 and 1, we expect to get 7, 6, 5, 4, 3, and 2 // If we pass in 1 and 7, we should expect nothing since every node that // is in the subtree of 1 is also in the subtree of 7 // Create two new branches for this mainBranch := testNameToRefName(t.Name()) featureBranch := testNameToRefName(t.Name() + " feature branch") // Add a common commit for both commonCommit, err := repo.Commit(emptyTreeID, mainBranch, "Initial commit\n", false) require.Nil(t, err) if err := repo.SetReference(featureBranch, commonCommit); err != nil { t.Fatal(err) } mainBranchCommits := []Hash{} for i := 0; i < 5; i++ { commitHash, err := repo.Commit(emptyTreeID, mainBranch, fmt.Sprintf("Main commit %d\n", i), false) require.Nil(t, err) mainBranchCommits = append(mainBranchCommits, commitHash) } featureBranchCommits := []Hash{} for i := 0; i < 5; i++ { commitHash, err := repo.Commit(emptyTreeID, featureBranch, fmt.Sprintf("Feature commit %d\n", i), false) require.Nil(t, err) featureBranchCommits = append(featureBranchCommits, commitHash) } // Add a common merge commit mergeCommit := repo.commitWithParents( t, emptyTreeID, []Hash{ mainBranchCommits[len(mainBranchCommits)-1], featureBranchCommits[len(featureBranchCommits)-1], }, "Merge branches\n", false, ) // Check merge to first commit in main branch (not initial common commit) expectedCommits := append([]Hash{mergeCommit}, mainBranchCommits[1:]...) expectedCommits = append(expectedCommits, featureBranchCommits...) 
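// GetCommitsBetweenRange returns commits sorted by commit ID (see the FIXME
// in log.go), so sort the expected commits the same way before comparing.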
sort.Slice(expectedCommits, func(i, j int) bool { return expectedCommits[i].String() < expectedCommits[j].String() }) commits, err := repo.GetCommitsBetweenRange(mergeCommit, mainBranchCommits[0]) assert.Nil(t, err) assert.Equal(t, expectedCommits, commits) // Check merge to first commit in feature branch (not initial common commit) expectedCommits = append([]Hash{mergeCommit}, featureBranchCommits[1:]...) expectedCommits = append(expectedCommits, mainBranchCommits...) sort.Slice(expectedCommits, func(i, j int) bool { return expectedCommits[i].String() < expectedCommits[j].String() }) commits, err = repo.GetCommitsBetweenRange(mergeCommit, featureBranchCommits[0]) assert.Nil(t, err) assert.Equal(t, expectedCommits, commits) // Check merge to initial common commit expectedCommits = append([]Hash{mergeCommit}, mainBranchCommits...) expectedCommits = append(expectedCommits, featureBranchCommits...) sort.Slice(expectedCommits, func(i, j int) bool { return expectedCommits[i].String() < expectedCommits[j].String() }) commits, err = repo.GetCommitsBetweenRange(mergeCommit, commonCommit) assert.Nil(t, err) assert.Equal(t, expectedCommits, commits) // Set both branches to merge commit, diverge again if err := repo.SetReference(mainBranch, mergeCommit); err != nil { t.Fatal(err) } if err := repo.SetReference(featureBranch, mergeCommit); err != nil { t.Fatal(err) } mainBranchCommits = []Hash{} for i := 0; i < 5; i++ { commitHash, err := repo.Commit(emptyTreeID, mainBranch, fmt.Sprintf("Main commit %d\n", i), false) require.Nil(t, err) mainBranchCommits = append(mainBranchCommits, commitHash) } featureBranchCommits = []Hash{} for i := 0; i < 5; i++ { commitHash, err := repo.Commit(emptyTreeID, featureBranch, fmt.Sprintf("Feature commit %d\n", i), false) require.Nil(t, err) featureBranchCommits = append(featureBranchCommits, commitHash) } newMergeCommit := repo.commitWithParents( t, emptyTreeID, []Hash{ mainBranchCommits[len(mainBranchCommits)-1], featureBranchCommits[len(featureBranchCommits)-1], }, "Merge branches\n", false, ) // Check range between two merge commits expectedCommits = append([]Hash{newMergeCommit}, mainBranchCommits...) expectedCommits = append(expectedCommits, featureBranchCommits...) 
sort.Slice(expectedCommits, func(i, j int) bool { return expectedCommits[i].String() < expectedCommits[j].String() }) commits, err = repo.GetCommitsBetweenRange(newMergeCommit, mergeCommit) assert.Nil(t, err) assert.Equal(t, expectedCommits, commits) }) t.Run("Get all commits", func(t *testing.T) { commits, err := repo.GetCommitsBetweenRange(allCommits[4], ZeroHash) assert.Nil(t, err) expectedCommits := allCommits sort.Slice(expectedCommits, func(i, j int) bool { return expectedCommits[i].String() < expectedCommits[j].String() }) assert.Equal(t, expectedCommits, commits) }) t.Run("Get commits from invalid range", func(t *testing.T) { _, err := repo.GetCommitsBetweenRange(ZeroHash, ZeroHash) assert.NotNil(t, err) }) t.Run("Get commits from non-existent commit", func(t *testing.T) { nonExistentHash, err := repo.WriteBlob([]byte{}) assert.Nil(t, err) commits, err := repo.GetCommitsBetweenRange(nonExistentHash, ZeroHash) assert.Nil(t, err) assert.Empty(t, commits) }) } func TestGetCommitsBetweenRangeForMergeCommits(t *testing.T) { // Creating a tree with merge commits tmpDir := t.TempDir() repo := CreateTestGitRepository(t, tmpDir, false) commitIDs := make([]Hash, 0, 6) emptyBlobHash, err := repo.WriteBlob(nil) if err != nil { t.Fatal(err) } treeHashes := createTestTrees(t, repo, emptyBlobHash, 6) if err != nil { t.Fatal(err) } // creating the first commit commitID := repo.commitWithParents(t, treeHashes[0], nil, fmt.Sprintf("Test commit %v", 1), false) commitIDs = append(commitIDs, commitID) // creating two children from the first commit // in the visual, these will be commit 2 and commit 3 children := createChildrenCommits(t, repo, treeHashes, commitID, 2) commitIDs = append(commitIDs, children...) // creating a child for commit 2, which in the visual will be commit 4 commitID = repo.commitWithParents(t, treeHashes[3], []Hash{children[0]}, fmt.Sprintf("Test commit %v", 4), false) commitIDs = append(commitIDs, commitID) // creating a merge commit from the two children, which in the visual will be commit 5 commitID = repo.commitWithParents(t, treeHashes[4], children, fmt.Sprintf("Test commit %v", 5), false) commitIDs = append(commitIDs, commitID) // creating a child for commit 3, which in the visual will be commit 6 commitID = repo.commitWithParents(t, treeHashes[5], []Hash{children[1]}, fmt.Sprintf("Test commit %v", 6), false) commitIDs = append(commitIDs, commitID) // Git tree with merge commit structure without its commit trees and its values: // // commit 4 commit 5 commit 6 // │ │ │ │ // └─► commit 2 ◄─┘ └─► commit 3 ◄─┘ // │ │ // └─► commit 1 ◄─┘ t.Run("Test commit 1", func(t *testing.T) { // commit 1 is the first commit, so it should be the only commit returned commits, err := repo.GetCommitsBetweenRange(commitIDs[0], ZeroHash) assert.Nil(t, err) expectedCommits := []Hash{commitIDs[0]} assert.Equal(t, expectedCommits, commits) }) t.Run("Test commit 2", func(t *testing.T) { // commit 2 is the first child of commit 1, so only it and commit 1 should be returned commits, err := repo.GetCommitsBetweenRange(commitIDs[1], ZeroHash) assert.Nil(t, err) expectedCommits := []Hash{commitIDs[1], commitIDs[0]} sort.Slice(expectedCommits, func(i, j int) bool { return expectedCommits[i].String() < expectedCommits[j].String() }) assert.Equal(t, expectedCommits, commits) }) t.Run("Test commit 3", func(t *testing.T) { // commit 3 is the second child of commit 1, so only it and commit 1 should be returned commits, err := repo.GetCommitsBetweenRange(commitIDs[2], ZeroHash) assert.Nil(t, err) expectedCommits 
:= []Hash{commitIDs[0], commitIDs[2]} sort.Slice(expectedCommits, func(i, j int) bool { return expectedCommits[i].String() < expectedCommits[j].String() }) assert.Equal(t, expectedCommits, commits) }) t.Run("Test commit 4", func(t *testing.T) { // commit 4 is the child of commit 2, so only it, commit 2, and commit 2's parent commit 1 should be returned commits, err := repo.GetCommitsBetweenRange(commitIDs[3], ZeroHash) assert.Nil(t, err) expectedCommits := []Hash{commitIDs[1], commitIDs[0], commitIDs[3]} sort.Slice(expectedCommits, func(i, j int) bool { return expectedCommits[i].String() < expectedCommits[j].String() }) assert.Equal(t, expectedCommits, commits) }) t.Run("Test commit 5, the merge commit", func(t *testing.T) { // commit 5 is the merge commit of commit 2 and commit 3, so it should return commit 5, commit 2, commit 3, and commit 1 (the parent of commit 2 and commit 3) commits, err := repo.GetCommitsBetweenRange(commitIDs[4], ZeroHash) assert.Nil(t, err) expectedCommits := []Hash{commitIDs[4], commitIDs[1], commitIDs[0], commitIDs[2]} sort.Slice(expectedCommits, func(i, j int) bool { return expectedCommits[i].String() < expectedCommits[j].String() }) assert.Equal(t, expectedCommits, commits) }) t.Run("Test commit 6", func(t *testing.T) { // commit 6 is the child of commit 3, so it should return commit 6, commit 3, and commit 1 (the parent of commit 3) commits, err := repo.GetCommitsBetweenRange(commitIDs[5], ZeroHash) assert.Nil(t, err) expectedCommits := []Hash{commitIDs[0], commitIDs[5], commitIDs[2]} sort.Slice(expectedCommits, func(i, j int) bool { return expectedCommits[i].String() < expectedCommits[j].String() }) assert.Equal(t, expectedCommits, commits) }) } func createTestTrees(t *testing.T, repo *Repository, emptyBlobHash Hash, num int) []Hash { t.Helper() treeBuilder := NewTreeBuilder(repo) treeHashes := make([]Hash, 0, num) for i := 1; i <= num; i++ { objects := []TreeEntry{} for j := 0; j < i; j++ { objects = append(objects, NewEntryBlob(fmt.Sprintf("%d", j+1), emptyBlobHash)) } treeHash, err := treeBuilder.WriteTreeFromEntries(objects) if err != nil { t.Fatal(err) } treeHashes = append(treeHashes, treeHash) } return treeHashes } func createChildrenCommits(t *testing.T, repo *Repository, treeHashes []Hash, parentHash Hash, numChildren int) []Hash { t.Helper() children := make([]Hash, 0, numChildren) for i := 1; i <= numChildren; i++ { commitID := repo.commitWithParents(t, treeHashes[i], []Hash{parentHash}, fmt.Sprintf("Test commit %v", i+1), false) children = append(children, commitID) } return children } gittuf-0.9.0/internal/gitinterface/object.go000066400000000000000000000005211475150141000210730ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package gitinterface // HasObject returns true if an object with the specified Git ID exists in the // repository. 
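//
// A minimal illustrative sketch, not taken from the original sources; it
// assumes an existing *Repository named repo:
//
//	blobID, err := repo.WriteBlob([]byte("hello"))
//	if err != nil {
//		return err
//	}
//	if repo.HasObject(blobID) {
//		// the blob exists in the repository's object store
//	}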
func (r *Repository) HasObject(objectID Hash) bool { _, err := r.executor("cat-file", "-e", objectID.String()).executeString() return err == nil } gittuf-0.9.0/internal/gitinterface/object_test.go000066400000000000000000000035421475150141000221400ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package gitinterface import ( "testing" "github.com/stretchr/testify/assert" ) func TestHasObject(t *testing.T) { tempDir1 := t.TempDir() repo := CreateTestGitRepository(t, tempDir1, true) // Create a backup repo to compute Git IDs we test in repo tempDir2 := t.TempDir() backupRepo := CreateTestGitRepository(t, tempDir2, true) blobID, err := backupRepo.WriteBlob([]byte("hello")) if err != nil { t.Fatal(err) } assert.True(t, backupRepo.HasObject(blobID)) // backup has it assert.False(t, repo.HasObject(blobID)) // repo does not if _, err := repo.WriteBlob([]byte("hello")); err != nil { t.Fatal(err) } assert.True(t, repo.HasObject(blobID)) // now repo has it too backupRepoTreeBuilder := NewTreeBuilder(backupRepo) treeID, err := backupRepoTreeBuilder.WriteTreeFromEntries([]TreeEntry{NewEntryBlob("file", blobID)}) if err != nil { t.Fatal(err) } assert.True(t, backupRepo.HasObject(treeID)) // backup has it assert.False(t, repo.HasObject(treeID)) // repo does not repoTreeBuilder := NewTreeBuilder(repo) if _, err := repoTreeBuilder.WriteTreeFromEntries([]TreeEntry{NewEntryBlob("file", blobID)}); err != nil { t.Fatal(err) } assert.True(t, repo.HasObject(treeID)) // now repo has it too commitID, err := backupRepo.Commit(treeID, "refs/heads/main", "Initial commit\n", false) if err != nil { t.Fatal(err) } assert.True(t, backupRepo.HasObject(commitID)) // backup has it assert.False(t, repo.HasObject(commitID)) // repo does not if _, err := repo.Commit(treeID, "refs/heads/main", "Initial commit\n", false); err != nil { t.Fatal(err) } // Note: This test passes because we control timestamps in // CreateTestGitRepository. So, commit ID in both repos is the same. assert.True(t, repo.HasObject(commitID)) // now repo has it too } gittuf-0.9.0/internal/gitinterface/references.go000066400000000000000000000155431475150141000217600ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package gitinterface import ( "errors" "fmt" "os" "path" "strings" ) const ( RefPrefix = "refs/" BranchRefPrefix = "refs/heads/" TagRefPrefix = "refs/tags/" RemoteRefPrefix = "refs/remotes/" ) var ( ErrReferenceNotFound = errors.New("requested Git reference not found") ) // GetReference returns the tip of the specified Git reference. func (r *Repository) GetReference(refName string) (Hash, error) { refTipID, err := r.executor("rev-parse", refName).executeString() if err != nil { if strings.Contains(err.Error(), "unknown revision or path not in the working tree") { return ZeroHash, ErrReferenceNotFound } return ZeroHash, fmt.Errorf("unable to read reference '%s': %w", refName, err) } hash, err := NewHash(refTipID) if err != nil { return ZeroHash, fmt.Errorf("invalid Git ID for reference '%s': %w", refName, err) } return hash, nil } // SetReference sets the specified reference to the provided Git ID. func (r *Repository) SetReference(refName string, gitID Hash) error { _, err := r.executor("update-ref", "--create-reflog", refName, gitID.String()).executeString() if err != nil { return fmt.Errorf("unable to set Git reference '%s' to '%s': %w", refName, gitID.String(), err) } return nil } // DeleteReference deletes the specified Git reference. 
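//
// A minimal illustrative sketch, not taken from the original sources; the
// branch name is hypothetical:
//
//	if err := repo.DeleteReference("refs/heads/feature"); err != nil {
//		return err
//	}
//	// After deletion, GetReference for the ref returns ErrReferenceNotFound.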
func (r *Repository) DeleteReference(refName string) error { _, err := r.executor("update-ref", "-d", refName).executeString() if err != nil { return fmt.Errorf("unable to delete Git reference '%s': %w", refName, err) } return nil } // CheckAndSetReference sets the specified reference to the provided Git ID if // the reference is currently set to `oldGitID`. func (r *Repository) CheckAndSetReference(refName string, newGitID, oldGitID Hash) error { _, err := r.executor("update-ref", "--create-reflog", refName, newGitID.String(), oldGitID.String()).executeString() if err != nil { return fmt.Errorf("unable to set Git reference '%s' to '%s': %w", refName, newGitID.String(), err) } return nil } // GetSymbolicReferenceTarget returns the name of the Git reference the provided // symbolic Git reference is pointing to. func (r *Repository) GetSymbolicReferenceTarget(refName string) (string, error) { symTarget, err := r.executor("symbolic-ref", refName).executeString() if err != nil { return "", fmt.Errorf("unable to resolve %s: %w", refName, err) } return symTarget, nil } // SetSymbolicReference sets the specified symbolic reference to the specified // target reference. func (r *Repository) SetSymbolicReference(symRefName, targetRefName string) error { _, err := r.executor("symbolic-ref", symRefName, targetRefName).executeString() if err != nil { return fmt.Errorf("unable to set symbolic Git reference '%s' to '%s': %w", symRefName, targetRefName, err) } return nil } // AbsoluteReference returns the fully qualified reference path for the provided // Git ref. // Source: https://git-scm.com/docs/gitrevisions#Documentation/gitrevisions.txt-emltrefnamegtemegemmasterememheadsmasterememrefsheadsmasterem func (r *Repository) AbsoluteReference(target string) (string, error) { _, err := os.Stat(path.Join(r.gitDirPath, target)) if err == nil { if strings.HasPrefix(target, RefPrefix) { // not symbolic ref return target, nil } // symbolic ref such as .git/HEAD return r.GetSymbolicReferenceTarget(target) } // We may have a ref that isn't available locally but is still ref-prefixed. if strings.HasPrefix(target, RefPrefix) { return target, nil } // If target is a full ref already and it's stored in the GIT_DIR/refs // directory, we don't reach this point. Below, we handle cases where the // ref may be packed. // Check if custom reference customName := CustomReferenceName(target) _, err = r.GetReference(customName) if err == nil { return customName, nil } if !errors.Is(err, ErrReferenceNotFound) { return "", err } // Check if tag tagName := TagReferenceName(target) _, err = r.GetReference(tagName) if err == nil { return tagName, nil } if !errors.Is(err, ErrReferenceNotFound) { return "", err } // Check if branch branchName := BranchReferenceName(target) _, err = r.GetReference(branchName) if err == nil { return branchName, nil } if !errors.Is(err, ErrReferenceNotFound) { return "", err } // Check if remote tracker ref remoteRefName := RemoteReferenceName(target) _, err = r.GetReference(remoteRefName) if err == nil { return branchName, nil } if !errors.Is(err, ErrReferenceNotFound) { return "", err } remoteRefHEAD := path.Join(remoteRefName, "HEAD") _, err = r.GetReference(remoteRefHEAD) if err == nil { return branchName, nil } if !errors.Is(err, ErrReferenceNotFound) { return "", err } return "", ErrReferenceNotFound } // RefSpec creates a Git refspec for the specified ref. For more information on // the Git refspec, please consult: // https://git-scm.com/book/en/v2/Git-Internals-The-Refspec. 
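//
// A minimal illustrative sketch, not taken from the original sources:
//
//	refSpec, err := repo.RefSpec("refs/heads/main", "origin", false)
//	if err != nil {
//		return err
//	}
//	// refSpec is "+refs/heads/main:refs/remotes/origin/main": the "+" prefix
//	// is added because the refspec is not fast-forward only, and the
//	// destination is in the remote-tracking namespace because a remote name
//	// was supplied.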
func (r *Repository) RefSpec(refName, remoteName string, fastForwardOnly bool) (string, error) { var ( refPath string err error ) refPath = refName if !strings.HasPrefix(refPath, RefPrefix) { refPath, err = r.AbsoluteReference(refName) if err != nil { return "", err } } if strings.HasPrefix(refPath, TagRefPrefix) { // TODO: check if this is correct, AFAICT tags aren't tracked in the // remotes namespace. fastForwardOnly = true } // local is always refPath, destination depends on remoteName localPath := refPath var remotePath string if len(remoteName) > 0 { remotePath = RemoteRef(refPath, remoteName) } else { remotePath = refPath } refSpecString := fmt.Sprintf("%s:%s", localPath, remotePath) if !fastForwardOnly { refSpecString = fmt.Sprintf("+%s", refSpecString) } return refSpecString, nil } // CustomReferenceName returns the full reference name in the form // `refs/`. func CustomReferenceName(customName string) string { if strings.HasPrefix(customName, RefPrefix) { return customName } return fmt.Sprintf("%s%s", RefPrefix, customName) } // TagReferenceName returns the full reference name for the specified tag in the // form `refs/tags/`. func TagReferenceName(tagName string) string { if strings.HasPrefix(tagName, TagRefPrefix) { return tagName } return fmt.Sprintf("%s%s", TagRefPrefix, tagName) } // BranchReferenceName returns the full reference name for the specified branch // in the form `refs/heads/`. func BranchReferenceName(branchName string) string { if strings.HasPrefix(branchName, BranchRefPrefix) { return branchName } return fmt.Sprintf("%s%s", BranchRefPrefix, branchName) } // RemoteReferenceName returns the full reference name in the form // `refs/remotes/`. func RemoteReferenceName(name string) string { if strings.HasPrefix(name, RemoteRefPrefix) { return name } return fmt.Sprintf("%s%s", RemoteRefPrefix, name) } gittuf-0.9.0/internal/gitinterface/references_test.go000066400000000000000000000246221475150141000230150ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package gitinterface import ( "fmt" "testing" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) func TestGetReference(t *testing.T) { tempDir := t.TempDir() repo := CreateTestGitRepository(t, tempDir, false) refName := "refs/heads/main" treeBuilder := NewTreeBuilder(repo) // Write empty tree emptyTreeID, err := treeBuilder.WriteTreeFromEntries(nil) if err != nil { t.Fatal(err) } commitID, err := repo.Commit(emptyTreeID, refName, "Initial commit\n", false) require.Nil(t, err) refTip, err := repo.GetReference(refName) assert.Nil(t, err) assert.Equal(t, commitID, refTip) } func TestSetReference(t *testing.T) { tempDir := t.TempDir() repo := CreateTestGitRepository(t, tempDir, false) refName := "refs/heads/main" treeBuilder := NewTreeBuilder(repo) // Write empty tree emptyTreeID, err := treeBuilder.WriteTreeFromEntries(nil) if err != nil { t.Fatal(err) } firstCommitID, err := repo.Commit(emptyTreeID, refName, "Initial commit\n", false) require.Nil(t, err) // Create second commit with tree secondCommitID, err := repo.Commit(emptyTreeID, refName, "Add README\n", false) require.Nil(t, err) refTip, err := repo.GetReference(refName) require.Nil(t, err) require.Equal(t, secondCommitID, refTip) err = repo.SetReference(refName, firstCommitID) assert.Nil(t, err) refTip, err = repo.GetReference(refName) require.Nil(t, err) assert.Equal(t, firstCommitID, refTip) } func TestCheckAndSetReference(t *testing.T) { tempDir := t.TempDir() repo := 
CreateTestGitRepository(t, tempDir, false) refName := "refs/heads/main" treeBuilder := NewTreeBuilder(repo) // Write empty tree emptyTreeID, err := treeBuilder.WriteTreeFromEntries(nil) if err != nil { t.Fatal(err) } firstCommitID, err := repo.Commit(emptyTreeID, refName, "Initial commit\n", false) require.Nil(t, err) // Create second commit with tree secondCommitID, err := repo.Commit(emptyTreeID, refName, "Add README\n", false) require.Nil(t, err) refTip, err := repo.GetReference(refName) require.Nil(t, err) require.Equal(t, secondCommitID, refTip) err = repo.CheckAndSetReference(refName, firstCommitID, secondCommitID) assert.Nil(t, err) refTip, err = repo.GetReference(refName) require.Nil(t, err) assert.Equal(t, firstCommitID, refTip) } func TestGetSymbolicReferenceTarget(t *testing.T) { tempDir := t.TempDir() repo := CreateTestGitRepository(t, tempDir, false) refName := "refs/heads/main" treeBuilder := NewTreeBuilder(repo) // Write empty tree emptyTreeID, err := treeBuilder.WriteTreeFromEntries(nil) if err != nil { t.Fatal(err) } _, err = repo.Commit(emptyTreeID, refName, "Initial commit\n", false) require.Nil(t, err) // HEAD must be set to the main branch -> this is handled by git init head, err := repo.GetSymbolicReferenceTarget("HEAD") assert.Nil(t, err) assert.Equal(t, refName, head) } func TestSetSymbolicReference(t *testing.T) { tempDir := t.TempDir() repo := CreateTestGitRepository(t, tempDir, false) refName := "refs/heads/not-main" // we want to ensure it's set to something other than the default main treeBuilder := NewTreeBuilder(repo) // Write empty tree emptyTreeID, err := treeBuilder.WriteTreeFromEntries(nil) if err != nil { t.Fatal(err) } _, err = repo.Commit(emptyTreeID, refName, "Initial commit\n", false) require.Nil(t, err) head, err := repo.GetSymbolicReferenceTarget("HEAD") require.Nil(t, err) assert.Equal(t, "refs/heads/main", head) err = repo.SetSymbolicReference("HEAD", refName) assert.Nil(t, err) head, err = repo.GetSymbolicReferenceTarget("HEAD") require.Nil(t, err) assert.Equal(t, refName, head) // not main anymore } func TestRepositoryRefSpec(t *testing.T) { tempDir := t.TempDir() repo := CreateTestGitRepository(t, tempDir, false) shortRefName := "master" qualifiedRefName := "refs/heads/master" qualifiedRemoteRefName := "refs/remotes/origin/master" treeBuilder := NewTreeBuilder(repo) emptyTreeHash, err := treeBuilder.WriteTreeFromEntries(nil) if err != nil { t.Fatal(err) } commitID, err := repo.Commit(emptyTreeHash, qualifiedRefName, "Test Commit", false) if err != nil { t.Fatal(err) } refHash, err := repo.GetReference(qualifiedRefName) if err != nil { t.Fatal(err) } assert.Equal(t, commitID, refHash, "unexpected value configuring test repo") tests := map[string]struct { repo *Repository refName string remoteName string fastForwardOnly bool expectedRefSpec string expectedError error }{ "standard branch, not fast forward only, no remote": { refName: "refs/heads/main", expectedRefSpec: "+refs/heads/main:refs/heads/main", }, "standard branch, fast forward only, no remote": { refName: "refs/heads/main", fastForwardOnly: true, expectedRefSpec: "refs/heads/main:refs/heads/main", }, "standard branch, not fast forward only, remote": { refName: "refs/heads/main", remoteName: "origin", expectedRefSpec: "+refs/heads/main:refs/remotes/origin/main", }, "standard branch, fast forward only, remote": { refName: "refs/heads/main", remoteName: "origin", fastForwardOnly: true, expectedRefSpec: "refs/heads/main:refs/remotes/origin/main", }, "non-standard branch, not fast forward 
only, no remote": { refName: "refs/heads/foo/bar", expectedRefSpec: "+refs/heads/foo/bar:refs/heads/foo/bar", }, "non-standard branch, fast forward only, no remote": { refName: "refs/heads/foo/bar", fastForwardOnly: true, expectedRefSpec: "refs/heads/foo/bar:refs/heads/foo/bar", }, "non-standard branch, not fast forward only, remote": { refName: "refs/heads/foo/bar", remoteName: "origin", expectedRefSpec: "+refs/heads/foo/bar:refs/remotes/origin/foo/bar", }, "non-standard branch, fast forward only, remote": { refName: "refs/heads/foo/bar", remoteName: "origin", fastForwardOnly: true, expectedRefSpec: "refs/heads/foo/bar:refs/remotes/origin/foo/bar", }, "short branch, not fast forward only, no remote": { refName: shortRefName, repo: repo, expectedRefSpec: fmt.Sprintf("+%s:%s", qualifiedRefName, qualifiedRefName), }, "short branch, fast forward only, no remote": { refName: shortRefName, repo: repo, fastForwardOnly: true, expectedRefSpec: fmt.Sprintf("%s:%s", qualifiedRefName, qualifiedRefName), }, "short branch, not fast forward only, remote": { refName: shortRefName, repo: repo, remoteName: "origin", expectedRefSpec: fmt.Sprintf("+%s:%s", qualifiedRefName, qualifiedRemoteRefName), }, "short branch, fast forward only, remote": { refName: shortRefName, repo: repo, fastForwardOnly: true, remoteName: "origin", expectedRefSpec: fmt.Sprintf("%s:%s", qualifiedRefName, qualifiedRemoteRefName), }, "custom namespace, not fast forward only, no remote": { refName: "refs/foo/bar", expectedRefSpec: "+refs/foo/bar:refs/foo/bar", }, "custom namespace, fast forward only, no remote": { refName: "refs/foo/bar", fastForwardOnly: true, expectedRefSpec: "refs/foo/bar:refs/foo/bar", }, "custom namespace, not fast forward only, remote": { refName: "refs/foo/bar", remoteName: "origin", expectedRefSpec: "+refs/foo/bar:refs/remotes/origin/foo/bar", }, "custom namespace, fast forward only, remote": { refName: "refs/foo/bar", remoteName: "origin", fastForwardOnly: true, expectedRefSpec: "refs/foo/bar:refs/remotes/origin/foo/bar", }, "tag, not fast forward only, no remote": { refName: "refs/tags/v1.0.0", fastForwardOnly: false, expectedRefSpec: "refs/tags/v1.0.0:refs/tags/v1.0.0", }, "tag, fast forward only, no remote": { refName: "refs/tags/v1.0.0", fastForwardOnly: true, expectedRefSpec: "refs/tags/v1.0.0:refs/tags/v1.0.0", }, "tag, not fast forward only, remote": { refName: "refs/tags/v1.0.0", remoteName: "origin", fastForwardOnly: false, expectedRefSpec: "refs/tags/v1.0.0:refs/tags/v1.0.0", }, "tag, fast forward only, remote": { refName: "refs/tags/v1.0.0", remoteName: "origin", fastForwardOnly: true, expectedRefSpec: "refs/tags/v1.0.0:refs/tags/v1.0.0", }, } for name, test := range tests { refSpec, err := test.repo.RefSpec(test.refName, test.remoteName, test.fastForwardOnly) assert.ErrorIs(t, err, test.expectedError, fmt.Sprintf("unexpected error in test '%s'", name)) assert.Equal(t, test.expectedRefSpec, refSpec, fmt.Sprintf("unexpected refspec returned in test '%s'", name)) } } func TestBranchReferenceName(t *testing.T) { tests := map[string]struct { branchName string expectedReferenceName string }{ "short name": { branchName: "main", expectedReferenceName: "refs/heads/main", }, "reference name": { branchName: "refs/heads/main", expectedReferenceName: "refs/heads/main", }, } for name, test := range tests { referenceName := BranchReferenceName(test.branchName) assert.Equal(t, test.expectedReferenceName, referenceName, fmt.Sprintf("unexpected branch reference received in test '%s'", name)) } } func 
TestTagReferenceName(t *testing.T) { tests := map[string]struct { tagName string expectedReferenceName string }{ "short name": { tagName: "v1", expectedReferenceName: "refs/tags/v1", }, "reference name": { tagName: "refs/tags/v1", expectedReferenceName: "refs/tags/v1", }, } for name, test := range tests { referenceName := TagReferenceName(test.tagName) assert.Equal(t, test.expectedReferenceName, referenceName, fmt.Sprintf("unexpected tag reference received in test '%s'", name)) } } func TestDeleteReference(t *testing.T) { tempDir := t.TempDir() repo := CreateTestGitRepository(t, tempDir, false) refName := "refs/heads/main" treeBuilder := NewTreeBuilder(repo) emptyTreeID, err := treeBuilder.WriteTreeFromEntries(nil) if err != nil { t.Fatal(err) } commitID, err := repo.Commit(emptyTreeID, refName, "Initial commit\n", false) require.Nil(t, err) refTip, err := repo.GetReference(refName) require.Nil(t, err) require.Equal(t, commitID, refTip) err = repo.DeleteReference(refName) assert.Nil(t, err) _, err = repo.GetReference(refName) assert.ErrorIs(t, err, ErrReferenceNotFound) } gittuf-0.9.0/internal/gitinterface/remote.go000066400000000000000000000010231475150141000211160ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package gitinterface func (r *Repository) AddRemote(remoteName, url string) error { _, err := r.executor("remote", "add", remoteName, url).executeString() return err } func (r *Repository) RemoveRemote(remoteName string) error { _, err := r.executor("remote", "remove", remoteName).executeString() return err } func (r *Repository) GetRemoteURL(remoteName string) (string, error) { return r.executor("remote", "get-url", remoteName).executeString() } gittuf-0.9.0/internal/gitinterface/remote_test.go000066400000000000000000000024461475150141000221670ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package gitinterface import ( "fmt" "testing" "github.com/stretchr/testify/assert" ) func TestRemote(t *testing.T) { tmpDir := t.TempDir() repo := CreateTestGitRepository(t, tmpDir, false) output, err := repo.executor("remote").executeString() if err != nil { t.Fatal(err) } assert.Equal(t, "", output) // no output because there are no remotes remoteName := "origin" remoteURL := "git@example.com:repo.git" // Test AddRemote err = repo.AddRemote(remoteName, remoteURL) assert.Nil(t, err) output, err = repo.executor("remote", "-v").executeString() if err != nil { t.Fatal(err) } expectedOutput := fmt.Sprintf("%s\t%s (fetch)\n%s\t%s (push)", remoteName, remoteURL, remoteName, remoteURL) assert.Equal(t, expectedOutput, output) // Test GetRemoteURL returnedRemoteURL, err := repo.GetRemoteURL(remoteName) assert.Nil(t, err) assert.Equal(t, remoteURL, returnedRemoteURL) _, err = repo.GetRemoteURL("does-not-exist") assert.ErrorContains(t, err, "No such remote") // Test RemoveRemote err = repo.RemoveRemote(remoteName) assert.Nil(t, err) output, err = repo.executor("remote").executeString() if err != nil { t.Fatal(err) } assert.Equal(t, "", output) // no output because there are no remotes } gittuf-0.9.0/internal/gitinterface/repository.go000066400000000000000000000110621475150141000220460ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package gitinterface import ( "bytes" "fmt" "io" "os" "os/exec" "strings" "github.com/go-git/go-git/v5" "github.com/jonboulle/clockwork" ) const ( binary = "git" committerTimeKey = "GIT_COMMITTER_DATE" authorTimeKey = 
"GIT_AUTHOR_DATE" ) // Repository is a lightweight wrapper around a Git repository. It stores the // location of the repository's GIT_DIR. type Repository struct { gitDirPath string clock clockwork.Clock } // GetGoGitRepository returns the go-git representation of a repository. We use // this in certain signing and verifying workflows. func (r *Repository) GetGoGitRepository() (*git.Repository, error) { return git.PlainOpenWithOptions(r.gitDirPath, &git.PlainOpenOptions{DetectDotGit: true}) } // GetGitDir returns the GIT_DIR path for the repository. func (r *Repository) GetGitDir() string { return r.gitDirPath } // IsBare returns true if the repository is a bare repository. func (r *Repository) IsBare() bool { // TODO: this may not work when the repo is cloned with GIT_DIR set // elsewhere. We don't support this at the moment, so it's probably okay? return !strings.HasSuffix(r.gitDirPath, ".git") } // LoadRepository returns a Repository instance using the current working // directory. It also inspects the PATH to ensure Git is installed. func LoadRepository() (*Repository, error) { _, err := exec.LookPath(binary) if err != nil { return nil, fmt.Errorf("unable to find Git binary, is Git installed?") } repo := &Repository{clock: clockwork.NewRealClock()} envVar := os.Getenv("GIT_DIR") if envVar != "" { repo.gitDirPath = envVar return repo, nil } stdOut, stdErr, err := repo.executor("rev-parse", "--git-dir").execute() if err != nil { errContents, newErr := io.ReadAll(stdErr) if newErr != nil { return nil, fmt.Errorf("unable to read original err '%w' when loading repository: %w", err, newErr) } return nil, fmt.Errorf("unable to identify GIT_DIR: %w: %s", err, strings.TrimSpace(string(errContents))) } stdOutContents, err := io.ReadAll(stdOut) if err != nil { return nil, fmt.Errorf("unable to identify GIT_DIR: %w", err) } repo.gitDirPath = strings.TrimSpace(string(stdOutContents)) return repo, nil } // executor is a lightweight wrapper around exec.Cmd to run Git commands. It // accepts the arguments to the `git` binary, but the binary itself must not be // specified. type executor struct { r *Repository args []string env []string stdIn io.Reader } // executor initializes a new executor instance to run a Git command with the // specified arguments. func (r *Repository) executor(args ...string) *executor { return &executor{r: r, args: args, env: os.Environ()} } // withEnv adds the specified environment variables. Each environment variable // must be specified in the form of `key=value`. func (e *executor) withEnv(env ...string) *executor { e.env = append(e.env, env...) return e } // withStdIn sets the contents of stdin to be passed in to the command. func (e *executor) withStdIn(stdIn *bytes.Buffer) *executor { e.stdIn = stdIn return e } // executeString runs the constructed Git command and returns the contents of // stdout. Leading and trailing spaces and newlines are removed. This function // should be used almost every time; the only exception is when the output is // desired without any processing such as the removal of space characters. 
func (e *executor) executeString() (string, error) { stdOut, stdErr, err := e.execute() if err != nil { stdErrContents, newErr := io.ReadAll(stdErr) if newErr != nil { return "", fmt.Errorf("unable to read stderr contents: %w; original err: %w", newErr, err) } return "", fmt.Errorf("%w when executing `git %s`: %s", err, strings.Join(e.args, " "), string(stdErrContents)) } stdOutContents, err := io.ReadAll(stdOut) if err != nil { return "", fmt.Errorf("unable to read stdout contents: %w", err) } return strings.TrimSpace(string(stdOutContents)), nil } // execute runs the constructed Git command and returns the raw stdout and // stderr contents. It adds the `--git-dir` argument if the repository has a // path set. func (e *executor) execute() (io.Reader, io.Reader, error) { if e.r.gitDirPath != "" { e.args = append([]string{"--git-dir", e.r.gitDirPath}, e.args...) } cmd := exec.Command(binary, e.args...) //nolint:gosec cmd.Env = e.env var ( stdOut bytes.Buffer stdErr bytes.Buffer ) cmd.Stdout = &stdOut cmd.Stderr = &stdErr if e.stdIn != nil { cmd.Stdin = e.stdIn } err := cmd.Run() return &stdOut, &stdErr, err } gittuf-0.9.0/internal/gitinterface/repository_test.go000066400000000000000000000010651475150141000231070ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package gitinterface import ( "testing" "github.com/stretchr/testify/assert" ) func TestRepository(t *testing.T) { t.Run("repository.isBare", func(t *testing.T) { t.Run("bare=true", func(t *testing.T) { tmpDir := t.TempDir() repo := CreateTestGitRepository(t, tmpDir, true) assert.True(t, repo.IsBare()) }) t.Run("bare=false", func(t *testing.T) { tmpDir := t.TempDir() repo := CreateTestGitRepository(t, tmpDir, false) assert.False(t, repo.IsBare()) }) }) } gittuf-0.9.0/internal/gitinterface/signature.go000066400000000000000000000175271475150141000216440ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package gitinterface import ( "bytes" "context" "crypto/x509" "encoding/pem" "errors" "fmt" "log/slog" "os" "strings" "github.com/ProtonMail/go-crypto/openpgp" "github.com/gittuf/gittuf/internal/signerverifier/common" "github.com/gittuf/gittuf/internal/signerverifier/sigstore" sslibsvssh "github.com/gittuf/gittuf/internal/signerverifier/ssh" "github.com/hiddeco/sshsig" "github.com/secure-systems-lab/go-securesystemslib/signerverifier" "github.com/sigstore/cosign/v2/pkg/cosign" gitsignVerifier "github.com/sigstore/gitsign/pkg/git" gitsignRekor "github.com/sigstore/gitsign/pkg/rekor" "github.com/sigstore/sigstore/pkg/fulcioroots" "golang.org/x/crypto/ssh" ) const ( rekorPublicGoodInstance = "https://rekor.sigstore.dev" namespaceSSHSignature string = "git" gpgPrivateKeyPEMHeader string = "PGP PRIVATE KEY" opensshPrivateKeyPEMHeader string = "OPENSSH PRIVATE KEY" rsaPrivateKeyPEMHeader string = "RSA PRIVATE KEY" genericPrivateKeyPEMHeader string = "PRIVATE KEY" signingFormatGPG string = "gpg" signingFormatSSH string = "ssh" ) var ( ErrNotCommitOrTag = errors.New("invalid object type, expected commit or tag for signature verification") ErrSigningKeyNotSpecified = errors.New("signing key not specified in git config") ErrUnknownSigningMethod = errors.New("unknown signing method (not one of gpg, ssh, x509)") ErrIncorrectVerificationKey = errors.New("incorrect key provided to verify signature") ErrVerifyingSigstoreSignature = errors.New("unable to verify Sigstore signature") ErrVerifyingSSHSignature = errors.New("unable to verify SSH signature") 
ErrInvalidSignature = errors.New("unable to parse signature / signature has unexpected header") ) // CanSign inspects the Git configuration to determine if commit / tag signing // is possible. func (r *Repository) CanSign() error { config, err := r.GetGitConfig() if err != nil { return err } // Format is one of GPG, SSH, X509 format := getSigningMethod(config) // If format is GPG or X509, the signing key parameter is optional // However, for SSH, the signing key must be set if format == signingFormatSSH { keyInfo := getSigningKeyInfo(config) if keyInfo == "" { return ErrSigningKeyNotSpecified } } return nil } // VerifySignature verifies the cryptographic signature associated with the // specified object. The `objectID` must point to a Git commit or tag object. func (r *Repository) VerifySignature(ctx context.Context, objectID Hash, key *signerverifier.SSLibKey) error { if err := r.ensureIsCommit(objectID); err == nil { return r.verifyCommitSignature(ctx, objectID, key) } if err := r.ensureIsTag(objectID); err == nil { return r.verifyTagSignature(ctx, objectID, key) } return ErrNotCommitOrTag } func signGitObjectUsingKey(contents, pemKeyBytes []byte) (string, error) { block, _ := pem.Decode(pemKeyBytes) if block == nil { // openpgp implements its own armor-decode method, pem.Decode considers // the input invalid. We haven't tested if this is universal, so in case // pem.Decode does succeed on a GPG key, we catch it below. return signGitObjectUsingGPGKey(contents, pemKeyBytes) } switch block.Type { case gpgPrivateKeyPEMHeader: return signGitObjectUsingGPGKey(contents, pemKeyBytes) case opensshPrivateKeyPEMHeader, rsaPrivateKeyPEMHeader, genericPrivateKeyPEMHeader: return signGitObjectUsingSSHKey(contents, pemKeyBytes) } return "", ErrUnknownSigningMethod } func signGitObjectUsingGPGKey(contents, pemKeyBytes []byte) (string, error) { reader := bytes.NewReader(contents) keyring, err := openpgp.ReadArmoredKeyRing(bytes.NewReader(pemKeyBytes)) if err != nil { return "", err } sig := new(strings.Builder) if err := openpgp.ArmoredDetachSign(sig, keyring[0], reader, nil); err != nil { return "", err } return sig.String(), nil } func signGitObjectUsingSSHKey(contents, pemKeyBytes []byte) (string, error) { signer, err := ssh.ParsePrivateKey(pemKeyBytes) if err != nil { return "", err } sshSig, err := sshsig.Sign(bytes.NewReader(contents), signer, sshsig.HashSHA512, namespaceSSHSignature) if err != nil { return "", err } sigBytes := sshsig.Armor(sshSig) return string(sigBytes), nil } // verifyGitsignSignature handles the Sigstore-specific workflow involved in // verifying commit or tag signatures issued by gitsign. 
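//
// A minimal illustrative sketch, not taken from the original sources: the
// key's KeyVal.Issuer and KeyVal.Identity are matched against the OIDC issuer
// and subject identity in the Fulcio signing certificate.
//
//	if err := verifyGitsignSignature(ctx, repo, key, data, signature); err != nil {
//		// err wraps ErrIncorrectVerificationKey or ErrVerifyingSigstoreSignature
//		return err
//	}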
func verifyGitsignSignature(ctx context.Context, repo *Repository, key *signerverifier.SSLibKey, data, signature []byte) error { checkOpts := &cosign.CheckOpts{ Identities: []cosign.Identity{{ Issuer: key.KeyVal.Issuer, Subject: key.KeyVal.Identity, }}, } var verifier *gitsignVerifier.CertVerifier sigstoreRootFilePath := os.Getenv(sigstore.EnvSigstoreRootFile) if sigstoreRootFilePath == "" { root, err := fulcioroots.Get() if err != nil { return errors.Join(ErrVerifyingSigstoreSignature, err) } intermediate, err := fulcioroots.GetIntermediates() if err != nil { return errors.Join(ErrVerifyingSigstoreSignature, err) } checkOpts.RootCerts = root checkOpts.IntermediateCerts = intermediate verifier, err = gitsignVerifier.NewCertVerifier( gitsignVerifier.WithRootPool(root), gitsignVerifier.WithIntermediatePool(intermediate), ) if err != nil { return errors.Join(ErrVerifyingSigstoreSignature, err) } } else { slog.Debug("Using environment variables to establish trust for Sigstore instance...") rootCerts, err := common.LoadCertsFromPath(sigstoreRootFilePath) if err != nil { return errors.Join(ErrVerifyingSigstoreSignature, err) } root := x509.NewCertPool() for _, cert := range rootCerts { root.AddCert(cert) } checkOpts.RootCerts = root verifier, err = gitsignVerifier.NewCertVerifier( gitsignVerifier.WithRootPool(root), ) if err != nil { return errors.Join(ErrVerifyingSigstoreSignature, err) } } verifiedCert, err := verifier.Verify(ctx, data, signature, true) if err != nil { return ErrIncorrectVerificationKey } rekorURL := rekorPublicGoodInstance // Check git config to see if rekor server must be overridden config, err := repo.GetGitConfig() if err != nil { return errors.Join(ErrVerifyingSigstoreSignature, err) } if configValue, has := config[sigstore.GitConfigRekor]; has { slog.Debug(fmt.Sprintf("Using '%s' as Rekor instance...", configValue)) rekorURL = configValue } // gitsignRekor.NewWithOptions invokes cosign.GetRekorPubs which looks at // the env var, so we don't have to do anything here rekor, err := gitsignRekor.NewWithOptions(ctx, rekorURL) if err != nil { return errors.Join(ErrVerifyingSigstoreSignature, err) } checkOpts.RekorClient = rekor.Rekor checkOpts.RekorPubKeys = rekor.PublicKeys() // cosign.GetCTLogPubs already looks at the env var, so we don't have to do // anything here ctPub, err := cosign.GetCTLogPubs(ctx) if err != nil { return errors.Join(ErrVerifyingSigstoreSignature, err) } checkOpts.CTLogPubKeys = ctPub if _, err := cosign.ValidateAndUnpackCert(verifiedCert, checkOpts); err != nil { return errors.Join(ErrIncorrectVerificationKey, err) } return nil } // verifySSHKeySignature verifies Git signatures issued by SSH keys. 
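//
// A minimal illustrative sketch, not taken from the original sources:
//
//	if err := verifySSHKeySignature(ctx, sshKey, data, signature); err != nil {
//		// err wraps ErrVerifyingSSHSignature when verification fails
//		return err
//	}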
func verifySSHKeySignature(ctx context.Context, key *signerverifier.SSLibKey, data, signature []byte) error { verifier, err := sslibsvssh.NewVerifierFromKey(key) if err != nil { return errors.Join(ErrVerifyingSSHSignature, err) } if err := verifier.Verify(ctx, data, signature); err != nil { return errors.Join(ErrVerifyingSSHSignature, err) } return nil } func getSigningMethod(gitConfig map[string]string) string { format, ok := gitConfig["gpg.format"] if !ok { return signingFormatGPG // default to gpg } return format } func getSigningKeyInfo(gitConfig map[string]string) string { keyInfo, ok := gitConfig["user.signingkey"] if !ok { return "" } return keyInfo } gittuf-0.9.0/internal/gitinterface/signature_test.go000066400000000000000000000036061475150141000226740ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package gitinterface import ( "fmt" "testing" "github.com/stretchr/testify/assert" ) func TestCanSign(t *testing.T) { // Note: This is currently not testing the one scenario where CanSign // returns an error: when gpg.format=ssh but user.signingkey is undefined. // This is because on developer machines, there's a very good chance // user.signingkey is set globally, which gets picked up during the test. // This also means we can't reliably test the case when no signing specific // configuration is set (which defaults to gpg + the default key). // :( tests := map[string]struct { config map[string]string }{ "explicit gpg, no key": { config: map[string]string{ "gpg.format": "gpg", }, }, "explicit gpg, explicit key": { config: map[string]string{ "gpg.format": "gpg", "user.signingkey": "gpg-fingerprint", }, }, "no signing method, explicit key": { config: map[string]string{ "user.signingkey": "gpg-fingerprint", }, }, "explicit ssh, explicit key": { config: map[string]string{ "gpg.format": "ssh", "user.signingkey": "ssh/key/path", }, }, "explicit x509, no key": { config: map[string]string{ "gpg.format": "x509", }, }, "explicit x509, explicit key": { config: map[string]string{ "gpg.format": "x509", "user.signingkey": "x509-signing-info", }, }, } for name, test := range tests { t.Run(name, func(t *testing.T) { tmpDir := t.TempDir() repo := setupRepository(t, tmpDir, false) // explicitly not using CreateTestGitRepository as that includes signing configurations for key, value := range test.config { if err := repo.SetGitConfig(key, value); err != nil { t.Fatal(err) } } err := repo.CanSign() assert.Nil(t, err, fmt.Sprintf("unexpected result in test '%s'", name)) }) } } gittuf-0.9.0/internal/gitinterface/status.go000066400000000000000000000065511475150141000211610ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package gitinterface import ( "errors" "fmt" "os" "strings" ) // See https://git-scm.com/docs/git-status#_porcelain_format_version_1. var ( ErrInvalidStatusCodeLength = errors.New("status code string must be of length 1") ErrInvalidStatusCode = errors.New("status code string is unrecognized") ) type StatusCode uint const ( StatusCodeUnmodified StatusCode = iota + 1 // we use 0 as error code StatusCodeModified StatusCodeTypeChanged StatusCodeAdded StatusCodeDeleted StatusCodeRenamed StatusCodeCopied StatusCodeUpdatedUnmerged StatusCodeUntracked StatusCodeIgnored ) func (s StatusCode) String() string { switch s { case StatusCodeUnmodified: return " " // is this actually a space or empty string? 
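	// Per Git's porcelain v1 format the unmodified state is a single space
	// character in its X/Y column, which is why NewStatusCodeFromByte below
	// maps ' ' back to StatusCodeUnmodified.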
case StatusCodeModified: return "M" case StatusCodeTypeChanged: return "T" case StatusCodeAdded: return "A" case StatusCodeDeleted: return "D" case StatusCodeRenamed: return "R" case StatusCodeCopied: return "C" case StatusCodeUpdatedUnmerged: return "U" case StatusCodeUntracked: return "?" case StatusCodeIgnored: return "!" default: return "invalid-code" } } func NewStatusCodeFromByte(s byte) (StatusCode, error) { switch s { case ' ': return StatusCodeUnmodified, nil case 'M': return StatusCodeModified, nil case 'T': return StatusCodeTypeChanged, nil case 'A': return StatusCodeAdded, nil case 'D': return StatusCodeDeleted, nil case 'C': return StatusCodeCopied, nil case 'U': return StatusCodeUpdatedUnmerged, nil case '?': return StatusCodeUntracked, nil case '!': return StatusCodeIgnored, nil default: return 0, ErrInvalidStatusCode } } type FileStatus struct { X StatusCode Y StatusCode } func (f *FileStatus) Untracked() bool { return f.X == StatusCodeUntracked || f.Y == StatusCodeUntracked } func (r *Repository) Status() (map[string]FileStatus, error) { worktree := r.gitDirPath if !r.IsBare() { worktree = strings.TrimSuffix(worktree, ".git") // TODO: this doesn't support detached git dir } cwd, err := os.Getwd() if err != nil { return nil, err } if err := os.Chdir(worktree); err != nil { return nil, err } defer os.Chdir(cwd) //nolint:errcheck output, err := r.executor("status", "--porcelain=1", "-z", "--untracked-files=all", "--ignored").executeString() if err != nil { return nil, fmt.Errorf("unable to check status of repository: %w", err) } statuses := map[string]FileStatus{} lines := strings.Split(output, string('\000')) for _, line := range lines { if len(line) == 0 { continue } // first two characters are status codes, find the corresponding // statuses xb := line[0] yb := line[1] // Note: we identify the status after inspecting the path so we can // provide better error messages // then, we have a single space followed by the path, ignore space and // read in the rest as the filepath filePath := strings.TrimSpace(line[2:]) xStatus, err := NewStatusCodeFromByte(xb) if err != nil { return nil, fmt.Errorf("unable to parse status code '%c' for path '%s': %w", xb, filePath, err) } yStatus, err := NewStatusCodeFromByte(yb) if err != nil { return nil, fmt.Errorf("unable to parse status code '%c' for path '%s': %w", yb, filePath, err) } status := FileStatus{X: xStatus, Y: yStatus} statuses[filePath] = status } return statuses, nil } gittuf-0.9.0/internal/gitinterface/status_test.go000066400000000000000000000072631475150141000222210ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package gitinterface import ( "os" "testing" "github.com/stretchr/testify/assert" ) func TestStatus(t *testing.T) { tmpDir := t.TempDir() repo := CreateTestGitRepository(t, tmpDir, false) cwd, err := os.Getwd() if err != nil { t.Fatal(err) } if err := os.Chdir(tmpDir); err != nil { t.Fatal(err) } defer os.Chdir(cwd) //nolint:errcheck // NOTE: we don't use traditional methods like WriteBlob or TreeBuilder so // we can more closely simulate user actions, such as with updating index, // etc. 
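	// In the porcelain output parsed by Status(), X is the staged (index)
	// status and Y the worktree status; for example, a file that is added and
	// then edited again is expected to report {X: Added, Y: Modified} below.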
filename := "foo" filename2 := "bar" if err := os.WriteFile(filename, []byte("foo"), 0o644); err != nil { //nolint:gosec t.Fatal(err) } statuses, err := repo.Status() assert.Nil(t, err) assert.Equal(t, map[string]FileStatus{filename: {X: StatusCodeUntracked, Y: StatusCodeUntracked}}, statuses) // Add item to index if _, err := repo.executor("add", filename).executeString(); err != nil { t.Fatal(err) } statuses, err = repo.Status() assert.Nil(t, err) assert.Equal(t, map[string]FileStatus{filename: {X: StatusCodeAdded, Y: StatusCodeUnmodified}}, statuses) // Modify file that has been staged if err := os.WriteFile(filename, []byte("bar"), 0o644); err != nil { //nolint:gosec t.Fatal(err) } statuses, err = repo.Status() assert.Nil(t, err) assert.Equal(t, map[string]FileStatus{filename: {X: StatusCodeAdded, Y: StatusCodeModified}}, statuses) // Add and commit if _, err := repo.executor("add", filename).executeString(); err != nil { t.Fatal(err) } if _, err := repo.executor("commit", "-m", "Commit\n").executeString(); err != nil { t.Fatal(err) } statuses, err = repo.Status() assert.Nil(t, err) assert.Empty(t, statuses) // Modify file again if err := os.WriteFile(filename, []byte("foo"), 0o644); err != nil { //nolint:gosec t.Fatal(err) } statuses, err = repo.Status() assert.Nil(t, err) assert.Equal(t, map[string]FileStatus{filename: {X: StatusCodeModified, Y: StatusCodeUnmodified}}, statuses) // Remove file if err := os.Remove(filename); err != nil { t.Fatal(err) } statuses, err = repo.Status() assert.Nil(t, err) assert.Equal(t, map[string]FileStatus{filename: {X: StatusCodeDeleted, Y: StatusCodeUnmodified}}, statuses) // Commit if _, err := repo.executor("add", filename).executeString(); err != nil { t.Fatal(err) } if _, err := repo.executor("commit", "-m", "Commit\n", "--allow-empty").executeString(); err != nil { t.Fatal(err) } statuses, err = repo.Status() assert.Nil(t, err) assert.Empty(t, statuses) // Add two files, commit, then make one a symlink to the other if err := os.WriteFile(filename, []byte("foo"), 0o644); err != nil { //nolint:gosec t.Fatal(err) } if err := os.WriteFile(filename2, []byte("foo"), 0o644); err != nil { //nolint:gosec t.Fatal(err) } if _, err := repo.executor("add", filename, filename2).executeString(); err != nil { t.Fatal(err) } if _, err := repo.executor("commit", "-m", "Commit\n").executeString(); err != nil { t.Fatal(err) } statuses, err = repo.Status() assert.Nil(t, err) assert.Empty(t, statuses) if err := os.Remove(filename2); err != nil { t.Fatal(err) } if err := os.Symlink(filename, filename2); err != nil { t.Fatal(err) } statuses, err = repo.Status() assert.Nil(t, err) assert.Equal(t, map[string]FileStatus{filename2: {X: StatusCodeTypeChanged, Y: StatusCodeUnmodified}}, statuses) // Add and commit if _, err := repo.executor("add", filename2).executeString(); err != nil { t.Fatal(err) } if _, err := repo.executor("commit", "-m", "Commit\n").executeString(); err != nil { t.Fatal(err) } statuses, err = repo.Status() assert.Nil(t, err) assert.Empty(t, statuses) } gittuf-0.9.0/internal/gitinterface/sync.go000066400000000000000000000046161475150141000206120ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package gitinterface import ( "fmt" "path" "strings" "github.com/jonboulle/clockwork" ) const DefaultRemoteName = "origin" func (r *Repository) PushRefSpec(remoteName string, refSpecs []string) error { args := []string{"push", remoteName} args = append(args, refSpecs...) 
_, err := r.executor(args...).executeString() if err != nil { return fmt.Errorf("unable to push: %w", err) } return nil } func (r *Repository) Push(remoteName string, refs []string) error { refSpecs := make([]string, 0, len(refs)) for _, ref := range refs { refSpec, err := r.RefSpec(ref, "", true) if err != nil { return err } refSpecs = append(refSpecs, refSpec) } return r.PushRefSpec(remoteName, refSpecs) } func (r *Repository) FetchRefSpec(remoteName string, refSpecs []string) error { args := []string{"fetch", remoteName} args = append(args, refSpecs...) _, err := r.executor(args...).executeString() if err != nil { return fmt.Errorf("unable to fetch: %w", err) } return nil } func (r *Repository) Fetch(remoteName string, refs []string, fastForwardOnly bool) error { refSpecs := make([]string, 0, len(refs)) for _, ref := range refs { refSpec, err := r.RefSpec(ref, "", fastForwardOnly) if err != nil { return err } refSpecs = append(refSpecs, refSpec) } return r.FetchRefSpec(remoteName, refSpecs) } func CloneAndFetchRepository(remoteURL, dir, initialBranch string, refs []string, bare bool) (*Repository, error) { if dir == "" { return nil, fmt.Errorf("target directory must be specified") } repo := &Repository{clock: clockwork.NewRealClock()} args := []string{"clone", remoteURL} if initialBranch != "" { initialBranch = strings.TrimPrefix(initialBranch, BranchRefPrefix) args = append(args, "--branch", initialBranch) } args = append(args, dir) if bare { args = append(args, "--bare") repo.gitDirPath = dir } else { repo.gitDirPath = path.Join(dir, ".git") } _, stdErr, err := repo.executor(args...).execute() if err != nil { return nil, fmt.Errorf("unable to clone repository: %s", stdErr) } return repo, repo.Fetch(DefaultRemoteName, refs, true) } func (r *Repository) CreateRemote(remoteName, remoteURL string) error { _, err := r.executor("remote", "add", remoteName, remoteURL).executeString() if err != nil { return fmt.Errorf("unable to add remote: %w", err) } return nil } gittuf-0.9.0/internal/gitinterface/sync_test.go000066400000000000000000000610061475150141000216450ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package gitinterface import ( "fmt" "os" "strings" "testing" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) func TestPushRefSpecRepository(t *testing.T) { remoteName := "origin" refName := "refs/heads/main" refSpecs := fmt.Sprintf("%s:%s", refName, refName) t.Run("assert remote repo does not have object until it is pushed", func(t *testing.T) { // Create local and remote repositories localTmpDir := t.TempDir() remoteTmpDir := t.TempDir() localRepo := CreateTestGitRepository(t, localTmpDir, false) remoteRepo := CreateTestGitRepository(t, remoteTmpDir, true) localTreeBuilder := NewTreeBuilder(localRepo) // Create the remote on the local repository if err := localRepo.CreateRemote(remoteName, remoteTmpDir); err != nil { t.Fatal(err) } // Create a tree in the local repository emptyBlobHash, err := localRepo.WriteBlob(nil) require.Nil(t, err) entries := []TreeEntry{NewEntryBlob("foo", emptyBlobHash)} tree, err := localTreeBuilder.WriteTreeFromEntries(entries) if err != nil { t.Fatal(err) } // Check that the tree is not present on the remote repository _, err = remoteRepo.GetAllFilesInTree(tree) assert.Contains(t, err.Error(), "fatal: not a tree object") // tree doesn't exist if _, err := localRepo.Commit(tree, refName, "Test commit\n", false); err != nil { t.Fatal(err) } err = localRepo.PushRefSpec(remoteName, 
[]string{refSpecs}) assert.Nil(t, err) expectedFiles := map[string]Hash{"foo": emptyBlobHash} remoteEntries, err := remoteRepo.GetAllFilesInTree(tree) assert.Nil(t, err) assert.Equal(t, expectedFiles, remoteEntries) }) t.Run("assert after push that src and dst refs match", func(t *testing.T) { // Create local and remote repositories localTmpDir := t.TempDir() remoteTmpDir := t.TempDir() localRepo := CreateTestGitRepository(t, localTmpDir, false) remoteRepo := CreateTestGitRepository(t, remoteTmpDir, true) localTreeBuilder := NewTreeBuilder(localRepo) // Create the remote on the local repository if err := localRepo.CreateRemote(remoteName, remoteTmpDir); err != nil { t.Fatal(err) } // Create a tree in the local repository emptyBlobHash, err := localRepo.WriteBlob(nil) require.Nil(t, err) entries := []TreeEntry{NewEntryBlob("foo", emptyBlobHash)} tree, err := localTreeBuilder.WriteTreeFromEntries(entries) if err != nil { t.Fatal(err) } if _, err := localRepo.Commit(tree, refName, "Test commit\n", false); err != nil { t.Fatal(err) } err = localRepo.PushRefSpec(remoteName, []string{refSpecs}) assert.Nil(t, err) localRef, err := localRepo.GetReference(refName) if err != nil { t.Fatal(err) } remoteRef, err := remoteRepo.GetReference(refName) if err != nil { t.Fatal(err) } assert.Equal(t, localRef, remoteRef) }) t.Run("assert no error when there are no updates to push", func(t *testing.T) { // Create local and remote repositories localTmpDir := t.TempDir() remoteTmpDir := t.TempDir() localRepo := CreateTestGitRepository(t, localTmpDir, false) remoteRepo := CreateTestGitRepository(t, remoteTmpDir, true) localTreeBuilder := NewTreeBuilder(localRepo) // Create the remote on the local repository if err := localRepo.CreateRemote(remoteName, remoteTmpDir); err != nil { t.Fatal(err) } // Create a tree in the local repository emptyBlobHash, err := localRepo.WriteBlob(nil) require.Nil(t, err) entries := []TreeEntry{NewEntryBlob("foo", emptyBlobHash)} tree, err := localTreeBuilder.WriteTreeFromEntries(entries) if err != nil { t.Fatal(err) } if _, err := localRepo.Commit(tree, refName, "Test commit\n", false); err != nil { t.Fatal(err) } err = localRepo.PushRefSpec(remoteName, []string{refSpecs}) assert.Nil(t, err) localRef, err := localRepo.GetReference(refName) if err != nil { t.Fatal(err) } remoteRef, err := remoteRepo.GetReference(refName) if err != nil { t.Fatal(err) } assert.Equal(t, localRef, remoteRef) // Push again; nothing to push err = localRepo.PushRefSpec(remoteName, []string{refSpecs}) assert.Nil(t, err) }) } func TestPushRepository(t *testing.T) { remoteName := "origin" refName := "refs/heads/main" t.Run("assert remote repo does not have object until it is pushed", func(t *testing.T) { // Create local and remote repositories localTmpDir := t.TempDir() remoteTmpDir := t.TempDir() localRepo := CreateTestGitRepository(t, localTmpDir, false) remoteRepo := CreateTestGitRepository(t, remoteTmpDir, true) localTreeBuilder := NewTreeBuilder(localRepo) // Create the remote on the local repository if err := localRepo.CreateRemote(remoteName, remoteTmpDir); err != nil { t.Fatal(err) } // Create a tree in the local repository emptyBlobHash, err := localRepo.WriteBlob(nil) require.Nil(t, err) entries := []TreeEntry{NewEntryBlob("foo", emptyBlobHash)} tree, err := localTreeBuilder.WriteTreeFromEntries(entries) if err != nil { t.Fatal(err) } // Check that the tree is not present on the remote repository _, err = remoteRepo.GetAllFilesInTree(tree) assert.Contains(t, err.Error(), "fatal: not a tree object") // 
tree doesn't exist if _, err := localRepo.Commit(tree, refName, "Test commit\n", false); err != nil { t.Fatal(err) } err = localRepo.Push(remoteName, []string{refName}) assert.Nil(t, err) expectedFiles := map[string]Hash{"foo": emptyBlobHash} remoteEntries, err := remoteRepo.GetAllFilesInTree(tree) assert.Nil(t, err) assert.Equal(t, expectedFiles, remoteEntries) }) t.Run("assert after push that src and dst refs match", func(t *testing.T) { // Create local and remote repositories localTmpDir := t.TempDir() remoteTmpDir := t.TempDir() localRepo := CreateTestGitRepository(t, localTmpDir, false) remoteRepo := CreateTestGitRepository(t, remoteTmpDir, true) localTreeBuilder := NewTreeBuilder(localRepo) // Create the remote on the local repository if err := localRepo.CreateRemote(remoteName, remoteTmpDir); err != nil { t.Fatal(err) } // Create a tree in the local repository emptyBlobHash, err := localRepo.WriteBlob(nil) require.Nil(t, err) entries := []TreeEntry{NewEntryBlob("foo", emptyBlobHash)} tree, err := localTreeBuilder.WriteTreeFromEntries(entries) if err != nil { t.Fatal(err) } if _, err := localRepo.Commit(tree, refName, "Test commit\n", false); err != nil { t.Fatal(err) } err = localRepo.Push(remoteName, []string{refName}) assert.Nil(t, err) localRef, err := localRepo.GetReference(refName) if err != nil { t.Fatal(err) } remoteRef, err := remoteRepo.GetReference(refName) if err != nil { t.Fatal(err) } assert.Equal(t, localRef, remoteRef) }) t.Run("assert no error when there are no updates to push", func(t *testing.T) { // Create local and remote repositories localTmpDir := t.TempDir() remoteTmpDir := t.TempDir() localRepo := CreateTestGitRepository(t, localTmpDir, false) remoteRepo := CreateTestGitRepository(t, remoteTmpDir, true) localTreeBuilder := NewTreeBuilder(localRepo) // Create the remote on the local repository if err := localRepo.CreateRemote(remoteName, remoteTmpDir); err != nil { t.Fatal(err) } // Create a tree in the local repository emptyBlobHash, err := localRepo.WriteBlob(nil) require.Nil(t, err) entries := []TreeEntry{NewEntryBlob("foo", emptyBlobHash)} tree, err := localTreeBuilder.WriteTreeFromEntries(entries) if err != nil { t.Fatal(err) } if _, err := localRepo.Commit(tree, refName, "Test commit\n", false); err != nil { t.Fatal(err) } err = localRepo.Push(remoteName, []string{refName}) assert.Nil(t, err) localRef, err := localRepo.GetReference(refName) if err != nil { t.Fatal(err) } remoteRef, err := remoteRepo.GetReference(refName) if err != nil { t.Fatal(err) } assert.Equal(t, localRef, remoteRef) // Push again; nothing to push err = localRepo.Push(remoteName, []string{refName}) assert.Nil(t, err) }) } func TestFetchRefSpecRepository(t *testing.T) { remoteName := "origin" refName := "refs/heads/main" refSpecs := fmt.Sprintf("+%s:%s", refName, refName) t.Run("assert local repo does not have object until fetched", func(t *testing.T) { // Create local and remote repositories localTmpDir := t.TempDir() remoteTmpDir := t.TempDir() localRepo := CreateTestGitRepository(t, localTmpDir, true) remoteRepo := CreateTestGitRepository(t, remoteTmpDir, false) remoteTreeBuilder := NewTreeBuilder(remoteRepo) // Create the remote on the local repository if err := localRepo.CreateRemote(remoteName, remoteTmpDir); err != nil { t.Fatal(err) } // Create a tree in the remote repository emptyBlobHash, err := remoteRepo.WriteBlob(nil) require.Nil(t, err) entries := []TreeEntry{NewEntryBlob("foo", emptyBlobHash)} tree, err := remoteTreeBuilder.WriteTreeFromEntries(entries) if err != nil { 
t.Fatal(err) } // Check that the tree is not present on the local repository _, err = localRepo.GetAllFilesInTree(tree) assert.Contains(t, err.Error(), "fatal: not a tree object") // tree doesn't exist _, err = remoteRepo.Commit(tree, refName, "Test commit\n", false) if err != nil { t.Fatal(err) } err = localRepo.FetchRefSpec(remoteName, []string{refSpecs}) assert.Nil(t, err) expectedFiles := map[string]Hash{"foo": emptyBlobHash} localEntries, err := localRepo.GetAllFilesInTree(tree) assert.Nil(t, err) assert.Equal(t, expectedFiles, localEntries) }) t.Run("assert after fetch that both refs match", func(t *testing.T) { // Create local and remote repositories localTmpDir := t.TempDir() remoteTmpDir := t.TempDir() localRepo := CreateTestGitRepository(t, localTmpDir, true) remoteRepo := CreateTestGitRepository(t, remoteTmpDir, false) remoteTreeBuilder := NewTreeBuilder(remoteRepo) // Create the remote on the local repository if err := localRepo.CreateRemote(remoteName, remoteTmpDir); err != nil { t.Fatal(err) } // Create a tree in the remote repository emptyBlobHash, err := remoteRepo.WriteBlob(nil) require.Nil(t, err) entries := []TreeEntry{NewEntryBlob("foo", emptyBlobHash)} tree, err := remoteTreeBuilder.WriteTreeFromEntries(entries) if err != nil { t.Fatal(err) } _, err = remoteRepo.Commit(tree, refName, "Test commit\n", false) if err != nil { t.Fatal(err) } err = localRepo.FetchRefSpec(remoteName, []string{refSpecs}) assert.Nil(t, err) localRef, err := localRepo.GetReference(refName) if err != nil { t.Fatal(err) } remoteRef, err := remoteRepo.GetReference(refName) if err != nil { t.Fatal(err) } assert.Equal(t, localRef, remoteRef) }) t.Run("assert no error when there are no updates to fetch", func(t *testing.T) { // Create local and remote repositories localTmpDir := t.TempDir() remoteTmpDir := t.TempDir() localRepo := CreateTestGitRepository(t, localTmpDir, true) remoteRepo := CreateTestGitRepository(t, remoteTmpDir, false) remoteTreeBuilder := NewTreeBuilder(remoteRepo) // Create the remote on the local repository if err := localRepo.CreateRemote(remoteName, remoteTmpDir); err != nil { t.Fatal(err) } // Create a tree in the remote repository emptyBlobHash, err := remoteRepo.WriteBlob(nil) require.Nil(t, err) entries := []TreeEntry{NewEntryBlob("foo", emptyBlobHash)} tree, err := remoteTreeBuilder.WriteTreeFromEntries(entries) if err != nil { t.Fatal(err) } _, err = remoteRepo.Commit(tree, refName, "Test commit\n", false) if err != nil { t.Fatal(err) } err = localRepo.FetchRefSpec(remoteName, []string{refSpecs}) assert.Nil(t, err) localRef, err := localRepo.GetReference(refName) if err != nil { t.Fatal(err) } remoteRef, err := remoteRepo.GetReference(refName) if err != nil { t.Fatal(err) } assert.Equal(t, localRef, remoteRef) // Fetch again, nothing to fetch err = localRepo.FetchRefSpec(remoteName, []string{refSpecs}) assert.Nil(t, err) newLocalRef, err := localRepo.GetReference(refName) require.Nil(t, err) assert.Equal(t, localRef, newLocalRef) }) } func TestFetchRepository(t *testing.T) { remoteName := "origin" refName := "refs/heads/main" t.Run("assert local repo does not have object until fetched", func(t *testing.T) { // Create local and remote repositories localTmpDir := t.TempDir() remoteTmpDir := t.TempDir() localRepo := CreateTestGitRepository(t, localTmpDir, true) remoteRepo := CreateTestGitRepository(t, remoteTmpDir, false) remoteTreeBuilder := NewTreeBuilder(remoteRepo) // Create the remote on the local repository if err := localRepo.CreateRemote(remoteName, remoteTmpDir); 
err != nil { t.Fatal(err) } // Create a tree in the remote repository emptyBlobHash, err := remoteRepo.WriteBlob(nil) require.Nil(t, err) entries := []TreeEntry{NewEntryBlob("foo", emptyBlobHash)} tree, err := remoteTreeBuilder.WriteTreeFromEntries(entries) if err != nil { t.Fatal(err) } // Check that the tree is not present on the local repository _, err = localRepo.GetAllFilesInTree(tree) assert.Contains(t, err.Error(), "fatal: not a tree object") // tree doesn't exist _, err = remoteRepo.Commit(tree, refName, "Test commit\n", false) if err != nil { t.Fatal(err) } err = localRepo.Fetch(remoteName, []string{refName}, true) assert.Nil(t, err) expectedFiles := map[string]Hash{"foo": emptyBlobHash} localEntries, err := localRepo.GetAllFilesInTree(tree) assert.Nil(t, err) assert.Equal(t, expectedFiles, localEntries) }) t.Run("assert after fetch that both refs match", func(t *testing.T) { // Create local and remote repositories localTmpDir := t.TempDir() remoteTmpDir := t.TempDir() localRepo := CreateTestGitRepository(t, localTmpDir, true) remoteRepo := CreateTestGitRepository(t, remoteTmpDir, false) remoteTreeBuilder := NewTreeBuilder(remoteRepo) // Create the remote on the local repository if err := localRepo.CreateRemote(remoteName, remoteTmpDir); err != nil { t.Fatal(err) } // Create a tree in the remote repository emptyBlobHash, err := remoteRepo.WriteBlob(nil) require.Nil(t, err) entries := []TreeEntry{NewEntryBlob("foo", emptyBlobHash)} tree, err := remoteTreeBuilder.WriteTreeFromEntries(entries) if err != nil { t.Fatal(err) } _, err = remoteRepo.Commit(tree, refName, "Test commit\n", false) if err != nil { t.Fatal(err) } err = localRepo.Fetch(remoteName, []string{refName}, true) assert.Nil(t, err) localRef, err := localRepo.GetReference(refName) if err != nil { t.Fatal(err) } remoteRef, err := remoteRepo.GetReference(refName) if err != nil { t.Fatal(err) } assert.Equal(t, localRef, remoteRef) }) t.Run("assert no error when there are no updates to fetch", func(t *testing.T) { // Create local and remote repositories localTmpDir := t.TempDir() remoteTmpDir := t.TempDir() localRepo := CreateTestGitRepository(t, localTmpDir, true) remoteRepo := CreateTestGitRepository(t, remoteTmpDir, false) remoteTreeBuilder := NewTreeBuilder(remoteRepo) // Create the remote on the local repository if err := localRepo.CreateRemote(remoteName, remoteTmpDir); err != nil { t.Fatal(err) } // Create a tree in the remote repository emptyBlobHash, err := remoteRepo.WriteBlob(nil) require.Nil(t, err) entries := []TreeEntry{NewEntryBlob("foo", emptyBlobHash)} tree, err := remoteTreeBuilder.WriteTreeFromEntries(entries) if err != nil { t.Fatal(err) } _, err = remoteRepo.Commit(tree, refName, "Test commit\n", false) if err != nil { t.Fatal(err) } err = localRepo.Fetch(remoteName, []string{refName}, true) assert.Nil(t, err) localRef, err := localRepo.GetReference(refName) if err != nil { t.Fatal(err) } remoteRef, err := remoteRepo.GetReference(refName) if err != nil { t.Fatal(err) } assert.Equal(t, localRef, remoteRef) // Fetch again, nothing to fetch err = localRepo.Fetch(remoteName, []string{refName}, true) assert.Nil(t, err) newLocalRef, err := localRepo.GetReference(refName) require.Nil(t, err) assert.Equal(t, localRef, newLocalRef) }) } func TestCloneAndFetchRepository(t *testing.T) { refName := "refs/heads/main" anotherRefName := "refs/heads/feature" t.Run("clone and fetch remote repository, verify refs match, not bare", func(t *testing.T) { remoteTmpDir := t.TempDir() localTmpDir := t.TempDir() remoteRepo := 
CreateTestGitRepository(t, remoteTmpDir, false) remoteTreeBuilder := NewTreeBuilder(remoteRepo) emptyBlobHash, err := remoteRepo.WriteBlob(nil) require.Nil(t, err) entries := []TreeEntry{NewEntryBlob("foo", emptyBlobHash)} tree, err := remoteTreeBuilder.WriteTreeFromEntries(entries) if err != nil { t.Fatal(err) } mainCommit, err := remoteRepo.Commit(tree, refName, "Commit to main", false) if err != nil { t.Fatal(err) } otherCommit, err := remoteRepo.Commit(tree, anotherRefName, "Commit to feature", false) if err != nil { t.Fatal(err) } if err := remoteRepo.SetReference("HEAD", mainCommit); err != nil { t.Fatal(err) } localRepo, err := CloneAndFetchRepository(remoteTmpDir, localTmpDir, refName, []string{anotherRefName}, false) if err != nil { t.Fatal(err) } localMainCommit, err := localRepo.GetReference(refName) assert.Nil(t, err) localOtherCommit, err := localRepo.GetReference(anotherRefName) assert.Nil(t, err) assert.Equal(t, mainCommit, localMainCommit) assert.Equal(t, otherCommit, localOtherCommit) assert.True(t, strings.HasSuffix(localRepo.gitDirPath, ".git")) dirEntries, err := os.ReadDir(strings.TrimSuffix(localRepo.gitDirPath, ".git")) if err != nil { t.Fatal(err) } assert.Equal(t, "foo", dirEntries[1].Name()) // [0] will be the entry for the .git directory }) t.Run("clone and fetch remote repository without specifying initial branch, verify refs match, not bare", func(t *testing.T) { remoteTmpDir := t.TempDir() localTmpDir := t.TempDir() remoteRepo := CreateTestGitRepository(t, remoteTmpDir, false) remoteTreeBuilder := NewTreeBuilder(remoteRepo) emptyBlobHash, err := remoteRepo.WriteBlob(nil) require.Nil(t, err) entries := []TreeEntry{NewEntryBlob("foo", emptyBlobHash)} tree, err := remoteTreeBuilder.WriteTreeFromEntries(entries) if err != nil { t.Fatal(err) } mainCommit, err := remoteRepo.Commit(tree, refName, "Commit to main", false) if err != nil { t.Fatal(err) } otherCommit, err := remoteRepo.Commit(tree, anotherRefName, "Commit to feature", false) if err != nil { t.Fatal(err) } if err := remoteRepo.SetReference("HEAD", mainCommit); err != nil { t.Fatal(err) } localRepo, err := CloneAndFetchRepository(remoteTmpDir, localTmpDir, "", []string{anotherRefName}, false) if err != nil { t.Fatal(err) } localMainCommit, err := localRepo.GetReference(refName) assert.Nil(t, err) localOtherCommit, err := localRepo.GetReference(anotherRefName) assert.Nil(t, err) assert.Equal(t, mainCommit, localMainCommit) assert.Equal(t, otherCommit, localOtherCommit) assert.True(t, strings.HasSuffix(localRepo.gitDirPath, ".git")) dirEntries, err := os.ReadDir(strings.TrimSuffix(localRepo.gitDirPath, ".git")) if err != nil { t.Fatal(err) } assert.Equal(t, "foo", dirEntries[1].Name()) // [0] will be the entry for the .git directory }) t.Run("clone and fetch remote repository with only one ref, verify refs match, not bare", func(t *testing.T) { remoteTmpDir := t.TempDir() localTmpDir := t.TempDir() remoteRepo := CreateTestGitRepository(t, remoteTmpDir, false) remoteTreeBuilder := NewTreeBuilder(remoteRepo) emptyBlobHash, err := remoteRepo.WriteBlob(nil) require.Nil(t, err) entries := []TreeEntry{NewEntryBlob("foo", emptyBlobHash)} tree, err := remoteTreeBuilder.WriteTreeFromEntries(entries) if err != nil { t.Fatal(err) } mainCommit, err := remoteRepo.Commit(tree, refName, "Commit to main", false) if err != nil { t.Fatal(err) } if err := remoteRepo.SetReference("HEAD", mainCommit); err != nil { t.Fatal(err) } localRepo, err := CloneAndFetchRepository(remoteTmpDir, localTmpDir, "", []string{}, false) if err != 
nil { t.Fatal(err) } localMainCommit, err := localRepo.GetReference(refName) assert.Nil(t, err) assert.Equal(t, mainCommit, localMainCommit) assert.True(t, strings.HasSuffix(localRepo.gitDirPath, ".git")) dirEntries, err := os.ReadDir(strings.TrimSuffix(localRepo.gitDirPath, ".git")) if err != nil { t.Fatal(err) } assert.Equal(t, "foo", dirEntries[1].Name()) // [0] will be the entry for the .git directory }) t.Run("clone and fetch remote repository, verify refs match, bare", func(t *testing.T) { remoteTmpDir := t.TempDir() localTmpDir := t.TempDir() remoteRepo := CreateTestGitRepository(t, remoteTmpDir, false) remoteTreeBuilder := NewTreeBuilder(remoteRepo) emptyBlobHash, err := remoteRepo.WriteBlob(nil) require.Nil(t, err) entries := []TreeEntry{NewEntryBlob("foo", emptyBlobHash)} tree, err := remoteTreeBuilder.WriteTreeFromEntries(entries) if err != nil { t.Fatal(err) } mainCommit, err := remoteRepo.Commit(tree, refName, "Commit to main", false) if err != nil { t.Fatal(err) } otherCommit, err := remoteRepo.Commit(tree, anotherRefName, "Commit to feature", false) if err != nil { t.Fatal(err) } if err := remoteRepo.SetReference("HEAD", mainCommit); err != nil { t.Fatal(err) } localRepo, err := CloneAndFetchRepository(remoteTmpDir, localTmpDir, refName, []string{anotherRefName}, true) if err != nil { t.Fatal(err) } localMainCommit, err := localRepo.GetReference(refName) assert.Nil(t, err) localOtherCommit, err := localRepo.GetReference(anotherRefName) assert.Nil(t, err) assert.Equal(t, mainCommit, localMainCommit) assert.Equal(t, otherCommit, localOtherCommit) assert.False(t, strings.HasSuffix(localRepo.gitDirPath, ".git")) dirEntries, err := os.ReadDir(localRepo.gitDirPath) if err != nil { t.Fatal(err) } assert.Equal(t, "FETCH_HEAD", dirEntries[0].Name()) }) t.Run("clone and fetch remote repository without specifying initial branch, verify refs match, bare", func(t *testing.T) { remoteTmpDir := t.TempDir() localTmpDir := t.TempDir() remoteRepo := CreateTestGitRepository(t, remoteTmpDir, false) remoteTreeBuilder := NewTreeBuilder(remoteRepo) emptyBlobHash, err := remoteRepo.WriteBlob(nil) require.Nil(t, err) entries := []TreeEntry{NewEntryBlob("foo", emptyBlobHash)} tree, err := remoteTreeBuilder.WriteTreeFromEntries(entries) if err != nil { t.Fatal(err) } mainCommit, err := remoteRepo.Commit(tree, refName, "Commit to main", false) if err != nil { t.Fatal(err) } otherCommit, err := remoteRepo.Commit(tree, anotherRefName, "Commit to feature", false) if err != nil { t.Fatal(err) } if err := remoteRepo.SetReference("HEAD", mainCommit); err != nil { t.Fatal(err) } localRepo, err := CloneAndFetchRepository(remoteTmpDir, localTmpDir, "", []string{anotherRefName}, true) if err != nil { t.Fatal(err) } localMainCommit, err := localRepo.GetReference(refName) assert.Nil(t, err) localOtherCommit, err := localRepo.GetReference(anotherRefName) assert.Nil(t, err) assert.Equal(t, mainCommit, localMainCommit) assert.Equal(t, otherCommit, localOtherCommit) assert.False(t, strings.HasSuffix(localRepo.gitDirPath, ".git")) dirEntries, err := os.ReadDir(localRepo.gitDirPath) if err != nil { t.Fatal(err) } assert.Equal(t, "FETCH_HEAD", dirEntries[0].Name()) }) t.Run("clone and fetch remote repository with only one ref, verify refs match, bare", func(t *testing.T) { remoteTmpDir := t.TempDir() localTmpDir := t.TempDir() remoteRepo := CreateTestGitRepository(t, remoteTmpDir, false) remoteTreeBuilder := NewTreeBuilder(remoteRepo) emptyBlobHash, err := remoteRepo.WriteBlob(nil) require.Nil(t, err) entries := 
[]TreeEntry{NewEntryBlob("foo", emptyBlobHash)} tree, err := remoteTreeBuilder.WriteTreeFromEntries(entries) if err != nil { t.Fatal(err) } mainCommit, err := remoteRepo.Commit(tree, refName, "Commit to main", false) if err != nil { t.Fatal(err) } if err := remoteRepo.SetReference("HEAD", mainCommit); err != nil { t.Fatal(err) } localRepo, err := CloneAndFetchRepository(remoteTmpDir, localTmpDir, "", []string{}, true) if err != nil { t.Fatal(err) } localMainCommit, err := localRepo.GetReference(refName) assert.Nil(t, err) assert.Equal(t, mainCommit, localMainCommit) assert.False(t, strings.HasSuffix(localRepo.gitDirPath, ".git")) dirEntries, err := os.ReadDir(localRepo.gitDirPath) if err != nil { t.Fatal(err) } assert.Equal(t, "FETCH_HEAD", dirEntries[0].Name()) }) } gittuf-0.9.0/internal/gitinterface/tag.go000066400000000000000000000111551475150141000204050ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package gitinterface import ( "context" "errors" "fmt" "io" "strings" "github.com/gittuf/gittuf/internal/signerverifier/gpg" "github.com/gittuf/gittuf/internal/signerverifier/sigstore" "github.com/gittuf/gittuf/internal/signerverifier/ssh" "github.com/go-git/go-git/v5/plumbing" "github.com/go-git/go-git/v5/plumbing/object" "github.com/go-git/go-git/v5/storage/memory" "github.com/secure-systems-lab/go-securesystemslib/signerverifier" ) var ( ErrTagAlreadyExists = errors.New("tag already exists") ) // TagUsingSpecificKey creates a Git tag signed using the specified, PEM encoded // SSH or GPG key. It is primarily intended for use with testing. As of now, // gittuf is not expected to be used to create tags in developer workflows, // though this may change with command compatibility. func (r *Repository) TagUsingSpecificKey(target Hash, name, message string, signingKeyPEMBytes []byte) (Hash, error) { gitConfig, err := r.GetGitConfig() if err != nil { return ZeroHash, err } goGitRepo, err := r.GetGoGitRepository() if err != nil { return ZeroHash, err } targetObj, err := goGitRepo.Object(plumbing.AnyObject, plumbing.NewHash(target.String())) if err != nil { return ZeroHash, err } if !strings.HasSuffix(message, "\n") { message += "\n" } tag := &object.Tag{ Name: name, Tagger: object.Signature{ Name: gitConfig["user.name"], Email: gitConfig["user.email"], When: r.clock.Now(), }, Message: message, TargetType: targetObj.Type(), Target: targetObj.ID(), } tagContents, err := getTagBytesWithoutSignature(tag) if err != nil { return ZeroHash, err } signature, err := signGitObjectUsingKey(tagContents, signingKeyPEMBytes) if err != nil { return ZeroHash, err } tag.PGPSignature = signature obj := goGitRepo.Storer.NewEncodedObject() if err := tag.Encode(obj); err != nil { return ZeroHash, err } tagID, err := goGitRepo.Storer.SetEncodedObject(obj) if err != nil { return ZeroHash, err } tagIDHash, err := NewHash(tagID.String()) if err != nil { return ZeroHash, err } return tagIDHash, r.SetReference(TagReferenceName(name), tagIDHash) } // GetTagTarget returns the ID of the Git object a tag points to. 
func (r *Repository) GetTagTarget(tagID Hash) (Hash, error) { targetID, err := r.executor("rev-list", "-n", "1", tagID.String()).executeString() if err != nil { return ZeroHash, fmt.Errorf("unable to resolve tag's target ID: %w", err) } hash, err := NewHash(targetID) if err != nil { return ZeroHash, fmt.Errorf("invalid format for target ID: %w", err) } return hash, nil } // verifyTagSignature verifies a signature for the specified tag using the // provided public key. func (r *Repository) verifyTagSignature(ctx context.Context, tagID Hash, key *signerverifier.SSLibKey) error { goGitRepo, err := r.GetGoGitRepository() if err != nil { return fmt.Errorf("error opening repository: %w", err) } tag, err := goGitRepo.TagObject(plumbing.NewHash(tagID.String())) if err != nil { return fmt.Errorf("unable to load commit object: %w", err) } switch key.KeyType { case gpg.KeyType: if _, err := tag.Verify(key.KeyVal.Public); err != nil { return ErrIncorrectVerificationKey } return nil case ssh.KeyType: tagContents, err := getTagBytesWithoutSignature(tag) if err != nil { return errors.Join(ErrVerifyingSSHSignature, err) } tagSignature := []byte(tag.PGPSignature) if err := verifySSHKeySignature(ctx, key, tagContents, tagSignature); err != nil { return errors.Join(ErrIncorrectVerificationKey, err) } return nil case sigstore.KeyType: tagContents, err := getTagBytesWithoutSignature(tag) if err != nil { return errors.Join(ErrVerifyingSigstoreSignature, err) } tagSignature := []byte(tag.PGPSignature) if err := verifyGitsignSignature(ctx, r, key, tagContents, tagSignature); err != nil { return errors.Join(ErrIncorrectVerificationKey, err) } return nil } return ErrUnknownSigningMethod } func (r *Repository) ensureIsTag(tagID Hash) error { objType, err := r.executor("cat-file", "-t", tagID.String()).executeString() if err != nil { return fmt.Errorf("unable to inspect if object is tag: %w", err) } else if objType != "tag" { return fmt.Errorf("requested Git ID '%s' is not a tag object", tagID.String()) } return nil } func getTagBytesWithoutSignature(tag *object.Tag) ([]byte, error) { tagEncoded := memory.NewStorage().NewEncodedObject() if err := tag.EncodeWithoutSignature(tagEncoded); err != nil { return nil, err } r, err := tagEncoded.Reader() if err != nil { return nil, err } return io.ReadAll(r) } gittuf-0.9.0/internal/gitinterface/tag_test.go000066400000000000000000000045431475150141000214470ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package gitinterface import ( "context" "os" "path/filepath" "testing" "github.com/gittuf/gittuf/internal/signerverifier/gpg" "github.com/gittuf/gittuf/internal/signerverifier/ssh" artifacts "github.com/gittuf/gittuf/internal/testartifacts" "github.com/stretchr/testify/assert" ) func TestGetTagTarget(t *testing.T) { tempDir := t.TempDir() repo := CreateTestGitRepository(t, tempDir, false) treeBuilder := NewTreeBuilder(repo) // Write empty tree emptyTreeID, err := treeBuilder.WriteTreeFromEntries(nil) if err != nil { t.Fatal(err) } commitID, err := repo.Commit(emptyTreeID, "refs/heads/main", "Initial commit\n", true) if err != nil { t.Fatal(err) } tagID, err := repo.TagUsingSpecificKey(commitID, "test-tag", "test-tag\n", artifacts.SSHED25519Private) if err != nil { t.Fatal(err) } targetID, err := repo.GetTagTarget(tagID) assert.Nil(t, err) assert.Equal(t, commitID, targetID) } func TestRepositoryVerifyTag(t *testing.T) { tempDir := t.TempDir() repo := CreateTestGitRepository(t, tempDir, false) treeBuilder := 
NewTreeBuilder(repo) // Write empty tree emptyTreeID, err := treeBuilder.WriteTreeFromEntries(nil) if err != nil { t.Fatal(err) } commitID, err := repo.Commit(emptyTreeID, "refs/heads/main", "Initial commit\n", true) if err != nil { t.Fatal(err) } sshSignedTag, err := repo.TagUsingSpecificKey(commitID, "test-tag-ssh", "test-tag-ssh\n", artifacts.SSHED25519Private) if err != nil { t.Fatal(err) } keyDir := t.TempDir() keyPath := filepath.Join(keyDir, "ssh-key.pub") if err := os.WriteFile(keyPath, artifacts.SSHED25519PublicSSH, 0o600); err != nil { t.Fatal(err) } sshKey, err := ssh.NewKeyFromFile(keyPath) if err != nil { t.Fatal(err) } gpgSignedTag, err := repo.TagUsingSpecificKey(commitID, "test-tag-gpg", "test-tag-gpg\n", artifacts.GPGKey1Private) if err != nil { t.Fatal(err) } gpgKey, err := gpg.LoadGPGKeyFromBytes(artifacts.GPGKey1Public) if err != nil { t.Fatal(err) } t.Run("ssh signed tag, verify with ssh key", func(t *testing.T) { err = repo.verifyTagSignature(context.Background(), sshSignedTag, sshKey) assert.Nil(t, err) }) t.Run("gpg signed tag, verify with gpg key", func(t *testing.T) { err = repo.verifyTagSignature(context.Background(), gpgSignedTag, gpgKey) assert.Nil(t, err) }) } gittuf-0.9.0/internal/gitinterface/tree.go000066400000000000000000000434101475150141000205700ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package gitinterface import ( "bytes" "errors" "fmt" "os" "path" "strings" ) var ( ErrTreeDoesNotHavePath = errors.New("tree does not have requested path") ErrCopyingBlobIDsDoNotMatch = errors.New("blob ID in local repository does not match upstream repository") ErrCannotCreateSubtreeIntoRootTree = errors.New("subtree path target cannot be empty or root of tree") ) func (r *Repository) EmptyTree() (Hash, error) { treeID, err := r.executor("hash-object", "-t", "tree", "--stdin").executeString() if err != nil { return ZeroHash, fmt.Errorf("unable to hash empty tree: %w", err) } hash, err := NewHash(treeID) if err != nil { return ZeroHash, fmt.Errorf("empty tree has invalid Git ID: %w", err) } return hash, nil } // GetPathIDInTree returns the Git ID pointed to by the path in the specified // tree if the path exists. If not, a corresponding error is returned. For // example, if the tree contains a single blob `foo/bar/baz`, querying the ID // for `foo/bar/baz` will return the blob ID for baz. Querying the ID for // `foo/bar` will return the intermediate tree ID for bar, while querying for // `foo/baz` will return an error. func (r *Repository) GetPathIDInTree(treePath string, treeID Hash) (Hash, error) { treePath = strings.TrimSuffix(treePath, "/") components := strings.Split(treePath, "/") currentTreeID := treeID for len(components) != 0 { items, err := r.GetTreeItems(currentTreeID) if err != nil { return nil, err } entryID, has := items[components[0]] if !has { return nil, fmt.Errorf("%w: %s", ErrTreeDoesNotHavePath, treePath) } currentTreeID = entryID components = components[1:] } return currentTreeID, nil } // GetTreeItems returns the items in a specified Git tree without recursively // expanding subtrees. func (r *Repository) GetTreeItems(treeID Hash) (map[string]Hash, error) { // From Git 2.36, we can use --format here. However, it appears a not // insignificant number of developers are still on Git 2.34.1, a side effect // of being on Ubuntu 22.04. 22.04 is still widely used in WSL2 environments. 
// So, we're removing --format and parsing the output differently to handle // the extra information for each entry we don't need. stdOut, err := r.executor("ls-tree", treeID.String()).executeString() if err != nil { return nil, fmt.Errorf("unable to enumerate items in tree '%s': %w", treeID.String(), err) } if stdOut == "" { return nil, nil // alternatively, just check if treeID is empty tree? } entries := strings.Split(stdOut, "\n") if len(entries) == 0 { return nil, nil } items := map[string]Hash{} for _, entry := range entries { // Without --format, the output is in the following format: // SP SP TAB // From: https://git-scm.com/docs/git-ls-tree/2.34.1#_output_format entrySplit := strings.Split(entry, " ") // entrySplit[0] is -- discard // entrySplit[1] is -- discard // entrySplit[2] is TAB -- keep entrySplit = strings.Split(entrySplit[2], "\t") // is really the object ID hash, err := NewHash(entrySplit[0]) if err != nil { return nil, fmt.Errorf("invalid Git ID '%s' for path '%s': %w", entrySplit[0], entrySplit[1], err) } items[entrySplit[1]] = hash } return items, nil } // GetAllFilesInTree returns all filepaths and the corresponding blob hashes in // the specified tree. func (r *Repository) GetAllFilesInTree(treeID Hash) (map[string]Hash, error) { // From Git 2.36, we can use --format here. However, it appears a not // insignificant number of developers are still on Git 2.34.1, a side effect // of being on Ubuntu 22.04. 22.04 is still widely used in WSL2 environments. // So, we're removing --format and parsing the output differently to handle // the extra information for each entry we don't need. stdOut, err := r.executor("ls-tree", "-r", treeID.String()).executeString() if err != nil { return nil, fmt.Errorf("unable to enumerate all files in tree: %w", err) } if stdOut == "" { return nil, nil // alternatively, just check if treeID is empty tree? } entries := strings.Split(stdOut, "\n") if len(entries) == 0 { return nil, nil } files := map[string]Hash{} for _, entry := range entries { // Without --format, the output is in the following format: // SP SP TAB // From: https://git-scm.com/docs/git-ls-tree/2.34.1#_output_format entrySplit := strings.Split(entry, " ") // entrySplit[0] is -- discard // entrySplit[1] is -- discard // entrySplit[2] is TAB -- keep entrySplit = strings.Split(entrySplit[2], "\t") // is really the object ID hash, err := NewHash(entrySplit[0]) if err != nil { return nil, fmt.Errorf("invalid Git ID '%s' for path '%s': %w", entrySplit[0], entrySplit[1], err) } files[entrySplit[1]] = hash } return files, nil } // GetMergeTree computes the merge tree for the commits passed in. The tree is // not written to the object store. Assuming a typical merge workflow, the first // commit is expected to be the tip of the base branch. As such, the second // commit is expected to be merged into the first. If the first commit is zero, // the second commit's tree is returned. func (r *Repository) GetMergeTree(commitAID, commitBID Hash) (Hash, error) { if err := r.ensureIsCommit(commitBID); err != nil { return ZeroHash, err } if commitAID.IsZero() { // fast-forward merge -> use tree ID from commitB return r.GetCommitTreeID(commitBID) } // Only commitB needs to be non-zero, we can allow fast-forward merges when // the base commit is zero. 
So, check this only after above if err := r.ensureIsCommit(commitAID); err != nil { return ZeroHash, err } niceGit, err := isNiceGitVersion() if err != nil { return ZeroHash, err } var stdOut string if !niceGit { // Older Git versions do not support merge-tree, and, as such, require // quite a long workaround to find what the merge tree is. This // workaround boils down to: // Create new branch > Merge into said branch > Extract tree hash currentBranch, err := r.executor("branch", "--show-current").executeString() if err != nil { return ZeroHash, fmt.Errorf("unable to determine current branch: %w", err) } if currentBranch == "" { return ZeroHash, fmt.Errorf("currently in detached HEAD state, please switch to a branch") } _, err = r.executor("checkout", commitAID.String()).executeString() if err != nil { return ZeroHash, fmt.Errorf("unable to enter detached HEAD state: %w", err) } _, err = r.executor("merge", "-m", "Computing merge tree", commitBID.String()).executeString() if err != nil { // Attempt to abort the merge in all cases as a failsafe _, abrtErr := r.executor("merge", "--abort").executeString() if abrtErr != nil { return ZeroHash, fmt.Errorf("unable to perform merge, and unable to abort merge: %w, %w", err, abrtErr) } return ZeroHash, fmt.Errorf("unable to perform merge: %w", err) } stdOut, err = r.executor("show", "-s", "--format=%T").executeString() if err != nil { return ZeroHash, fmt.Errorf("unable to extract tree hash of merge commit: %w", err) } // Switch back to the branch the user was on _, err = r.executor("checkout", currentBranch).executeString() if err != nil { return ZeroHash, fmt.Errorf("unable to switch back to original branch: %w", err) } } else { stdOut, err = r.executor("merge-tree", commitAID.String(), commitBID.String()).executeString() if err != nil { return ZeroHash, fmt.Errorf("unable to compute merge tree: %w", err) } } treeHash, err := NewHash(stdOut) if err != nil { return ZeroHash, fmt.Errorf("invalid merge tree ID: %w", err) } return treeHash, nil } // CreateSubtreeFromUpstreamRepository accepts an upstream repository handler // and a commit ID in the upstream repository. This information is used to copy // the entire contents of the commit's Git tree into the specified localPath in // the localRef. A new commit is added to localRef with the changes made to // localPath. localPath represents a directory path where the changes are copied // to. Existing items in that directory are overwritten in the subsequently // created commit in localRef. localPath must be specified, if left blank (say // to imply copying into the root directory of the downstream repository), // creating a subtree will fail. 
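// Illustrative sketch (an assumption, not part of the original file): copy the
// tree of an upstream commit into the "third_party/upstream" directory of the
// local main branch. The ref and path values are placeholders.
func exampleVendorUpstreamSubtree(local, upstream *Repository, upstreamCommitID Hash) (Hash, error) {
	return local.CreateSubtreeFromUpstreamRepository(upstream, upstreamCommitID, "refs/heads/main", "third_party/upstream")
}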
func (r *Repository) CreateSubtreeFromUpstreamRepository(upstream *Repository, upstreamCommitID Hash, localRef, localPath string) (Hash, error) { if localPath == "" { return nil, ErrCannotCreateSubtreeIntoRootTree } currentTip, err := r.GetReference(localRef) if err != nil { if !errors.Is(err, ErrReferenceNotFound) { return nil, err } } entries := []TreeEntry{} if !currentTip.IsZero() { currentRefTree, err := r.GetCommitTreeID(currentTip) if err != nil { return nil, err } currentFiles, err := r.GetAllFilesInTree(currentRefTree) if err != nil { return nil, err } // Ignore entries for `localPath` to account for upstream deletions // If localPath is foo/, we want to ignore all items under foo/ // If localPath is foo, we want to ignore all items under foo/ // If localPath is foo, we DO NOT want to remove all items under foobar/ // So, add the / suffix if necessary to localPath if !strings.HasSuffix(localPath, "/") { localPath += "/" } // Create list of TreeEntry objects representing all blobs except those // currently under localPath for filePath, blobID := range currentFiles { if !strings.HasPrefix(filePath, localPath) { entries = append(entries, NewEntryBlob(filePath, blobID)) } } } // Remove trailing "/" now localPath = strings.TrimSuffix(localPath, "/") treeID, err := upstream.GetCommitTreeID(upstreamCommitID) if err != nil { return nil, err } if r.HasObject(treeID) { // Use existing intermediate tree entries = append(entries, NewEntryTree(localPath, treeID)) } else { // We have to create the intermediate tree for localPath filesToCopy, err := upstream.GetAllFilesInTree(treeID) if err != nil { return nil, err } for blobPath, blobID := range filesToCopy { // if blob already exists, we don't need to carry out expensive // read/write if !r.HasObject(blobID) { blob, err := upstream.ReadBlob(blobID) if err != nil { return nil, err } localBlobID, err := r.WriteBlob(blob) if err != nil { return nil, err } if !localBlobID.Equal(blobID) { return nil, ErrCopyingBlobIDsDoNotMatch } } // add blob to entries, with the path including the localPath prefix entries = append(entries, NewEntryBlob(path.Join(localPath, blobPath), blobID)) } } treeBuilder := NewTreeBuilder(r) newTreeID, err := treeBuilder.WriteTreeFromEntries(entries) if err != nil { return nil, err } commitID, err := r.Commit(newTreeID, localRef, fmt.Sprintf("Update contents of '%s'\n", localPath), false) if err != nil { return nil, err } if !r.IsBare() { head, err := r.GetSymbolicReferenceTarget("HEAD") if err != nil { return nil, err } if head == localRef { worktree := strings.TrimSuffix(r.gitDirPath, ".git") // TODO: this doesn't support detached git dir cwd, err := os.Getwd() if err != nil { return nil, err } if err := os.Chdir(worktree); err != nil { return nil, err } defer os.Chdir(cwd) //nolint:errcheck if _, err := r.executor("restore", "--staged", localPath).executeString(); err != nil { return nil, err } if _, err := r.executor("restore", localPath).executeString(); err != nil { return nil, err } } } return commitID, nil } // TreeBuilder is used to create multi-level trees in a repository. Based on // `buildTreeHelper` in go-git. type TreeBuilder struct { repo *Repository trees map[string]*entryTree entries map[string]TreeEntry } func NewTreeBuilder(repo *Repository) *TreeBuilder { return &TreeBuilder{repo: repo} } // WriteTreeFromEntries accepts list of TreeEntry representations, and returns // the Git ID of the tree that contains these entries. It constructs the // required intermediate trees. 
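// Illustrative sketch (an assumption based on the package's tests): writing a
// blob and placing it at a nested path makes the builder create the
// intermediate "docs" and "docs/notes" trees automatically.
func exampleWriteNestedTree(repo *Repository) (Hash, error) {
	blobID, err := repo.WriteBlob([]byte("hello"))
	if err != nil {
		return ZeroHash, err
	}
	builder := NewTreeBuilder(repo)
	return builder.WriteTreeFromEntries([]TreeEntry{NewEntryBlob("docs/notes/hello.txt", blobID)})
}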
func (t *TreeBuilder) WriteTreeFromEntries(files []TreeEntry) (Hash, error) { rootNodeKey := "" t.trees = map[string]*entryTree{rootNodeKey: {}} t.entries = map[string]TreeEntry{} for _, entry := range files { t.identifyIntermediates(entry) } return t.writeTrees(rootNodeKey, t.trees[rootNodeKey]) } // identifyIntermediates identifies the intermediate trees that must be // constructed for the specified path. func (t *TreeBuilder) identifyIntermediates(entry TreeEntry) { parts := strings.Split(entry.getName(), "/") var fullPath string for _, part := range parts { parent := fullPath fullPath = path.Join(fullPath, part) t.populateTree(parent, fullPath, entry) } } // populateTree populates tree and entry information for each tree that must be // created. func (t *TreeBuilder) populateTree(parent, fullPath string, entry TreeEntry) { if _, ok := t.trees[fullPath]; ok { return } if _, ok := t.entries[fullPath]; ok { return } var entryObj TreeEntry if fullPath == entry.getName() { // => This is a leaf node // However, gitID _may_ be a tree ID, and we've inserted an existing // tree object as a subtree here, we want to support this so that we // don't have to recreate trees that already exist if err := t.repo.ensureIsTree(entry.getID()); err == nil { // gitID represents tree entryObj = &entryTree{ name: path.Base(fullPath), gitID: entry.getID(), alreadyExists: true, } } else { // gitID is not for a tree entryObj = &entryBlob{ name: path.Base(fullPath), gitID: entry.getID(), } } } else { // => This is an intermediate node, has to be a tree that we must build entryObj = &entryTree{ name: path.Base(fullPath), gitID: ZeroHash, alreadyExists: false, } t.trees[fullPath] = &entryTree{} } t.trees[parent].entries = append(t.trees[parent].entries, entryObj) } // writeTrees recursively stores each tree that must be created in the // repository's object store. It returns the ID of the tree created at each // invocation. func (t *TreeBuilder) writeTrees(parent string, tree *entryTree) (Hash, error) { for i, e := range tree.entries { switch e := e.(type) { case *entryTree: if e.alreadyExists { // The tree already exists and we don't need to write it again. continue } p := path.Join(parent, e.name) entryID, err := t.writeTrees(p, t.trees[p]) if err != nil { return ZeroHash, err } e.gitID = entryID tree.entries[i] = e case *entryBlob: continue } } return t.writeTree(tree.entries) } // writeTree creates a tree in the repository for the specified entries. It // only supports a typical blob with permission 0o644 and a subtree. This is // because it is only intended for use with gittuf specific metadata and tests. // Generic tree creation is left to invocations of the Git binary by the user. 
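// For reference, the `git mktree` input assembled below takes one entry per
// line in "<mode> <type> <id>\t<name>" form (illustrative sample; the object
// IDs shown are the well-known empty blob and empty tree):
//
//	100644 blob e69de29bb2d1d6434b8b29ae775ad8c2e48c5391	foo
//	040000 tree 4b825dc642cb6eb9a060e54bf8d69288fbee4904	subdir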
func (t *TreeBuilder) writeTree(entries []TreeEntry) (Hash, error) { input := "" for _, entry := range entries { // this is very opinionated about the modes right now because the plan // is to use it for gittuf metadata, which requires regular files and // subdirectories switch entry := entry.(type) { case *entryTree: input += "040000 tree " + entry.gitID.String() + "\t" + entry.name case *entryBlob: // TODO: support entryBlob's permissions here input += "100644 blob " + entry.gitID.String() + "\t" + entry.name } input += "\n" } stdOut, err := t.repo.executor("mktree").withStdIn(bytes.NewBufferString(input)).executeString() if err != nil { return ZeroHash, fmt.Errorf("unable to write Git tree: %w", err) } treeID, err := NewHash(stdOut) if err != nil { return ZeroHash, fmt.Errorf("invalid tree ID: %w", err) } return treeID, nil } // TreeEntry represents an entry in a Git tree. type TreeEntry interface { getName() string getID() Hash } // entryTree implements TreeEntry and indicates the entry is for a Git tree. type entryTree struct { name string gitID Hash alreadyExists bool entries []TreeEntry } func (e *entryTree) getName() string { return e.name } func (e *entryTree) getID() Hash { return e.gitID } // NewEntryTree creates a TreeEntry that represents a Git tree. If the tree // doesn't exist, i.e., it must be created, gitID must be set to ZeroHash. The // name must be set to the full path of the tree object. func NewEntryTree(name string, gitID Hash) TreeEntry { entry := &entryTree{name: name, gitID: gitID} if gitID == nil || !gitID.IsZero() { entry.alreadyExists = true } return entry } // entryBlob implements TreeEntry and indicates the entry is for a Git blob. type entryBlob struct { name string gitID Hash permissions os.FileMode //nolint:unused } func (e *entryBlob) getName() string { return e.name } func (e *entryBlob) getID() Hash { return e.gitID } // NewEntryBlob creates a TreeEntry that represents a Git blob. func NewEntryBlob(name string, gitID Hash) TreeEntry { return &entryBlob{name: name, gitID: gitID, permissions: 0o644} } // NewEntryBlobWithPermissions creates a TreeEntry that represents a Git blob. // The permissions parameter can be used to set custom permissions. func NewEntryBlobWithPermissions(name string, gitID Hash, permissions os.FileMode) TreeEntry { return &entryBlob{name: name, gitID: gitID, permissions: permissions} } // ensureIsTree is a helper to check that the ID represents a Git tree // object. 
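// It shells out to `git cat-file -t` and returns an error if the reported
// object type is anything other than "tree".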
func (r *Repository) ensureIsTree(treeID Hash) error { objType, err := r.executor("cat-file", "-t", treeID.String()).executeString() if err != nil { return fmt.Errorf("unable to inspect if object is tree: %w", err) } else if objType != "tree" { return fmt.Errorf("requested Git ID '%s' is not a tree object", treeID.String()) } return nil } gittuf-0.9.0/internal/gitinterface/tree_test.go000066400000000000000000000625641475150141000216420ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package gitinterface import ( "os" "testing" "github.com/stretchr/testify/assert" "github.com/stretchr/testify/require" ) func TestRepositoryEmptyTree(t *testing.T) { tempDir := t.TempDir() repo := CreateTestGitRepository(t, tempDir, false) hash, err := repo.EmptyTree() assert.Nil(t, err) // SHA-1 ID used by Git to denote an empty tree // $ git hash-object -t tree --stdin < /dev/null assert.Equal(t, "4b825dc642cb6eb9a060e54bf8d69288fbee4904", hash.String()) } func TestGetPathIDInTree(t *testing.T) { tempDir := t.TempDir() repo := CreateTestGitRepository(t, tempDir, false) treeBuilder := NewTreeBuilder(repo) blobAID, err := repo.WriteBlob([]byte("a")) if err != nil { t.Fatal(err) } blobBID, err := repo.WriteBlob([]byte("b")) if err != nil { t.Fatal(err) } emptyTreeID := "4b825dc642cb6eb9a060e54bf8d69288fbee4904" t.Run("no items", func(t *testing.T) { treeID, err := treeBuilder.WriteTreeFromEntries(nil) if err != nil { t.Fatal(err) } assert.Equal(t, emptyTreeID, treeID.String()) pathID, err := repo.GetPathIDInTree("a", treeID) assert.ErrorIs(t, err, ErrTreeDoesNotHavePath) assert.Nil(t, pathID) }) t.Run("no subdirectories", func(t *testing.T) { exhaustiveItems := []TreeEntry{ NewEntryBlob("a", blobAID), NewEntryBlob("b", blobBID), } treeID, err := treeBuilder.WriteTreeFromEntries(exhaustiveItems) if err != nil { t.Fatal(err) } itemID, err := repo.GetPathIDInTree("a", treeID) assert.Nil(t, err) assert.Equal(t, blobAID, itemID) }) t.Run("one file in root tree, one file in subdirectory", func(t *testing.T) { exhaustiveItems := []TreeEntry{ NewEntryBlob("foo/a", blobAID), NewEntryBlob("b", blobBID), } treeID, err := treeBuilder.WriteTreeFromEntries(exhaustiveItems) if err != nil { t.Fatal(err) } itemID, err := repo.GetPathIDInTree("foo/a", treeID) assert.Nil(t, err) assert.Equal(t, blobAID, itemID) }) t.Run("multiple levels", func(t *testing.T) { exhaustiveItems := []TreeEntry{ NewEntryBlob("foo/bar/foobar/a", blobAID), NewEntryBlob("foobar/foo/bar/b", blobBID), } treeID, err := treeBuilder.WriteTreeFromEntries(exhaustiveItems) if err != nil { t.Fatal(err) } // find tree ID for foo/bar/foobar expectedItemID, err := treeBuilder.WriteTreeFromEntries([]TreeEntry{NewEntryBlob("a", blobAID)}) if err != nil { t.Fatal(err) } itemID, err := repo.GetPathIDInTree("foo/bar/foobar", treeID) assert.Nil(t, err) assert.Equal(t, expectedItemID, itemID) // find tree ID for foo/bar expectedItemID, err = treeBuilder.WriteTreeFromEntries([]TreeEntry{NewEntryBlob("foobar/a", blobAID)}) if err != nil { t.Fatal(err) } itemID, err = repo.GetPathIDInTree("foo/bar", treeID) assert.Nil(t, err) assert.Equal(t, expectedItemID, itemID) // find tree ID for foobar/foo expectedItemID, err = treeBuilder.WriteTreeFromEntries([]TreeEntry{NewEntryBlob("bar/b", blobBID)}) if err != nil { t.Fatal(err) } itemID, err = repo.GetPathIDInTree("foobar/foo", treeID) assert.Nil(t, err) assert.Equal(t, expectedItemID, itemID) itemID, err = repo.GetPathIDInTree("foobar/foo/foobar", treeID) assert.ErrorIs(t, err, 
ErrTreeDoesNotHavePath) assert.Nil(t, itemID) }) } func TestGetTreeItems(t *testing.T) { tempDir := t.TempDir() repo := CreateTestGitRepository(t, tempDir, false) treeBuilder := NewTreeBuilder(repo) blobAID, err := repo.WriteBlob([]byte("a")) if err != nil { t.Fatal(err) } blobBID, err := repo.WriteBlob([]byte("b")) if err != nil { t.Fatal(err) } emptyTreeID := "4b825dc642cb6eb9a060e54bf8d69288fbee4904" t.Run("no items", func(t *testing.T) { treeID, err := treeBuilder.WriteTreeFromEntries(nil) if err != nil { t.Fatal(err) } assert.Equal(t, emptyTreeID, treeID.String()) treeItems, err := repo.GetTreeItems(treeID) assert.Nil(t, err) assert.Nil(t, treeItems) }) t.Run("no subdirectories", func(t *testing.T) { exhaustiveItems := []TreeEntry{ NewEntryBlob("a", blobAID), NewEntryBlob("b", blobBID), } treeID, err := treeBuilder.WriteTreeFromEntries(exhaustiveItems) if err != nil { t.Fatal(err) } expectedOutput := map[string]Hash{ "a": blobAID, "b": blobBID, } treeItems, err := repo.GetTreeItems(treeID) assert.Nil(t, err) assert.Equal(t, expectedOutput, treeItems) }) t.Run("one file in root tree, one file in subdirectory", func(t *testing.T) { exhaustiveItems := []TreeEntry{ NewEntryBlob("foo/a", blobAID), NewEntryBlob("b", blobBID), } treeID, err := treeBuilder.WriteTreeFromEntries(exhaustiveItems) if err != nil { t.Fatal(err) } fooTreeID, err := treeBuilder.WriteTreeFromEntries([]TreeEntry{NewEntryBlob("a", blobAID)}) if err != nil { t.Fatal(err) } expectedTreeItems := map[string]Hash{ "foo": fooTreeID, "b": blobBID, } treeItems, err := repo.GetTreeItems(treeID) assert.Nil(t, err) assert.Equal(t, expectedTreeItems, treeItems) }) t.Run("one file in foo tree, one file in bar", func(t *testing.T) { exhaustiveItems := []TreeEntry{ NewEntryBlob("foo/a", blobAID), NewEntryBlob("bar/b", blobBID), } treeID, err := treeBuilder.WriteTreeFromEntries(exhaustiveItems) if err != nil { t.Fatal(err) } fooTreeID, err := treeBuilder.WriteTreeFromEntries([]TreeEntry{NewEntryBlob("a", blobAID)}) if err != nil { t.Fatal(err) } barTreeID, err := treeBuilder.WriteTreeFromEntries([]TreeEntry{NewEntryBlob("b", blobBID)}) if err != nil { t.Fatal(err) } expectedTreeItems := map[string]Hash{ "foo": fooTreeID, "bar": barTreeID, } treeItems, err := repo.GetTreeItems(treeID) assert.Nil(t, err) assert.Equal(t, expectedTreeItems, treeItems) }) } func TestGetMergeTree(t *testing.T) { t.Run("no conflict", func(t *testing.T) { tmpDir := t.TempDir() repo := CreateTestGitRepository(t, tmpDir, false) // We need to change the directory for this test because we `checkout` // for older Git versions, modifying the worktree. This chdir ensures // that the temporary directory is used as the worktree.
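// In this subtest, the two branches add different files ("a" on main, "b" on
// feature), so the computed merge tree is expected to equal the combined tree
// containing both blobs.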
pwd, err := os.Getwd() if err != nil { t.Fatal(err) } if err := os.Chdir(tmpDir); err != nil { t.Fatal(err) } defer os.Chdir(pwd) //nolint:errcheck emptyBlobID, err := repo.WriteBlob(nil) if err != nil { t.Fatal(err) } treeBuilder := NewTreeBuilder(repo) emptyTreeID, err := treeBuilder.WriteTreeFromEntries(nil) if err != nil { t.Fatal(err) } treeAID, err := treeBuilder.WriteTreeFromEntries([]TreeEntry{NewEntryBlob("a", emptyBlobID)}) if err != nil { t.Fatal(err) } treeBID, err := treeBuilder.WriteTreeFromEntries([]TreeEntry{NewEntryBlob("b", emptyBlobID)}) if err != nil { t.Fatal(err) } combinedTreeID, err := treeBuilder.WriteTreeFromEntries([]TreeEntry{ NewEntryBlob("a", emptyBlobID), NewEntryBlob("b", emptyBlobID), }) if err != nil { t.Fatal(err) } mainRef := "refs/heads/main" featureRef := "refs/heads/feature" // Add commits to the main branch baseCommitID, err := repo.Commit(emptyTreeID, mainRef, "Initial commit", false) if err != nil { t.Fatal(err) } commitAID, err := repo.Commit(treeAID, mainRef, "Commit A", false) if err != nil { t.Fatal(err) } // Add commits to the feature branch if err := repo.SetReference(featureRef, baseCommitID); err != nil { t.Fatal(err) } commitBID, err := repo.Commit(treeBID, featureRef, "Commit B", false) if err != nil { t.Fatal(err) } // fix up checked out worktree if _, err := repo.executor("restore", "--staged", ".").executeString(); err != nil { t.Fatal(err) } if _, err := repo.executor("checkout", "--", ".").executeString(); err != nil { t.Fatal(err) } mergeTreeID, err := repo.GetMergeTree(commitAID, commitBID) assert.Nil(t, err) if !combinedTreeID.Equal(mergeTreeID) { mergeTreeContents, err := repo.GetAllFilesInTree(mergeTreeID) if err != nil { t.Fatalf("unexpected error when debugging non-matched merge trees: %s", err.Error()) } t.Log("merge tree contents:", mergeTreeContents) t.Error("merge trees don't match") } }) t.Run("merge conflict", func(t *testing.T) { tmpDir := t.TempDir() repo := CreateTestGitRepository(t, tmpDir, false) // We need to change the directory for this test because we `checkout` // for older Git versions, modifying the worktree. This chdir ensures // that the temporary directory is used as the worktree.
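// In this subtest, both branches write different contents to the same path
// "a", so GetMergeTree is expected to return an error rather than a tree.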
pwd, err := os.Getwd() if err != nil { t.Fatal(err) } if err := os.Chdir(tmpDir); err != nil { t.Fatal(err) } defer os.Chdir(pwd) //nolint:errcheck emptyBlobID, err := repo.WriteBlob(nil) if err != nil { t.Fatal(err) } treeBuilder := NewTreeBuilder(repo) emptyTreeID, err := treeBuilder.WriteTreeFromEntries(nil) if err != nil { t.Fatal(err) } blobAID, err := repo.WriteBlob([]byte("a")) if err != nil { t.Fatal(err) } blobBID, err := repo.WriteBlob([]byte("b")) if err != nil { t.Fatal(err) } treeAID, err := treeBuilder.WriteTreeFromEntries([]TreeEntry{NewEntryBlob("a", blobAID)}) if err != nil { t.Fatal(err) } treeBID, err := treeBuilder.WriteTreeFromEntries([]TreeEntry{ NewEntryBlob("a", blobBID), NewEntryBlob("b", emptyBlobID), }) if err != nil { t.Fatal(err) } mainRef := "refs/heads/main" featureRef := "refs/heads/feature" // Add commits to the main branch baseCommitID, err := repo.Commit(emptyTreeID, mainRef, "Initial commit", false) if err != nil { t.Fatal(err) } commitAID, err := repo.Commit(treeAID, mainRef, "Commit A", false) if err != nil { t.Fatal(err) } // Add commits to the feature branch if err := repo.SetReference(featureRef, baseCommitID); err != nil { t.Fatal(err) } commitBID, err := repo.Commit(treeBID, featureRef, "Commit B", false) if err != nil { t.Fatal(err) } // fix up checked out worktree if _, err := repo.executor("restore", "--staged", ".").executeString(); err != nil { t.Fatal(err) } if _, err := repo.executor("checkout", "--", ".").executeString(); err != nil { t.Fatal(err) } _, err = repo.GetMergeTree(commitAID, commitBID) assert.NotNil(t, err) }) t.Run("fast forward merge", func(t *testing.T) { tmpDir := t.TempDir() repo := CreateTestGitRepository(t, tmpDir, false) // We need to change the directory for this test because we `checkout` // for older Git versions, modifying the worktree. This chdir ensures // that the temporary directory is used as the worktree.
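// In this subtest, the merge has a zero base commit, so GetMergeTree is
// expected to treat it as a fast-forward and return the commit's own tree.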
pwd, err := os.Getwd() if err != nil { t.Fatal(err) } if err := os.Chdir(tmpDir); err != nil { t.Fatal(err) } defer os.Chdir(pwd) //nolint:errcheck emptyBlobID, err := repo.WriteBlob(nil) if err != nil { t.Fatal(err) } treeBuilder := NewTreeBuilder(repo) treeID, err := treeBuilder.WriteTreeFromEntries([]TreeEntry{NewEntryBlob("empty", emptyBlobID)}) if err != nil { t.Fatal(err) } commitID, err := repo.Commit(treeID, "refs/heads/main", "Initial commit\n", false) if err != nil { t.Fatal(err) } mergeTreeID, err := repo.GetMergeTree(ZeroHash, commitID) assert.Nil(t, err) assert.Equal(t, treeID, mergeTreeID) }) } func TestCreateSubtreeFromUpstreamRepository(t *testing.T) { t.Run("subtree into HEAD", func(t *testing.T) { tmpDir1 := t.TempDir() downstreamRepository := CreateTestGitRepository(t, tmpDir1, false) blobAID, err := downstreamRepository.WriteBlob([]byte("a")) require.Nil(t, err) blobBID, err := downstreamRepository.WriteBlob([]byte("b")) require.Nil(t, err) downstreamTreeBuilder := NewTreeBuilder(downstreamRepository) // The downstream tree (if set as exists in test below) is: // oof/a -> blobA // b -> blobB downstreamTreeEntries := []TreeEntry{ NewEntryBlob("oof/a", blobAID), NewEntryBlob("b", blobBID), } downstreamTreeID, err := downstreamTreeBuilder.WriteTreeFromEntries(downstreamTreeEntries) require.Nil(t, err) downstreamCommitID, err := downstreamRepository.Commit(downstreamTreeID, "refs/heads/main", "Initial commit\n", false) require.Nil(t, err) err = downstreamRepository.SetSymbolicReference("HEAD", "refs/heads/main") require.Nil(t, err) downstreamRepository.RestoreWorktree(t) tmpDir2 := t.TempDir() upstreamRepository := CreateTestGitRepository(t, tmpDir2, true) _, err = upstreamRepository.WriteBlob([]byte("a")) require.Nil(t, err) _, err = upstreamRepository.WriteBlob([]byte("b")) require.Nil(t, err) upstreamTreeBuilder := NewTreeBuilder(upstreamRepository) // The upstream tree is: // a -> blobA // foo/a -> blobA // foo/b -> blobB // foobar/foo/bar/b -> blobB upstreamTreeID, err := upstreamTreeBuilder.WriteTreeFromEntries([]TreeEntry{ NewEntryBlob("a", blobAID), NewEntryBlob("foo/a", blobAID), NewEntryBlob("foo/b", blobBID), NewEntryBlob("foobar/foo/bar/b", blobBID), }) require.Nil(t, err) upstreamRef := "refs/heads/main" upstreamCommitID, err := upstreamRepository.Commit(upstreamTreeID, upstreamRef, "Initial commit\n", false) require.Nil(t, err) downstreamCommitIDNew, err := downstreamRepository.CreateSubtreeFromUpstreamRepository(upstreamRepository, upstreamCommitID, "refs/heads/main", "upstream") assert.Nil(t, err) assert.NotEqual(t, downstreamCommitID, downstreamCommitIDNew) statuses, err := downstreamRepository.Status() require.Nil(t, err) assert.Empty(t, statuses) }) t.Run("various other subtree scenarios", func(t *testing.T) { tmpDir1 := t.TempDir() downstreamRepository := CreateTestGitRepository(t, tmpDir1, false) blobAID, err := downstreamRepository.WriteBlob([]byte("a")) require.Nil(t, err) blobBID, err := downstreamRepository.WriteBlob([]byte("b")) require.Nil(t, err) downstreamTreeBuilder := NewTreeBuilder(downstreamRepository) // The downstream tree (if set as exists in test below) is: // oof/a -> blobA // b -> blobB downstreamTreeEntries := []TreeEntry{ NewEntryBlob("oof/a", blobAID), NewEntryBlob("b", blobBID), } downstreamTreeID, err := downstreamTreeBuilder.WriteTreeFromEntries(downstreamTreeEntries) require.Nil(t, err) tmpDir2 := t.TempDir() upstreamRepository := CreateTestGitRepository(t, tmpDir2, true) _, err = upstreamRepository.WriteBlob([]byte("a")) 
require.Nil(t, err) _, err = upstreamRepository.WriteBlob([]byte("b")) require.Nil(t, err) upstreamTreeBuilder := NewTreeBuilder(upstreamRepository) // The upstream tree is: // a -> blobA // foo/a -> blobA // foo/b -> blobB // foobar/foo/bar/b -> blobB upstreamTreeID, err := upstreamTreeBuilder.WriteTreeFromEntries([]TreeEntry{ NewEntryBlob("a", blobAID), NewEntryBlob("foo/a", blobAID), NewEntryBlob("foo/b", blobBID), NewEntryBlob("foobar/foo/bar/b", blobBID), }) require.Nil(t, err) upstreamRef := "refs/heads/main" upstreamCommitID, err := upstreamRepository.Commit(upstreamTreeID, upstreamRef, "Initial commit\n", false) require.Nil(t, err) tests := map[string]struct { localPath string refExists bool // refExists -> we must check for other files but no prior propagation has happened priorPropagation bool // priorPropagation -> localPath is already populated, mutually exclusive with refExists err error }{ "directory in root, no trailing slash, ref does not exist": { localPath: "upstream", refExists: false, priorPropagation: false, }, "directory in root, trailing slash, ref does not exist": { localPath: "upstream/", refExists: false, priorPropagation: false, }, "directory in root, no trailing slash, ref exists": { localPath: "upstream", refExists: true, priorPropagation: false, }, "directory in root, trailing slash, ref exists": { localPath: "upstream/", refExists: true, priorPropagation: false, }, "directory in root, no trailing slash, prior propagation exists": { localPath: "upstream", refExists: false, priorPropagation: true, }, "directory in root, trailing slash, prior propagation exists": { localPath: "upstream/", refExists: false, priorPropagation: true, }, "directory in subdirectory, no trailing slash, ref does not exist": { localPath: "foo/upstream", refExists: false, priorPropagation: false, }, "directory in subdirectory, trailing slash, ref does not exist": { localPath: "foo/upstream/", refExists: false, priorPropagation: false, }, "directory in subdirectory, no trailing slash, ref exists": { localPath: "foo/upstream", refExists: true, priorPropagation: false, }, "directory in subdirectory, trailing slash, ref exists": { localPath: "foo/upstream/", refExists: true, priorPropagation: false, }, "directory in subdirectory, no trailing slash, prior propagation exists": { localPath: "foo/upstream", refExists: false, priorPropagation: true, }, "directory in subdirectory, trailing slash, prior propagation exists": { localPath: "foo/upstream/", refExists: false, priorPropagation: true, }, "empty localPath": { err: ErrCannotCreateSubtreeIntoRootTree, }, } for name, test := range tests { t.Run(name, func(t *testing.T) { require.False(t, test.refExists && test.priorPropagation, "refExists and priorPropagation can't both be true") if test.refExists { _, err := downstreamRepository.Commit(downstreamTreeID, testNameToRefName(name), "Initial commit\n", false) require.Nil(t, err) } else if test.priorPropagation { // We set the upstream path to contain the same tree as the // downstreamTree, so: // oof/a -> blobA // b -> blobB // /oof/a -> blobA // /b -> blobB entries := []TreeEntry{NewEntryTree(test.localPath, downstreamTreeID)} entries = append(entries, downstreamTreeEntries...) 
rootTreeID, err := downstreamTreeBuilder.WriteTreeFromEntries(entries) require.Nil(t, err) _, err = downstreamRepository.Commit(rootTreeID, testNameToRefName(name), "Initial commit\n", false) require.Nil(t, err) } downstreamCommitID, err := downstreamRepository.CreateSubtreeFromUpstreamRepository(upstreamRepository, upstreamCommitID, testNameToRefName(name), test.localPath) if test.err != nil { assert.ErrorIs(t, err, test.err) } else { assert.Nil(t, err) rootTreeID, err := downstreamRepository.GetCommitTreeID(downstreamCommitID) require.Nil(t, err) itemID, err := downstreamRepository.GetPathIDInTree(test.localPath, rootTreeID) require.Nil(t, err) assert.Equal(t, upstreamTreeID, itemID) if test.refExists { // check that other items are still present itemID, err := downstreamRepository.GetPathIDInTree("oof/a", downstreamTreeID) require.Nil(t, err) assert.Equal(t, blobAID, itemID) itemID, err = downstreamRepository.GetPathIDInTree("b", downstreamTreeID) require.Nil(t, err) assert.Equal(t, blobBID, itemID) } // We don't need to similarly check when test.priorPropagation is // true because we already checked that those contents don't exist // in that localPath when we checked the tree ID patches // upstreamTreeID } }) } }) } func TestTreeBuilder(t *testing.T) { tempDir := t.TempDir() repo := CreateTestGitRepository(t, tempDir, false) blobAID, err := repo.WriteBlob([]byte("a")) if err != nil { t.Fatal(err) } blobBID, err := repo.WriteBlob([]byte("b")) if err != nil { t.Fatal(err) } emptyTreeID := "4b825dc642cb6eb9a060e54bf8d69288fbee4904" t.Run("no blobs", func(t *testing.T) { treeBuilder := NewTreeBuilder(repo) treeID, err := treeBuilder.WriteTreeFromEntries(nil) assert.Nil(t, err) assert.Equal(t, emptyTreeID, treeID.String()) treeID, err = treeBuilder.WriteTreeFromEntries(nil) assert.Nil(t, err) assert.Equal(t, emptyTreeID, treeID.String()) }) t.Run("both blobs in the root directory", func(t *testing.T) { treeBuilder := NewTreeBuilder(repo) input := []TreeEntry{ NewEntryBlob("a", blobAID), NewEntryBlob("b", blobBID), } rootTreeID, err := treeBuilder.WriteTreeFromEntries(input) assert.Nil(t, err) files, err := repo.GetAllFilesInTree(rootTreeID) if err != nil { t.Fatal(err) } expectedOutput := map[string]Hash{ "a": blobAID, "b": blobBID, } assert.Equal(t, expectedOutput, files) }) t.Run("both blobs in same subdirectory", func(t *testing.T) { treeBuilder := NewTreeBuilder(repo) input := []TreeEntry{ NewEntryBlob("dir/a", blobAID), NewEntryBlob("dir/b", blobBID), } rootTreeID, err := treeBuilder.WriteTreeFromEntries(input) assert.Nil(t, err) files, err := repo.GetAllFilesInTree(rootTreeID) if err != nil { t.Fatal(err) } expectedOutput := map[string]Hash{ "dir/a": blobAID, "dir/b": blobBID, } assert.Equal(t, expectedOutput, files) }) t.Run("same blobs in the multiple directories", func(t *testing.T) { treeBuilder := NewTreeBuilder(repo) input := []TreeEntry{ NewEntryBlob("a", blobAID), NewEntryBlob("b", blobBID), NewEntryBlob("foo/a", blobAID), NewEntryBlob("foo/b", blobBID), NewEntryBlob("bar/a", blobAID), NewEntryBlob("bar/b", blobBID), } rootTreeID, err := treeBuilder.WriteTreeFromEntries(input) assert.Nil(t, err) files, err := repo.GetAllFilesInTree(rootTreeID) if err != nil { t.Fatal(err) } expectedOutput := map[string]Hash{ "a": blobAID, "b": blobBID, "foo/a": blobAID, "foo/b": blobBID, "bar/a": blobAID, "bar/b": blobBID, } assert.Equal(t, expectedOutput, files) }) t.Run("both blobs in different subdirectories", func(t *testing.T) { treeBuilder := NewTreeBuilder(repo) input := []TreeEntry{ 
NewEntryBlob("foo/a", blobAID), NewEntryBlob("bar/b", blobBID), } rootTreeID, err := treeBuilder.WriteTreeFromEntries(input) assert.Nil(t, err) files, err := repo.GetAllFilesInTree(rootTreeID) if err != nil { t.Fatal(err) } expectedOutput := map[string]Hash{ "foo/a": blobAID, "bar/b": blobBID, } assert.Equal(t, expectedOutput, files) }) t.Run("blobs in mix of root directory and subdirectories", func(t *testing.T) { treeBuilder := NewTreeBuilder(repo) input := []TreeEntry{ NewEntryBlob("a", blobAID), NewEntryBlob("foo/bar/foobar/b", blobBID), } rootTreeID, err := treeBuilder.WriteTreeFromEntries(input) assert.Nil(t, err) files, err := repo.GetAllFilesInTree(rootTreeID) if err != nil { t.Fatal(err) } expectedOutput := map[string]Hash{ "a": blobAID, "foo/bar/foobar/b": blobBID, } assert.Equal(t, expectedOutput, files) }) t.Run("build tree from intermediate tree", func(t *testing.T) { treeBuilder := NewTreeBuilder(repo) intermediateTreeInput := []TreeEntry{ NewEntryBlob("a", blobAID), } intermediateTreeID, err := treeBuilder.WriteTreeFromEntries(intermediateTreeInput) assert.Nil(t, err) rootTreeInput := []TreeEntry{ NewEntryTree("intermediate", intermediateTreeID), NewEntryBlob("b", blobBID), } rootTreeID, err := treeBuilder.WriteTreeFromEntries(rootTreeInput) assert.Nil(t, err) expectedFiles := map[string]Hash{ "intermediate/a": blobAID, "b": blobBID, } files, err := repo.GetAllFilesInTree(rootTreeID) if err != nil { t.Fatal(err) } assert.Equal(t, expectedFiles, files) }) t.Run("build tree from nested intermediate tree", func(t *testing.T) { treeBuilder := NewTreeBuilder(repo) intermediateTreeInput := []TreeEntry{ NewEntryBlob("a", blobAID), } intermediateTreeID, err := treeBuilder.WriteTreeFromEntries(intermediateTreeInput) assert.Nil(t, err) rootTreeInput := []TreeEntry{ NewEntryTree("foo/intermediate", intermediateTreeID), NewEntryBlob("b", blobBID), } rootTreeID, err := treeBuilder.WriteTreeFromEntries(rootTreeInput) assert.Nil(t, err) expectedFiles := map[string]Hash{ "foo/intermediate/a": blobAID, "b": blobBID, } files, err := repo.GetAllFilesInTree(rootTreeID) if err != nil { t.Fatal(err) } assert.Equal(t, expectedFiles, files) }) t.Run("build tree from nested multi-level intermediate tree", func(t *testing.T) { treeBuilder := NewTreeBuilder(repo) intermediateTreeInput := []TreeEntry{ NewEntryBlob("intermediate/a", blobAID), } intermediateTreeID, err := treeBuilder.WriteTreeFromEntries(intermediateTreeInput) assert.Nil(t, err) rootTreeInput := []TreeEntry{ NewEntryTree("foo/intermediate", intermediateTreeID), NewEntryBlob("b", blobBID), } rootTreeID, err := treeBuilder.WriteTreeFromEntries(rootTreeInput) assert.Nil(t, err) expectedFiles := map[string]Hash{ "foo/intermediate/intermediate/a": blobAID, "b": blobBID, } files, err := repo.GetAllFilesInTree(rootTreeID) if err != nil { t.Fatal(err) } assert.Equal(t, expectedFiles, files) }) } func TestEnsureIsTree(t *testing.T) { tmpDir := t.TempDir() repo := CreateTestGitRepository(t, tmpDir, true) blobID, err := repo.WriteBlob([]byte("foo")) if err != nil { t.Fatal(err) } treeBuilder := NewTreeBuilder(repo) treeID, err := treeBuilder.WriteTreeFromEntries([]TreeEntry{NewEntryBlob("foo", blobID)}) if err != nil { t.Fatal(err) } err = repo.ensureIsTree(treeID) assert.Nil(t, err) err = repo.ensureIsTree(blobID) assert.NotNil(t, err) } gittuf-0.9.0/internal/gitinterface/utils.go000066400000000000000000000056401475150141000207740ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package 
gitinterface import ( "fmt" "os" "os/exec" "path" "strings" "testing" ) // ResetDueToError reverses a change applied to a ref to the specified target // ID. It is used to ensure a gittuf operation is atomic: if a gittuf operation // fails, any changes made to the repository in refs/gittuf can be rolled back. // Worktrees are not updated. func (r *Repository) ResetDueToError(cause error, refName string, commitID Hash) error { if err := r.SetReference(refName, commitID); err != nil { return fmt.Errorf("unable to reset %s to %s, caused by following error: %w", refName, commitID.String(), cause) } return cause } func RemoteRef(refName, remoteName string) string { var remotePath string switch { case strings.HasPrefix(refName, BranchRefPrefix): // refs/heads/<branch> -> refs/remotes/<remote>/<branch> rest := strings.TrimPrefix(refName, BranchRefPrefix) remotePath = path.Join(RemoteRefPrefix, remoteName, rest) case strings.HasPrefix(refName, TagRefPrefix): // refs/tags/<tag> -> refs/tags/<tag> remotePath = refName default: // refs/<ref> -> refs/remotes/<remote>/<ref> rest := strings.TrimPrefix(refName, RefPrefix) remotePath = path.Join(RemoteRefPrefix, remoteName, rest) } return remotePath } // RestoreWorktree is a test helper to fix the worktree in tests where we need // to operate in a checked out copy of the repository. This is primarily needed // for support with older Git versions. func (r *Repository) RestoreWorktree(t *testing.T) { t.Helper() worktree := r.gitDirPath if !r.IsBare() { worktree = strings.TrimSuffix(worktree, ".git") // TODO: this doesn't support detached git dir } cwd, err := os.Getwd() if err != nil { t.Fatal(err) } if err := os.Chdir(worktree); err != nil { t.Fatal(err) } defer os.Chdir(cwd) //nolint:errcheck if _, err := r.executor("restore", "--staged", ".").executeString(); err != nil { t.Fatal(err) } if _, err := r.executor("restore", ".").executeString(); err != nil { t.Fatal(err) } } // isNiceGitVersion determines whether the version of Git is "nice". Certain Git // subcommands that gittuf uses were added in newer versions than some common // client versions. Instead of using a workaround for all clients, we determine // if we can use the newer features or instead need to use workarounds.
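//
// For example, output such as "git version 2.39.2" is parsed into
// major/minor/patch components; the check below treats major >= 2 and
// minor >= 38 as "nice".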
func isNiceGitVersion() (bool, error) { cmd := exec.Command("git", "--version") output, err := cmd.Output() if err != nil { return false, err } versionString := strings.TrimPrefix(strings.TrimSpace(string(output)), "git version ") var major, minor, patch int _, err = fmt.Sscanf(versionString, "%d.%d.%d", &major, &minor, &patch) if err != nil { return false, err } if major >= 2 && minor >= 38 { return true, nil } return false, nil } func testNameToRefName(testName string) string { return BranchReferenceName(strings.ReplaceAll(testName, " ", "__")) } gittuf-0.9.0/internal/policy/000077500000000000000000000000001475150141000161335ustar00rootroot00000000000000gittuf-0.9.0/internal/policy/helpers_test.go000066400000000000000000000551351475150141000211740ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package policy import ( "context" "os" "path/filepath" "testing" "github.com/gittuf/gittuf/internal/dev" "github.com/gittuf/gittuf/internal/gitinterface" "github.com/gittuf/gittuf/internal/signerverifier/dsse" "github.com/gittuf/gittuf/internal/signerverifier/gpg" "github.com/gittuf/gittuf/internal/signerverifier/ssh" artifacts "github.com/gittuf/gittuf/internal/testartifacts" sslibdsse "github.com/gittuf/gittuf/internal/third_party/go-securesystemslib/dsse" "github.com/gittuf/gittuf/internal/tuf" tufv01 "github.com/gittuf/gittuf/internal/tuf/v01" tufv02 "github.com/gittuf/gittuf/internal/tuf/v02" ) var ( testCtx = context.Background() rootKeyBytes = artifacts.SSHRSAPrivate rootPubKeyBytes = artifacts.SSHRSAPublicSSH targets1KeyBytes = artifacts.SSHECDSAPrivate targets1PubKeyBytes = artifacts.SSHECDSAPublicSSH targets2KeyBytes = artifacts.SSHED25519Private targets2PubKeyBytes = artifacts.SSHED25519PublicSSH gpgKeyBytes = artifacts.GPGKey1Private gpgPubKeyBytes = artifacts.GPGKey1Public gpgUnauthorizedKeyBytes = artifacts.GPGKey2Private gpgUnauthorizedPubKeyBytes = artifacts.GPGKey2Public ) func createTestRepository(t *testing.T, stateCreator func(*testing.T) *State) (*gitinterface.Repository, *State) { t.Helper() state := stateCreator(t) tempDir := t.TempDir() repo := gitinterface.CreateTestGitRepository(t, tempDir, false) state.repository = repo if err := state.Commit(repo, "Create test state", false); err != nil { t.Fatal(err) } if err := Apply(testCtx, repo, false); err != nil { t.Fatal(err) } return repo, state } func createTestStateWithOnlyRoot(t *testing.T) *State { t.Helper() signer := setupSSHKeysForSigning(t, rootKeyBytes, rootPubKeyBytes) //nolint:staticcheck key := tufv01.NewKeyFromSSLibKey(signer.MetadataKey()) rootMetadata, err := InitializeRootMetadata(key) if err != nil { t.Fatal(err) } rootEnv, err := dsse.CreateEnvelope(rootMetadata) if err != nil { t.Fatal(err) } rootEnv, err = dsse.SignEnvelope(context.Background(), rootEnv, signer) if err != nil { t.Fatal(err) } return &State{ RootPublicKeys: []tuf.Principal{key}, RootEnvelope: rootEnv, } } func createTestStateWithPolicy(t *testing.T) *State { t.Helper() signer := setupSSHKeysForSigning(t, rootKeyBytes, rootPubKeyBytes) key := tufv01.NewKeyFromSSLibKey(signer.MetadataKey()) rootMetadata, err := InitializeRootMetadata(key) if err != nil { t.Fatal(err) } if err := rootMetadata.AddPrimaryRuleFilePrincipal(key); err != nil { t.Fatal(err) } rootEnv, err := dsse.CreateEnvelope(rootMetadata) if err != nil { t.Fatal(err) } rootEnv, err = dsse.SignEnvelope(context.Background(), rootEnv, signer) if err != nil { t.Fatal(err) } gpgKeyR, err := gpg.LoadGPGKeyFromBytes(gpgPubKeyBytes) if err 
!= nil { t.Fatal(err) } gpgKey := tufv01.NewKeyFromSSLibKey(gpgKeyR) targetsMetadata := InitializeTargetsMetadata() if err := targetsMetadata.AddPrincipal(gpgKey); err != nil { t.Fatal(err) } if err := targetsMetadata.AddRule("protect-main", []string{gpgKey.KeyID}, []string{"git:refs/heads/main"}, 1); err != nil { t.Fatal(err) } // Add a file protection rule. When used with common.AddNTestCommitsToSpecifiedRef, we have files with names 1, 2, 3,...n. if err := targetsMetadata.AddRule("protect-files-1-and-2", []string{gpgKey.KeyID}, []string{"file:1", "file:2"}, 1); err != nil { t.Fatal(err) } targetsEnv, err := dsse.CreateEnvelope(targetsMetadata) if err != nil { t.Fatal(err) } targetsEnv, err = dsse.SignEnvelope(context.Background(), targetsEnv, signer) if err != nil { t.Fatal(err) } state := &State{ RootEnvelope: rootEnv, TargetsEnvelope: targetsEnv, RootPublicKeys: []tuf.Principal{key}, } if err := state.preprocess(); err != nil { t.Fatal(err) } return state } // createTestStateWithGlobalConstraintThreshold creates a policy state with no // explicit branch protection rules but with a two-approval constraint on // changes to the main branch. The two keys trusted are `rootPubKeyBytes` and // `gpgPubKeyBytes`. func createTestStateWithGlobalConstraintThreshold(t *testing.T) *State { t.Helper() signer := setupSSHKeysForSigning(t, rootKeyBytes, rootPubKeyBytes) key := tufv01.NewKeyFromSSLibKey(signer.MetadataKey()) rootMetadata, err := InitializeRootMetadata(key) if err != nil { t.Fatal(err) } if err := rootMetadata.AddPrimaryRuleFilePrincipal(key); err != nil { t.Fatal(err) } if err := rootMetadata.AddGlobalRule(tufv01.NewGlobalRuleThreshold("threshold-2-main", []string{"git:refs/heads/main"}, 2)); err != nil { t.Fatal(err) } rootEnv, err := dsse.CreateEnvelope(rootMetadata) if err != nil { t.Fatal(err) } rootEnv, err = dsse.SignEnvelope(context.Background(), rootEnv, signer) if err != nil { t.Fatal(err) } gpgKeyR, err := gpg.LoadGPGKeyFromBytes(gpgPubKeyBytes) if err != nil { t.Fatal(err) } gpgKey := tufv01.NewKeyFromSSLibKey(gpgKeyR) targetsMetadata := InitializeTargetsMetadata() if err := targetsMetadata.AddPrincipal(gpgKey); err != nil { t.Fatal(err) } targetsEnv, err := dsse.CreateEnvelope(targetsMetadata) if err != nil { t.Fatal(err) } targetsEnv, err = dsse.SignEnvelope(context.Background(), targetsEnv, signer) if err != nil { t.Fatal(err) } state := &State{ RootEnvelope: rootEnv, TargetsEnvelope: targetsEnv, RootPublicKeys: []tuf.Principal{key}, } if err := state.preprocess(); err != nil { t.Fatal(err) } return state } // createTestStateWithGlobalConstraintBlockForcePushes creates a policy state // with no explicit branch protection rules but with a rule that blocks force // pushes to main. 
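// As with createTestStateWithGlobalConstraintThreshold, the root of trust and
// primary rule file are keyed by rootPubKeyBytes, and the key derived from
// gpgPubKeyBytes is added as a principal in the targets metadata.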
func createTestStateWithGlobalConstraintBlockForcePushes(t *testing.T) *State { t.Helper() signer := setupSSHKeysForSigning(t, rootKeyBytes, rootPubKeyBytes) key := tufv01.NewKeyFromSSLibKey(signer.MetadataKey()) rootMetadata, err := InitializeRootMetadata(key) if err != nil { t.Fatal(err) } if err := rootMetadata.AddPrimaryRuleFilePrincipal(key); err != nil { t.Fatal(err) } forcePushesGlobalRule, err := tufv01.NewGlobalRuleBlockForcePushes("block-force-pushes-main", []string{"git:refs/heads/main"}) if err != nil { t.Fatal(err) } if err := rootMetadata.AddGlobalRule(forcePushesGlobalRule); err != nil { t.Fatal(err) } rootEnv, err := dsse.CreateEnvelope(rootMetadata) if err != nil { t.Fatal(err) } rootEnv, err = dsse.SignEnvelope(context.Background(), rootEnv, signer) if err != nil { t.Fatal(err) } gpgKeyR, err := gpg.LoadGPGKeyFromBytes(gpgPubKeyBytes) if err != nil { t.Fatal(err) } gpgKey := tufv01.NewKeyFromSSLibKey(gpgKeyR) targetsMetadata := InitializeTargetsMetadata() if err := targetsMetadata.AddPrincipal(gpgKey); err != nil { t.Fatal(err) } targetsEnv, err := dsse.CreateEnvelope(targetsMetadata) if err != nil { t.Fatal(err) } targetsEnv, err = dsse.SignEnvelope(context.Background(), targetsEnv, signer) if err != nil { t.Fatal(err) } state := &State{ RootEnvelope: rootEnv, TargetsEnvelope: targetsEnv, RootPublicKeys: []tuf.Principal{key}, } if err := state.preprocess(); err != nil { t.Fatal(err) } return state } func createTestStateWithPolicyUsingPersons(t *testing.T) *State { t.Helper() signer := setupSSHKeysForSigning(t, rootKeyBytes, rootPubKeyBytes) key := tufv01.NewKeyFromSSLibKey(signer.MetadataKey()) rootMetadata, err := InitializeRootMetadata(key) if err != nil { t.Fatal(err) } if err := rootMetadata.AddPrimaryRuleFilePrincipal(key); err != nil { t.Fatal(err) } rootEnv, err := dsse.CreateEnvelope(rootMetadata) if err != nil { t.Fatal(err) } rootEnv, err = dsse.SignEnvelope(context.Background(), rootEnv, signer) if err != nil { t.Fatal(err) } gpgKeyR, err := gpg.LoadGPGKeyFromBytes(gpgPubKeyBytes) if err != nil { t.Fatal(err) } gpgKey := tufv01.NewKeyFromSSLibKey(gpgKeyR) person := &tufv02.Person{ PersonID: "jane.doe@example.com", PublicKeys: map[string]*tufv02.Key{ gpgKey.KeyID: gpgKey, }, } targetsMetadata := InitializeTargetsMetadata() if err := targetsMetadata.AddPrincipal(person); err != nil { t.Fatal(err) } if err := targetsMetadata.AddRule("protect-main", []string{person.PersonID}, []string{"git:refs/heads/main"}, 1); err != nil { t.Fatal(err) } // Add a file protection rule. When used with common.AddNTestCommitsToSpecifiedRef, we have files with names 1, 2, 3,...n. 
if err := targetsMetadata.AddRule("protect-files-1-and-2", []string{person.PersonID}, []string{"file:1", "file:2"}, 1); err != nil { t.Fatal(err) } targetsEnv, err := dsse.CreateEnvelope(targetsMetadata) if err != nil { t.Fatal(err) } targetsEnv, err = dsse.SignEnvelope(context.Background(), targetsEnv, signer) if err != nil { t.Fatal(err) } state := &State{ RootEnvelope: rootEnv, TargetsEnvelope: targetsEnv, RootPublicKeys: []tuf.Principal{key}, } if err := state.preprocess(); err != nil { t.Fatal(err) } return state } func createTestStateWithDelegatedPolicies(t *testing.T) *State { t.Helper() signer := setupSSHKeysForSigning(t, rootKeyBytes, rootPubKeyBytes) key := tufv01.NewKeyFromSSLibKey(signer.MetadataKey()) rootMetadata, err := InitializeRootMetadata(key) if err != nil { t.Fatal(err) } if err := rootMetadata.AddPrimaryRuleFilePrincipal(key); err != nil { t.Fatal(err) } rootEnv, err := dsse.CreateEnvelope(rootMetadata) if err != nil { t.Fatal(err) } rootEnv, err = dsse.SignEnvelope(context.Background(), rootEnv, signer) if err != nil { t.Fatal(err) } gpgKeyR, err := gpg.LoadGPGKeyFromBytes(gpgPubKeyBytes) if err != nil { t.Fatal(err) } gpgKey := tufv01.NewKeyFromSSLibKey(gpgKeyR) // Create the root targets metadata targetsMetadata := InitializeTargetsMetadata() if err := targetsMetadata.AddPrincipal(key); err != nil { t.Fatal(err) } if err := targetsMetadata.AddRule("1", []string{key.KeyID}, []string{"file:1/*"}, 1); err != nil { t.Fatal(err) } if err := targetsMetadata.AddRule("2", []string{key.KeyID}, []string{"file:2/*"}, 1); err != nil { t.Fatal(err) } // Create the targets envelope targetsEnv, err := dsse.CreateEnvelope(targetsMetadata) if err != nil { t.Fatal(err) } targetsEnv, err = dsse.SignEnvelope(context.Background(), targetsEnv, signer) if err != nil { t.Fatal(err) } // Create the second level of delegations delegation1Metadata := InitializeTargetsMetadata() if err := delegation1Metadata.AddPrincipal(gpgKey); err != nil { t.Fatal(err) } if err := delegation1Metadata.AddRule("3", []string{gpgKey.KeyID}, []string{"file:1/subpath1/*"}, 1); err != nil { t.Fatal(err) } if err := delegation1Metadata.AddRule("4", []string{gpgKey.KeyID}, []string{"file:1/subpath2/*"}, 1); err != nil { t.Fatal(err) } // Create the delegation envelope delegation1Env, err := dsse.CreateEnvelope(delegation1Metadata) if err != nil { t.Fatal(err) } delegation1Env, err = dsse.SignEnvelope(context.Background(), delegation1Env, signer) if err != nil { t.Fatal(err) } curState := &State{ RootEnvelope: rootEnv, TargetsEnvelope: targetsEnv, DelegationEnvelopes: map[string]*sslibdsse.Envelope{}, RootPublicKeys: []tuf.Principal{key}, } // Add the delegation envelopes to the state curState.DelegationEnvelopes["1"] = delegation1Env // delegation structure // // targets // /\ // 1 2 // /\ // 3 4 if err := curState.preprocess(); err != nil { t.Fatal(err) } return curState } func createTestStateWithThresholdPolicy(t *testing.T) *State { t.Helper() state := createTestStateWithPolicy(t) gpgKeyR, err := gpg.LoadGPGKeyFromBytes(gpgPubKeyBytes) if err != nil { t.Fatal(err) } gpgKey := tufv01.NewKeyFromSSLibKey(gpgKeyR) approverKey := tufv01.NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, targets1PubKeyBytes)) targetsMetadata, err := state.GetTargetsMetadata(TargetsRoleName, false) if err != nil { t.Fatal(err) } if err := targetsMetadata.AddPrincipal(gpgKey); err != nil { t.Fatal(err) } if err := targetsMetadata.AddPrincipal(approverKey); err != nil { t.Fatal(err) } // Set threshold = 2 for existing rule with the added key if 
err := targetsMetadata.UpdateRule("protect-main", []string{gpgKey.KeyID, approverKey.KeyID}, []string{"git:refs/heads/main"}, 2); err != nil { t.Fatal(err) } targetsEnv, err := dsse.CreateEnvelope(targetsMetadata) if err != nil { t.Fatal(err) } signer := setupSSHKeysForSigning(t, rootKeyBytes, rootPubKeyBytes) targetsEnv, err = dsse.SignEnvelope(context.Background(), targetsEnv, signer) if err != nil { t.Fatal(err) } state.TargetsEnvelope = targetsEnv return state } // createTestStateWithThresholdPolicyAndGitHubAppTrust sets up a test policy // with threshold rules. It uses v0.2 (and higher) policy metadata to support // GitHub apps. // // Usage notes: // - The app key is targets1PubKeyBytes // - The two authorized persons are "jane.doe" and "john.doe" // - jane.doe's signing key is gpgPubKeyBytes // - john.doe's signing key is targets2PubKeyBytes // - The protected namespace is the main branch // - Use either of them as the approver for the app, with the app's signing key // ID set as the app name func createTestStateWithThresholdPolicyAndGitHubAppTrust(t *testing.T) *State { t.Helper() t.Setenv(dev.DevModeKey, "1") t.Setenv(tufv02.AllowV02MetadataKey, "1") state := createTestStateWithPolicyUsingPersons(t) signer := setupSSHKeysForSigning(t, rootKeyBytes, rootPubKeyBytes) appName := tuf.GitHubAppRoleName // TODO: this should be generalized more rootMetadata, err := state.GetRootMetadata(false) if err != nil { t.Fatal(err) } appKey := tufv01.NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, targets1PubKeyBytes)) if err := rootMetadata.AddGitHubAppPrincipal(tuf.GitHubAppRoleName, appKey); err != nil { t.Fatal(err) } rootMetadata.EnableGitHubAppApprovals() state.githubAppApprovalsTrusted = true state.githubAppKeys = []tuf.Principal{appKey} state.githubAppRoleName = appName rootEnv, err := dsse.CreateEnvelope(rootMetadata) if err != nil { t.Fatal(err) } rootEnv, err = dsse.SignEnvelope(context.Background(), rootEnv, signer) if err != nil { t.Fatal(err) } state.RootEnvelope = rootEnv targetsMetadata, err := state.GetTargetsMetadata(TargetsRoleName, false) if err != nil { t.Fatal(err) } gpgKeyR, err := gpg.LoadGPGKeyFromBytes(gpgPubKeyBytes) if err != nil { t.Fatal(err) } gpgKey := tufv01.NewKeyFromSSLibKey(gpgKeyR) person := &tufv02.Person{ PersonID: "jane.doe", PublicKeys: map[string]*tufv02.Key{gpgKey.KeyID: gpgKey}, AssociatedIdentities: map[string]string{appName: "jane.doe"}, } if err := targetsMetadata.AddPrincipal(person); err != nil { t.Fatal(err) } approverKey := tufv01.NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, targets2PubKeyBytes)) approver := &tufv02.Person{ PersonID: "john.doe", PublicKeys: map[string]*tufv02.Key{approverKey.KeyID: approverKey}, AssociatedIdentities: map[string]string{appName: "john.doe"}, } if err := targetsMetadata.AddPrincipal(approver); err != nil { t.Fatal(err) } // Set threshold = 2 for existing rule with the added key if err := targetsMetadata.UpdateRule("protect-main", []string{person.ID(), approver.ID()}, []string{"git:refs/heads/main"}, 2); err != nil { t.Fatal(err) } targetsEnv, err := dsse.CreateEnvelope(targetsMetadata) if err != nil { t.Fatal(err) } targetsEnv, err = dsse.SignEnvelope(context.Background(), targetsEnv, signer) if err != nil { t.Fatal(err) } state.TargetsEnvelope = targetsEnv return state } // createTestStateWithThresholdPolicyAndGitHubAppTrustForMixedAttestations sets // up a test policy with threshold rules. It uses v0.2 (and higher) policy // metadata to support GitHub apps. 
// // Usage notes: // - The app key is targets1PubKeyBytes // - The three authorized persons are "jane.doe", "john.doe", and "jill.doe" // - jane.doe's signing key is gpgPubKeyBytes // - john.doe's signing key is targets2PubKeyBytes // - jill.doe's signing key is gpgUnauthorizedPubKeyBytes // - The protected namespace is the main branch // - Use any of them as the approver for the app, with the app's signing key // ID set as the app name func createTestStateWithThresholdPolicyAndGitHubAppTrustForMixedAttestations(t *testing.T) *State { t.Helper() t.Setenv(dev.DevModeKey, "1") t.Setenv(tufv02.AllowV02MetadataKey, "1") state := createTestStateWithPolicyUsingPersons(t) signer := setupSSHKeysForSigning(t, rootKeyBytes, rootPubKeyBytes) appName := tuf.GitHubAppRoleName // TODO: this should be generalized more rootMetadata, err := state.GetRootMetadata(false) if err != nil { t.Fatal(err) } appKey := tufv01.NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, targets1PubKeyBytes)) if err := rootMetadata.AddGitHubAppPrincipal(tuf.GitHubAppRoleName, appKey); err != nil { t.Fatal(err) } rootMetadata.EnableGitHubAppApprovals() state.githubAppApprovalsTrusted = true state.githubAppKeys = []tuf.Principal{appKey} state.githubAppRoleName = appName rootEnv, err := dsse.CreateEnvelope(rootMetadata) if err != nil { t.Fatal(err) } rootEnv, err = dsse.SignEnvelope(context.Background(), rootEnv, signer) if err != nil { t.Fatal(err) } state.RootEnvelope = rootEnv targetsMetadata, err := state.GetTargetsMetadata(TargetsRoleName, false) if err != nil { t.Fatal(err) } gpgKeyR, err := gpg.LoadGPGKeyFromBytes(gpgPubKeyBytes) if err != nil { t.Fatal(err) } gpgKey := tufv01.NewKeyFromSSLibKey(gpgKeyR) person := &tufv02.Person{ PersonID: "jane.doe", PublicKeys: map[string]*tufv02.Key{gpgKey.KeyID: gpgKey}, AssociatedIdentities: map[string]string{appName: "jane.doe"}, } if err := targetsMetadata.AddPrincipal(person); err != nil { t.Fatal(err) } approver1Key := tufv01.NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, targets2PubKeyBytes)) if err := targetsMetadata.AddPrincipal(approver1Key); err != nil { t.Fatal(err) } approver1 := &tufv02.Person{ PersonID: "john.doe", PublicKeys: map[string]*tufv02.Key{approver1Key.KeyID: approver1Key}, AssociatedIdentities: map[string]string{appName: "john.doe"}, } if err := targetsMetadata.AddPrincipal(approver1); err != nil { t.Fatal(err) } approver2KeyR, err := gpg.LoadGPGKeyFromBytes(gpgUnauthorizedPubKeyBytes) if err != nil { t.Fatal(err) } approver2Key := tufv01.NewKeyFromSSLibKey(approver2KeyR) approver2 := &tufv02.Person{ PersonID: "jill.doe", PublicKeys: map[string]*tufv02.Key{approver2Key.KeyID: approver2Key}, AssociatedIdentities: map[string]string{appName: "jill.doe"}, } if err := targetsMetadata.AddPrincipal(approver2); err != nil { t.Fatal(err) } // Set threshold = 3 for existing rule with the added principals if err := targetsMetadata.UpdateRule("protect-main", []string{person.PersonID, approver1.PersonID, approver2.PersonID}, []string{"git:refs/heads/main"}, 3); err != nil { t.Fatal(err) } targetsEnv, err := dsse.CreateEnvelope(targetsMetadata) if err != nil { t.Fatal(err) } targetsEnv, err = dsse.SignEnvelope(context.Background(), targetsEnv, signer) if err != nil { t.Fatal(err) } state.TargetsEnvelope = targetsEnv return state } func createTestStateWithTagPolicy(t *testing.T) *State { t.Helper() state := createTestStateWithPolicy(t) gpgKeyR, err := gpg.LoadGPGKeyFromBytes(gpgPubKeyBytes) if err != nil { t.Fatal(err) } gpgKey := tufv01.NewKeyFromSSLibKey(gpgKeyR) targetsMetadata, err := 
state.GetTargetsMetadata(TargetsRoleName, false) if err != nil { t.Fatal(err) } if err := targetsMetadata.AddPrincipal(gpgKey); err != nil { t.Fatal(err) } if err := targetsMetadata.AddRule("protect-tags", []string{gpgKey.KeyID}, []string{"git:refs/tags/*"}, 1); err != nil { t.Fatal(err) } targetsEnv, err := dsse.CreateEnvelope(targetsMetadata) if err != nil { t.Fatal(err) } signer := setupSSHKeysForSigning(t, rootKeyBytes, rootPubKeyBytes) targetsEnv, err = dsse.SignEnvelope(context.Background(), targetsEnv, signer) if err != nil { t.Fatal(err) } state.TargetsEnvelope = targetsEnv if err := state.preprocess(); err != nil { t.Fatal(err) } return state } func createTestStateWithThresholdTagPolicy(t *testing.T) *State { t.Helper() state := createTestStateWithPolicy(t) gpgKeyR, err := gpg.LoadGPGKeyFromBytes(gpgPubKeyBytes) if err != nil { t.Fatal(err) } gpgKey := tufv01.NewKeyFromSSLibKey(gpgKeyR) approverKey := tufv01.NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, targets1PubKeyBytes)) targetsMetadata, err := state.GetTargetsMetadata(TargetsRoleName, false) if err != nil { t.Fatal(err) } if err := targetsMetadata.AddPrincipal(gpgKey); err != nil { t.Fatal(err) } if err := targetsMetadata.AddPrincipal(approverKey); err != nil { t.Fatal(err) } if err := targetsMetadata.AddRule("protect-tags", []string{gpgKey.KeyID, approverKey.KeyID}, []string{"git:refs/tags/*"}, 2); err != nil { t.Fatal(err) } targetsEnv, err := dsse.CreateEnvelope(targetsMetadata) if err != nil { t.Fatal(err) } signer := setupSSHKeysForSigning(t, rootKeyBytes, rootPubKeyBytes) targetsEnv, err = dsse.SignEnvelope(context.Background(), targetsEnv, signer) if err != nil { t.Fatal(err) } state.TargetsEnvelope = targetsEnv if err := state.preprocess(); err != nil { t.Fatal(err) } return state } func createTestStateWithTagPolicyForUnauthorizedTest(t *testing.T) *State { t.Helper() state := createTestStateWithPolicy(t) rootKey := tufv01.NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, rootPubKeyBytes)) targetsMetadata, err := state.GetTargetsMetadata(TargetsRoleName, false) if err != nil { t.Fatal(err) } if err := targetsMetadata.AddPrincipal(rootKey); err != nil { t.Fatal(err) } if err := targetsMetadata.AddRule("protect-tags", []string{rootKey.KeyID}, []string{"git:refs/tags/*"}, 1); err != nil { t.Fatal(err) } targetsEnv, err := dsse.CreateEnvelope(targetsMetadata) if err != nil { t.Fatal(err) } signer := setupSSHKeysForSigning(t, rootKeyBytes, rootPubKeyBytes) targetsEnv, err = dsse.SignEnvelope(context.Background(), targetsEnv, signer) if err != nil { t.Fatal(err) } state.TargetsEnvelope = targetsEnv if err := state.preprocess(); err != nil { t.Fatal(err) } return state } func setupSSHKeysForSigning(t *testing.T, privateBytes, publicBytes []byte) *ssh.Signer { t.Helper() keysDir := t.TempDir() privKeyPath := filepath.Join(keysDir, "key") pubKeyPath := filepath.Join(keysDir, "key.pub") if err := os.WriteFile(privKeyPath, privateBytes, 0o600); err != nil { t.Fatal(err) } if err := os.WriteFile(pubKeyPath, publicBytes, 0o600); err != nil { t.Fatal(err) } signer, err := ssh.NewSignerFromFile(privKeyPath) if err != nil { t.Fatal(err) } return signer } gittuf-0.9.0/internal/policy/list.go000066400000000000000000000060421475150141000174370ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package policy import ( "context" "github.com/gittuf/gittuf/internal/gitinterface" "github.com/gittuf/gittuf/internal/tuf" ) type DelegationWithDepth struct { Delegation tuf.Rule Depth int } // ListRules returns 
a list of all the rules as an array of the delegations in a // pre-order traversal of the delegation tree, with the depth of each // delegation. func ListRules(ctx context.Context, repo *gitinterface.Repository, targetRef string) ([]*DelegationWithDepth, error) { state, err := LoadCurrentState(ctx, repo, targetRef) if err != nil { return nil, err } if !state.HasTargetsRole(TargetsRoleName) { return nil, nil } topLevelTargetsMetadata, err := state.GetTargetsMetadata(TargetsRoleName, true) if err != nil { return nil, err } delegationsToSearch := []*DelegationWithDepth{} allDelegations := []*DelegationWithDepth{} for _, topLevelDelegation := range topLevelTargetsMetadata.GetRules() { if topLevelDelegation.ID() == tuf.AllowRuleName { continue } delegationsToSearch = append(delegationsToSearch, &DelegationWithDepth{Delegation: topLevelDelegation, Depth: 0}) } seenRoles := map[string]bool{TargetsRoleName: true} for len(delegationsToSearch) > 0 { currentDelegation := delegationsToSearch[0] delegationsToSearch = delegationsToSearch[1:] // allDelegations will be the returned list of all the delegations in pre-order traversal, no delegations will be popped off allDelegations = append(allDelegations, currentDelegation) if _, seen := seenRoles[currentDelegation.Delegation.ID()]; seen { continue } if state.HasTargetsRole(currentDelegation.Delegation.ID()) { currentMetadata, err := state.GetTargetsMetadata(currentDelegation.Delegation.ID(), true) if err != nil { return nil, err } seenRoles[currentDelegation.Delegation.ID()] = true // We construct localDelegations first so that we preserve the order // of delegations in currentMetadata in delegationsToSearch localDelegations := []*DelegationWithDepth{} for _, delegation := range currentMetadata.GetRules() { if delegation.ID() == tuf.AllowRuleName { continue } localDelegations = append(localDelegations, &DelegationWithDepth{Delegation: delegation, Depth: currentDelegation.Depth + 1}) } if len(localDelegations) > 0 { delegationsToSearch = append(localDelegations, delegationsToSearch...) } } } return allDelegations, nil } // ListPrincipals returns the principals present in the specified rule file. // `targetRef` can be used to control which policy reference is used. 
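//
// A minimal usage sketch (mirroring the tests in this package):
//
//	principals, err := ListPrincipals(ctx, repo, PolicyRef, tuf.TargetsRoleName)
//	if err != nil {
//		// handle the error; ErrPolicyNotFound indicates the rule file does not exist
//	}
//	for principalID := range principals {
//		_ = principalID // inspect or display the principal ID
//	}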
func ListPrincipals(ctx context.Context, repo *gitinterface.Repository, targetRef, policyName string) (map[string]tuf.Principal, error) { state, err := LoadCurrentState(ctx, repo, targetRef) if err != nil { return nil, err } if !state.HasTargetsRole(policyName) { return nil, ErrPolicyNotFound } metadata, err := state.GetTargetsMetadata(policyName, false) if err != nil { return nil, err } return metadata.GetPrincipals(), nil } gittuf-0.9.0/internal/policy/list_test.go000066400000000000000000000072321475150141000205000ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package policy import ( "context" "testing" "github.com/gittuf/gittuf/internal/common/set" "github.com/gittuf/gittuf/internal/signerverifier/gpg" "github.com/gittuf/gittuf/internal/tuf" tufv02 "github.com/gittuf/gittuf/internal/tuf/v02" "github.com/stretchr/testify/assert" ) func TestListRules(t *testing.T) { t.Run("no delegations", func(t *testing.T) { repo, _ := createTestRepository(t, createTestStateWithPolicy) rules, err := ListRules(context.Background(), repo, PolicyRef) assert.Nil(t, err) expectedRules := []*DelegationWithDepth{ { Delegation: &tufv02.Delegation{ Name: "protect-main", Paths: []string{"git:refs/heads/main"}, Terminating: false, Custom: nil, Role: tufv02.Role{ PrincipalIDs: set.NewSetFromItems("157507bbe151e378ce8126c1dcfe043cdd2db96e"), Threshold: 1, }, }, Depth: 0, }, { Delegation: &tufv02.Delegation{ Name: "protect-files-1-and-2", Paths: []string{"file:1", "file:2"}, Terminating: false, Custom: nil, Role: tufv02.Role{ PrincipalIDs: set.NewSetFromItems("157507bbe151e378ce8126c1dcfe043cdd2db96e"), Threshold: 1, }, }, Depth: 0, }, } assert.Equal(t, expectedRules, rules) }) t.Run("with delegations", func(t *testing.T) { repo, _ := createTestRepository(t, createTestStateWithDelegatedPolicies) rules, err := ListRules(context.Background(), repo, PolicyRef) assert.Nil(t, err) expectedRules := []*DelegationWithDepth{ { Delegation: &tufv02.Delegation{ Name: "1", Paths: []string{"file:1/*"}, Terminating: false, Custom: nil, Role: tufv02.Role{ PrincipalIDs: set.NewSetFromItems("SHA256:ESJezAOo+BsiEpddzRXS6+wtF16FID4NCd+3gj96rFo"), Threshold: 1, }, }, Depth: 0, }, { Delegation: &tufv02.Delegation{ Name: "3", Paths: []string{"file:1/subpath1/*"}, Terminating: false, Custom: nil, Role: tufv02.Role{ PrincipalIDs: set.NewSetFromItems("157507bbe151e378ce8126c1dcfe043cdd2db96e"), Threshold: 1, }, }, Depth: 1, }, { Delegation: &tufv02.Delegation{ Name: "4", Paths: []string{"file:1/subpath2/*"}, Terminating: false, Custom: nil, Role: tufv02.Role{ PrincipalIDs: set.NewSetFromItems("157507bbe151e378ce8126c1dcfe043cdd2db96e"), Threshold: 1, }, }, Depth: 1, }, { Delegation: &tufv02.Delegation{ Name: "2", Paths: []string{"file:2/*"}, Terminating: false, Custom: nil, Role: tufv02.Role{ PrincipalIDs: set.NewSetFromItems("SHA256:ESJezAOo+BsiEpddzRXS6+wtF16FID4NCd+3gj96rFo"), Threshold: 1, }, }, Depth: 0, }, } assert.Equal(t, expectedRules, rules) }) } func TestListPrincipals(t *testing.T) { repo, _ := createTestRepository(t, createTestStateWithPolicy) t.Run("policy exists", func(t *testing.T) { pubKeyR, err := gpg.LoadGPGKeyFromBytes(gpgPubKeyBytes) if err != nil { t.Fatal(err) } pubKey := tufv02.NewKeyFromSSLibKey(pubKeyR) expectedPrincipals := map[string]tuf.Principal{pubKey.KeyID: pubKey} principals, err := ListPrincipals(context.Background(), repo, PolicyRef, tuf.TargetsRoleName) assert.Nil(t, err) assert.Equal(t, expectedPrincipals, principals) }) t.Run("policy does not exist", 
func(t *testing.T) { principals, err := ListPrincipals(testCtx, repo, PolicyRef, "does-not-exist") assert.ErrorIs(t, err, ErrPolicyNotFound) assert.Nil(t, principals) }) } gittuf-0.9.0/internal/policy/policy.go000066400000000000000000000773651475150141000200030ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package policy import ( "context" "encoding/json" "errors" "fmt" "log/slog" "path" "reflect" "sort" "strings" "github.com/gittuf/gittuf/internal/common/set" "github.com/gittuf/gittuf/internal/gitinterface" "github.com/gittuf/gittuf/internal/rsl" sslibdsse "github.com/gittuf/gittuf/internal/third_party/go-securesystemslib/dsse" "github.com/gittuf/gittuf/internal/tuf" "github.com/gittuf/gittuf/internal/tuf/migrations" tufv01 "github.com/gittuf/gittuf/internal/tuf/v01" tufv02 "github.com/gittuf/gittuf/internal/tuf/v02" ) const ( // PolicyRef defines the Git namespace used for gittuf policies. PolicyRef = "refs/gittuf/policy" // PolicyStagingRef defines the Git namespace used as a staging area when creating or updating gittuf policies. PolicyStagingRef = "refs/gittuf/policy-staging" // RootRoleName defines the expected name for the gittuf root of trust. RootRoleName = "root" // TargetsRoleName defines the expected name for the top level gittuf policy file. TargetsRoleName = "targets" // DefaultCommitMessage defines the fallback message to use when updating the policy ref if an action specific message is unavailable. DefaultCommitMessage = "Update policy state" metadataTreeEntryName = "metadata" gitReferenceRuleScheme = "git" fileRuleScheme = "file" ) var ( ErrMetadataNotFound = errors.New("unable to find requested metadata file; has it been initialized?") ErrDanglingDelegationMetadata = errors.New("unreachable targets metadata found") ErrPolicyNotFound = errors.New("cannot find policy") ErrUnableToMatchRootKeys = errors.New("unable to match root public keys, gittuf policy is in a broken state") ErrNotAncestor = errors.New("cannot apply changes since policy is not an ancestor of the policy staging") ) // State contains the full set of metadata and root keys present in a policy // state. type State struct { RootEnvelope *sslibdsse.Envelope TargetsEnvelope *sslibdsse.Envelope DelegationEnvelopes map[string]*sslibdsse.Envelope RootPublicKeys []tuf.Principal githubAppApprovalsTrusted bool githubAppKeys []tuf.Principal githubAppRoleName string repository *gitinterface.Repository verifiersCache map[string][]*SignatureVerifier ruleNames *set.Set[string] allPrincipals map[string]tuf.Principal hasFileRule bool globalRules []tuf.GlobalRule } // LoadState returns the State of the repository's policy corresponding to the // entry. It verifies the root of trust for the state from the initial policy // entry in the RSL. If no policy states are found and the entry is for the // policy-staging ref, that entry is returned with no verification. func LoadState(ctx context.Context, repo *gitinterface.Repository, requestedEntry rsl.ReferenceUpdaterEntry) (*State, error) { // Regardless of whether we've been asked for policy ref or staging ref, // we want to examine and verify consecutive policy states that appear // before the entry. This is why we don't just load the state and return // if entry is for the staging ref. slog.Debug(fmt.Sprintf("Loading policy at entry '%s'...", requestedEntry.GetID().String())) // TODO: should this searcher be inherited when invoked via Verifier? 
searcher := newSearcher(repo) slog.Debug("Finding first policy entry...") firstPolicyEntry, err := searcher.FindFirstPolicyEntry() if err != nil { if errors.Is(err, ErrPolicyNotFound) { // we don't have a policy entry yet // we just return the state for the requested entry slog.Debug("No applied policy found, loading requested policy without further verification...") return loadStateForEntry(repo, requestedEntry) } return nil, err } if firstPolicyEntry.GetID().Equal(requestedEntry.GetID()) { slog.Debug("Requested policy's entry is the same as first policy entry, loading it without further verification...") slog.Debug(fmt.Sprintf("Trusting root of trust for initial policy '%s'...", firstPolicyEntry.GetID().String())) return loadStateForEntry(repo, requestedEntry) } // check if firstPolicyEntry is **after** requested entry // this can happen when the requested entry is for policy-staging before // Apply() was ever called slog.Debug("Checking if first policy entry was after requested policy's entry...") knows, err := repo.KnowsCommit(firstPolicyEntry.GetID(), requestedEntry.GetID()) if err != nil { return nil, err } if knows { // the first policy entry knows the requested entry, meaning the // requested entry is an ancestor of the first policy entry // we just return the state for the requested entry slog.Debug("Requested policy's entry was before first applied policy, loading requested policy without verification...") return loadStateForEntry(repo, requestedEntry) } // If requestedEntry.RefName == policy, then allPolicyEntries includes requestedEntry // If requestedEntry.RefName == policy-staging, then allPolicyEntries does not include requestedEntry slog.Debug("Finding all policies between first policy and requested policy...") allPolicyEntries, err := searcher.FindPolicyEntriesInRange(firstPolicyEntry, requestedEntry) if err != nil { return nil, err } // We load the very first policy entry with no additional verification, // the root keys are implicitly trusted initialPolicyState, err := loadStateForEntry(repo, firstPolicyEntry) if err != nil { return nil, err } slog.Debug(fmt.Sprintf("Trusting root of trust for initial policy '%s'...", firstPolicyEntry.GetID().String())) verifiedState := initialPolicyState for _, entry := range allPolicyEntries[1:] { if entry.GetRefName() != PolicyRef { // The searcher _may_ include refs/gittuf/attestations // etc. 
which should be skipped continue } underTestState, err := loadStateForEntry(repo, entry) if err != nil { return nil, err } slog.Debug(fmt.Sprintf("Verifying root of trust for policy '%s'...", entry.GetID().String())) if err := verifiedState.VerifyNewState(ctx, underTestState); err != nil { return nil, fmt.Errorf("unable to verify roots of trust for policy states: %w", err) } verifiedState = underTestState } if requestedEntry.GetRefName() == PolicyRef { // We've already loaded it and done successive verification as // it was included in allPolicyEntries // This state is stored in verifiedState, we can do an internal // verification check and return slog.Debug("Validating requested policy's state...") if err := verifiedState.Verify(ctx); err != nil { return nil, fmt.Errorf("requested state has invalidly signed metadata: %w", err) } slog.Debug(fmt.Sprintf("Successfully loaded policy at entry '%s'!", requestedEntry.GetID().String())) return verifiedState, nil } // This is reached when requestedEntry is for staging ref // We've checked that all the policy states prior to this staging entry // are good (with their root of trust) return loadStateForEntry(repo, requestedEntry) } // LoadCurrentState returns the State corresponding to the repository's current // active policy. It verifies the root of trust for the state starting from the // initial policy entry in the RSL. func LoadCurrentState(ctx context.Context, repo *gitinterface.Repository, ref string) (*State, error) { entry, _, err := rsl.GetLatestReferenceUpdaterEntry(repo, rsl.ForReference(ref)) if err != nil { return nil, err } return LoadState(ctx, repo, entry) } // LoadFirstState returns the State corresponding to the repository's first // active policy. It does not verify the root of trust since it is the initial policy. func LoadFirstState(ctx context.Context, repo *gitinterface.Repository) (*State, error) { firstEntry, _, err := rsl.GetFirstReferenceUpdaterEntryForRef(repo, PolicyRef) if err != nil { return nil, err } return LoadState(ctx, repo, firstEntry) } // GetStateForCommit scans the RSL to identify the first time a commit was seen // in the repository. The policy preceding that RSL entry is returned as the // State to be used for verifying the commit's signature. If the commit hasn't // been seen in the repository previously, no policy state is returned. Also, no // error is returned. Identifying the policy in this case is left to the calling // workflow. func GetStateForCommit(ctx context.Context, repo *gitinterface.Repository, commitID gitinterface.Hash) (*State, error) { firstSeenEntry, _, err := rsl.GetFirstReferenceUpdaterEntryForCommit(repo, commitID) if err != nil { if errors.Is(err, rsl.ErrNoRecordOfCommit) { return nil, nil } return nil, err } commitPolicyEntry, _, err := rsl.GetLatestReferenceUpdaterEntry(repo, rsl.ForReference(PolicyRef), rsl.BeforeEntryID(firstSeenEntry.GetID())) if err != nil { return nil, err } return LoadState(ctx, repo, commitPolicyEntry) } // FindVerifiersForPath identifies the trusted set of verifiers for the // specified path. While walking the delegation graph for the path, signatures // for delegated metadata files are verified using the verifier context. 
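// An illustrative call pattern, assuming s is a *State loaded via
// LoadCurrentState or LoadState:
//
//	verifiers, err := s.FindVerifiersForPath("git:refs/heads/main")
//	if err != nil {
//		// ErrMetadataNotFound indicates no rule file exists in this state
//	}
//	for _, verifier := range verifiers {
//		_ = verifier // each verifier bundles trusted principals and a threshold
//	}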
func (s *State) FindVerifiersForPath(path string) ([]*SignatureVerifier, error) { if s.verifiersCache == nil { slog.Debug("Initializing path cache in policy...") s.verifiersCache = map[string][]*SignatureVerifier{} } else if verifiers, cacheHit := s.verifiersCache[path]; cacheHit { // Cache hit for this path in this policy slog.Debug(fmt.Sprintf("Found cached verifiers for path '%s'", path)) return verifiers, nil } verifiers, err := s.findVerifiersForPathIfProtected(path) if err != nil { return nil, err } if len(verifiers) > 0 { // protected, we have specific set of verifiers to return slog.Debug(fmt.Sprintf("Path '%s' is explicitly protected, returning corresponding verifiers...", path)) // add to cache s.verifiersCache[path] = verifiers // return verifiers return verifiers, nil } slog.Debug("Checking if any global constraints exist") if len(s.globalRules) == 0 { slog.Debug("No global constraints found") s.verifiersCache[path] = verifiers return verifiers, nil } slog.Debug("Global constraints found, returning exhaustive verifier...") // At least one global rule exists, return an exhaustive verifier verifier := &SignatureVerifier{ repository: s.repository, name: tuf.ExhaustiveVerifierName, principals: []tuf.Principal{}, // we'll add all principals below // threshold doesn't matter since we set verifyExhaustively to true threshold: 1, verifyExhaustively: true, // very important! } for _, principal := range s.allPrincipals { verifier.principals = append(verifier.principals, principal) } verifiers = []*SignatureVerifier{verifier} // Note: we could loop through all global constraints and create a // verifier with all principals but targeting a specific constraint (or // an aggregate constraint that has the highest threshold requirement of // all the constraints that match path). However, this probably paints // us into a corner (only threshold requirements between two constraints // can be compared, we may have uncomparable constraints later), and we // would also want to verify every applicable global constraint for // safety, so we would be doing extra work for no reason. 
// add to cache s.verifiersCache[path] = verifiers // return verifiers return verifiers, nil } func (s *State) findVerifiersForPathIfProtected(path string) ([]*SignatureVerifier, error) { if !s.HasTargetsRole(TargetsRoleName) { // No policies exist return nil, ErrMetadataNotFound } // This envelope is verified when state is loaded, as this is // the start for all delegation graph searches targetsMetadata, err := s.GetTargetsMetadata(TargetsRoleName, true) // migrating is fine since this is purely a query, let's start using tufv02 metadata if err != nil { return nil, err } allPrincipals := targetsMetadata.GetPrincipals() // each entry is a list of delegations from a particular metadata file groupedDelegations := [][]tuf.Rule{ targetsMetadata.GetRules(), } seenRoles := map[string]bool{TargetsRoleName: true} var currentDelegationGroup []tuf.Rule verifiers := []*SignatureVerifier{} for { if len(groupedDelegations) == 0 { return verifiers, nil } currentDelegationGroup = groupedDelegations[0] groupedDelegations = groupedDelegations[1:] for { if len(currentDelegationGroup) <= 1 { // Only allow rule found in the current group break } delegation := currentDelegationGroup[0] currentDelegationGroup = currentDelegationGroup[1:] if delegation.Matches(path) { verifier := &SignatureVerifier{ repository: s.repository, name: delegation.ID(), principals: make([]tuf.Principal, 0, delegation.GetPrincipalIDs().Len()), threshold: delegation.GetThreshold(), } for _, principalID := range delegation.GetPrincipalIDs().Contents() { verifier.principals = append(verifier.principals, allPrincipals[principalID]) } verifiers = append(verifiers, verifier) if _, seen := seenRoles[delegation.ID()]; seen { continue } if s.HasTargetsRole(delegation.ID()) { delegatedMetadata, err := s.GetTargetsMetadata(delegation.ID(), true) // migrating is fine since this is purely a query, let's start using tufv02 metadata if err != nil { return nil, err } seenRoles[delegation.ID()] = true for principalID, principal := range delegatedMetadata.GetPrincipals() { allPrincipals[principalID] = principal } // Add the current metadata's further delegations upfront to // be depth-first groupedDelegations = append([][]tuf.Rule{delegatedMetadata.GetRules()}, groupedDelegations...) if delegation.IsLastTrustedInRuleFile() { // Stop processing current delegation group, but proceed // with other groups break } } } } } } func (s *State) GetAllPrincipals() map[string]tuf.Principal { return s.allPrincipals } // Verify verifies the contents of the State for internal consistency. // Specifically, it checks that the root keys in the root role match the ones // stored on disk in the state. Further, it also verifies the signatures of the // top level Targets role and all reachable delegated Targets roles. Any // unreachable role returns an error. func (s *State) Verify(ctx context.Context) error { // Check consistency of root keys rootKeys, err := s.GetRootKeys() if err != nil { return err } // TODO: do we need this? 
if !verifyRootKeysMatch(rootKeys, s.RootPublicKeys) { return ErrUnableToMatchRootKeys } rootVerifier, err := s.getRootVerifier() if err != nil { return err } if _, err := rootVerifier.Verify(ctx, gitinterface.ZeroHash, s.RootEnvelope); err != nil { return err } // Check GitHub app approvals rootMetadata, err := s.GetRootMetadata(false) // don't migrate: this may be for a write and we don't want to write tufv02 metadata yet if err != nil { return err } if rootMetadata.IsGitHubAppApprovalTrusted() { // Check that the GitHub app role is declared _, err := rootMetadata.GetGitHubAppPrincipals() if err != nil { return err } } // Check top-level targets if s.TargetsEnvelope == nil { return nil } targetsVerifier, err := s.getTargetsVerifier() if err != nil { return err } if _, err := targetsVerifier.Verify(ctx, gitinterface.ZeroHash, s.TargetsEnvelope); err != nil { return err } targetsMetadata, err := s.GetTargetsMetadata(TargetsRoleName, false) // don't migrate: this may be for a write and we don't want to write tufv02 metadata yet if err != nil { return err } // Check reachable delegations reachedDelegations := map[string]bool{} for delegatedRoleName := range s.DelegationEnvelopes { reachedDelegations[delegatedRoleName] = false } delegationsQueue := targetsMetadata.GetRules() delegationKeys := targetsMetadata.GetPrincipals() for { // The last entry in the queue is always the allow rule, which we don't // process during DFS if len(delegationsQueue) <= 1 { break } delegation := delegationsQueue[0] delegationsQueue = delegationsQueue[1:] if s.HasTargetsRole(delegation.ID()) { reachedDelegations[delegation.ID()] = true env := s.DelegationEnvelopes[delegation.ID()] principals := []tuf.Principal{} for _, principalID := range delegation.GetPrincipalIDs().Contents() { principals = append(principals, delegationKeys[principalID]) } verifier := &SignatureVerifier{ repository: s.repository, name: delegation.ID(), principals: principals, threshold: delegation.GetThreshold(), } if _, err := verifier.Verify(ctx, gitinterface.ZeroHash, env); err != nil { return err } delegatedMetadata, err := s.GetTargetsMetadata(delegation.ID(), false) // don't migrate: this may be for a write and we don't want to write tufv02 metadata yet if err != nil { return err } delegationsQueue = append(delegatedMetadata.GetRules(), delegationsQueue...) for keyID, key := range delegatedMetadata.GetPrincipals() { delegationKeys[keyID] = key } } } for _, reached := range reachedDelegations { if !reached { return ErrDanglingDelegationMetadata } } return nil } // Commit verifies and writes the State to the policy-staging namespace. It also creates // an RSL entry recording the new tip of the policy-staging namespace. 
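// An illustrative flow, assuming the caller has modified s and holds the
// corresponding *gitinterface.Repository:
//
//	if err := s.Commit(repo, "Update policy state", true); err != nil {
//		// the staging ref was not updated
//	}
//	// The change only affects the active policy after a successful Apply.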
func (s *State) Commit(repo *gitinterface.Repository, commitMessage string, signCommit bool) error {
	if len(commitMessage) == 0 {
		commitMessage = DefaultCommitMessage
	}

	metadata := map[string]*sslibdsse.Envelope{}
	metadata[RootRoleName] = s.RootEnvelope
	if s.TargetsEnvelope != nil {
		metadata[TargetsRoleName] = s.TargetsEnvelope
	}
	if s.DelegationEnvelopes != nil {
		for k, v := range s.DelegationEnvelopes {
			metadata[k] = v
		}
	}

	allTreeEntries := []gitinterface.TreeEntry{}

	for name, env := range metadata {
		envContents, err := json.Marshal(env)
		if err != nil {
			return err
		}

		blobID, err := repo.WriteBlob(envContents)
		if err != nil {
			return err
		}

		allTreeEntries = append(allTreeEntries, gitinterface.NewEntryBlob(path.Join(metadataTreeEntryName, name+".json"), blobID))
	}

	treeBuilder := gitinterface.NewTreeBuilder(repo)

	policyRootTreeID, err := treeBuilder.WriteTreeFromEntries(allTreeEntries)
	if err != nil {
		return err
	}

	originalCommitID, err := repo.GetReference(PolicyStagingRef)
	if err != nil {
		if !errors.Is(err, gitinterface.ErrReferenceNotFound) {
			return err
		}
	}

	commitID, err := repo.Commit(policyRootTreeID, PolicyStagingRef, commitMessage, signCommit)
	if err != nil {
		return err
	}

	// We must reset to the original policy commit if err != nil from here onwards.
	if err := rsl.NewReferenceEntry(PolicyStagingRef, commitID).Commit(repo, signCommit); err != nil {
		if !originalCommitID.IsZero() {
			return repo.ResetDueToError(err, PolicyStagingRef, originalCommitID)
		}

		return err
	}

	return nil
}

// Apply takes valid changes from the policy staging ref and fast-forward
// merges them into the policy ref. Apply only takes place if the latest state
// on the policy staging ref is valid. This prevents invalid changes to the
// policy from taking effect, while still allowing in-progress changes that are
// not yet signed by the required number of users to accumulate on the policy
// staging ref.
func Apply(ctx context.Context, repo *gitinterface.Repository, signRSLEntry bool) error {
	// Get the reference for the PolicyRef
	policyTip, err := repo.GetReference(PolicyRef)
	if err != nil {
		if !errors.Is(err, gitinterface.ErrReferenceNotFound) {
			return fmt.Errorf("failed to get policy reference %s: %w", PolicyRef, err)
		}
	}

	// Get the reference for the PolicyStagingRef
	policyStagingTip, err := repo.GetReference(PolicyStagingRef)
	if err != nil {
		return fmt.Errorf("failed to get policy staging reference %s: %w", PolicyStagingRef, err)
	}

	// Check if the PolicyStagingRef is ahead of PolicyRef (fast-forward)
	if !policyTip.IsZero() {
		// This check ensures that the policy staging branch is a direct
		// forward progression of the policy branch, preventing any overwrites
		// of policy history and maintaining a linear policy evolution, since a
		// fast-forward merge does not work with a non-linear history.
// This is only being checked if there are no problems finding the tip of the policy ref, since if there // is no tip, then it cannot be an ancestor of the tip of the policy staging ref isAncestor, err := repo.KnowsCommit(policyStagingTip, policyTip) if err != nil { return fmt.Errorf("failed to check if policy commit is ancestor of policy staging commit: %w", err) } if !isAncestor { return ErrNotAncestor } } // using LoadCurrentState to load and verify if the PolicyStagingRef's // latest state is valid state, err := LoadCurrentState(ctx, repo, PolicyStagingRef) if err != nil { return fmt.Errorf("failed to load current state: %w", err) } if err := state.Verify(ctx); err != nil { return fmt.Errorf("staged policy is invalid: %w", err) } // Update the reference for the base to point to the new commit if err := repo.SetReference(PolicyRef, policyStagingTip); err != nil { return fmt.Errorf("failed to set new policy reference: %w", err) } if err := rsl.NewReferenceEntry(PolicyRef, policyStagingTip).Commit(repo, signRSLEntry); err != nil { if !policyTip.IsZero() { return repo.ResetDueToError(err, PolicyRef, policyTip) } return err } return nil } // Discard resets the policy staging ref, discarding any changes made to the policy staging ref. func Discard(repo *gitinterface.Repository) error { policyTip, err := repo.GetReference(PolicyRef) if err != nil { if errors.Is(err, gitinterface.ErrReferenceNotFound) { if err := repo.DeleteReference(PolicyStagingRef); err != nil && !errors.Is(err, gitinterface.ErrReferenceNotFound) { return fmt.Errorf("failed to delete policy staging reference %s: %w", PolicyStagingRef, err) } return nil } return fmt.Errorf("failed to get policy reference %s: %w", PolicyRef, err) } // Reset PolicyStagingRef to match the actual policy ref if err := repo.SetReference(PolicyStagingRef, policyTip); err != nil { return fmt.Errorf("failed to reset policy staging reference %s: %w", PolicyStagingRef, err) } return nil } func (s *State) GetRootKeys() ([]tuf.Principal, error) { rootMetadata, err := s.GetRootMetadata(false) // don't migrate: this may be for a write and we don't want to write tufv02 metadata yet if err != nil { return nil, err } return rootMetadata.GetRootPrincipals() } // GetRootMetadata returns the deserialized payload of the State's RootEnvelope. // The `migrate` parameter determines if the schema must be converted to a newer // version. func (s *State) GetRootMetadata(migrate bool) (tuf.RootMetadata, error) { payloadBytes, err := s.RootEnvelope.DecodeB64Payload() if err != nil { return nil, err } inspectRootMetadata := map[string]any{} if err := json.Unmarshal(payloadBytes, &inspectRootMetadata); err != nil { return nil, fmt.Errorf("unable to unmarshal root metadata: %w", err) } schemaVersion, hasSchemaVersion := inspectRootMetadata["schemaVersion"] switch { case !hasSchemaVersion: // this is tufv01 // Something that's not tufv01 may also lack the schemaVersion field and // enter this code path. At that point, we're relying on the unmarshal // to return something that's close to tufv01. We may see strange bugs // if this happens, but it's also likely someone trying to submit // incorrect metadata / trigger a version rollback, which we do want to // be aware of. 
rootMetadata := &tufv01.RootMetadata{} if err := json.Unmarshal(payloadBytes, rootMetadata); err != nil { return nil, fmt.Errorf("unable to unmarshal root metadata: %w", err) } if migrate { return migrations.MigrateRootMetadataV01ToV02(rootMetadata), nil } return rootMetadata, nil case schemaVersion == tufv02.RootVersion: rootMetadata := &tufv02.RootMetadata{} if err := json.Unmarshal(payloadBytes, rootMetadata); err != nil { return nil, fmt.Errorf("unable to unmarshal root metadata: %w", err) } return rootMetadata, nil default: return nil, tuf.ErrUnknownRootMetadataVersion } } // GetTargetsMetadata returns the deserialized payload of the State's // TargetsEnvelope for the specified `roleName`. The `migrate` parameter // determines if the schema must be converted to a newer version. func (s *State) GetTargetsMetadata(roleName string, migrate bool) (tuf.TargetsMetadata, error) { e := s.TargetsEnvelope if roleName != TargetsRoleName { env, ok := s.DelegationEnvelopes[roleName] if !ok { return nil, ErrMetadataNotFound } e = env } if e == nil { return nil, ErrMetadataNotFound } payloadBytes, err := e.DecodeB64Payload() if err != nil { return nil, err } inspectTargetsMetadata := map[string]any{} if err := json.Unmarshal(payloadBytes, &inspectTargetsMetadata); err != nil { return nil, fmt.Errorf("unable to unmarshal rule file metadata: %w", err) } schemaVersion, hasSchemaVersion := inspectTargetsMetadata["schemaVersion"] switch { case !hasSchemaVersion: // this is tufv01 // Something that's not tufv01 may also lack the schemaVersion field and // enter this code path. At that point, we're relying on the unmarshal // to return something that's close to tufv01. We may see strange bugs // if this happens, but it's also likely someone trying to submit // incorrect metadata / trigger a version rollback, which we do want to // be aware of. targetsMetadata := &tufv01.TargetsMetadata{} if err := json.Unmarshal(payloadBytes, targetsMetadata); err != nil { return nil, fmt.Errorf("unable to unmarshal rule file metadata: %w", err) } if migrate { return migrations.MigrateTargetsMetadataV01ToV02(targetsMetadata), nil } return targetsMetadata, nil case schemaVersion == tufv02.TargetsVersion: targetsMetadata := &tufv02.TargetsMetadata{} if err := json.Unmarshal(payloadBytes, targetsMetadata); err != nil { return nil, fmt.Errorf("unable to unmarshal rule file metadata: %w", err) } return targetsMetadata, nil default: return nil, tuf.ErrUnknownTargetsMetadataVersion } } func (s *State) HasTargetsRole(roleName string) bool { if roleName == TargetsRoleName { return s.TargetsEnvelope != nil } _, ok := s.DelegationEnvelopes[roleName] return ok } func (s *State) HasRuleName(name string) bool { return s.ruleNames.Has(name) } // preprocess handles several "one time" tasks when the state is first loaded. // This includes things like loading the set of rule names present in the state, // checking if it has file rules, etc. 
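// A rough sketch of how preprocess fits into state loading (it is invoked by
// loadStateForEntry once the envelopes have been read from the Git tree):
//
//	state := &State{repository: repo, RootEnvelope: rootEnv}
//	if err := state.preprocess(); err != nil {
//		// duplicated rule names or unreadable metadata surface here
//	}
//	// state.globalRules, state.allPrincipals, state.ruleNames and
//	// state.hasFileRule are now populated.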
func (s *State) preprocess() error {
	rootMetadata, err := s.GetRootMetadata(false)
	if err != nil {
		return err
	}

	s.globalRules = rootMetadata.GetGlobalRules()

	if s.allPrincipals == nil {
		s.allPrincipals = map[string]tuf.Principal{}
	}

	for principalID, principal := range rootMetadata.GetPrincipals() {
		s.allPrincipals[principalID] = principal
	}

	if s.TargetsEnvelope == nil {
		return nil
	}

	s.ruleNames = set.NewSet[string]()

	targetsMetadata, err := s.GetTargetsMetadata(TargetsRoleName, false)
	if err != nil {
		return err
	}

	for principalID, principal := range targetsMetadata.GetPrincipals() {
		s.allPrincipals[principalID] = principal
	}

	for _, rule := range targetsMetadata.GetRules() {
		if rule.ID() == tuf.AllowRuleName {
			continue
		}

		if s.ruleNames.Has(rule.ID()) {
			return tuf.ErrDuplicatedRuleName
		}

		s.ruleNames.Add(rule.ID())

		if !s.hasFileRule {
			patterns := rule.GetProtectedNamespaces()
			for _, pattern := range patterns {
				if strings.HasPrefix(pattern, fileRuleScheme) {
					s.hasFileRule = true
					break
				}
			}
		}
	}

	if len(s.DelegationEnvelopes) == 0 {
		return nil
	}

	for delegatedRoleName := range s.DelegationEnvelopes {
		delegatedMetadata, err := s.GetTargetsMetadata(delegatedRoleName, false)
		if err != nil {
			return err
		}

		for principalID, principal := range delegatedMetadata.GetPrincipals() {
			s.allPrincipals[principalID] = principal
		}

		for _, rule := range delegatedMetadata.GetRules() {
			if rule.ID() == tuf.AllowRuleName {
				continue
			}

			if s.ruleNames.Has(rule.ID()) {
				return tuf.ErrDuplicatedRuleName
			}

			s.ruleNames.Add(rule.ID())

			if !s.hasFileRule {
				patterns := rule.GetProtectedNamespaces()
				for _, pattern := range patterns {
					if strings.HasPrefix(pattern, fileRuleScheme) {
						s.hasFileRule = true
						break
					}
				}
			}
		}
	}

	return nil
}

func (s *State) getRootVerifier() (*SignatureVerifier, error) {
	rootMetadata, err := s.GetRootMetadata(false)
	if err != nil {
		return nil, err
	}

	principals, err := rootMetadata.GetRootPrincipals()
	if err != nil {
		return nil, err
	}

	threshold, err := rootMetadata.GetRootThreshold()
	if err != nil {
		return nil, err
	}

	return &SignatureVerifier{
		repository: s.repository,
		principals: principals,
		threshold:  threshold,
	}, nil
}

func (s *State) getTargetsVerifier() (*SignatureVerifier, error) {
	rootMetadata, err := s.GetRootMetadata(false)
	if err != nil {
		return nil, err
	}

	principals, err := rootMetadata.GetPrimaryRuleFilePrincipals()
	if err != nil {
		return nil, err
	}

	threshold, err := rootMetadata.GetPrimaryRuleFileThreshold()
	if err != nil {
		return nil, err
	}

	return &SignatureVerifier{
		repository: s.repository,
		principals: principals,
		threshold:  threshold,
	}, nil
}

// loadStateForEntry returns the State for a specified RSL reference entry for
// the policy namespace. This helper is focused on reading the Git object store
// and loading the policy contents. Typically, LoadCurrentState or LoadState
// must be used. The exception is VerifyRelative... which performs root
// verification between consecutive policy states.
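// An illustrative sketch, assuming entry is an RSL entry for PolicyRef or
// PolicyStagingRef (any other reference is rejected below):
//
//	entry, _, err := rsl.GetLatestReferenceUpdaterEntry(repo, rsl.ForReference(PolicyRef))
//	if err == nil {
//		state, err := loadStateForEntry(repo, entry)
//		_, _ = state, err
//	}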
func loadStateForEntry(repo *gitinterface.Repository, entry rsl.ReferenceUpdaterEntry) (*State, error) { if entry.GetRefName() != PolicyRef && entry.GetRefName() != PolicyStagingRef { return nil, rsl.ErrRSLEntryDoesNotMatchRef } commitTreeID, err := repo.GetCommitTreeID(entry.GetTargetID()) if err != nil { return nil, err } allTreeEntries, err := repo.GetAllFilesInTree(commitTreeID) if err != nil { return nil, err } state := &State{repository: repo} for name, blobID := range allTreeEntries { contents, err := repo.ReadBlob(blobID) if err != nil { return nil, err } // We have this conditional because once upon a time we used to store // the root keys on disk as well; now we just get them from the root // metadata file. We ignore the keys on disk in the old policy states. if strings.HasPrefix(name, metadataTreeEntryName+"/") { env := &sslibdsse.Envelope{} if err := json.Unmarshal(contents, env); err != nil { return nil, err } metadataName := strings.TrimPrefix(name, metadataTreeEntryName+"/") switch metadataName { case fmt.Sprintf("%s.json", RootRoleName): state.RootEnvelope = env case fmt.Sprintf("%s.json", TargetsRoleName): state.TargetsEnvelope = env default: if state.DelegationEnvelopes == nil { state.DelegationEnvelopes = map[string]*sslibdsse.Envelope{} } state.DelegationEnvelopes[strings.TrimSuffix(metadataName, ".json")] = env } } } if err := state.preprocess(); err != nil { return nil, err } rootMetadata, err := state.GetRootMetadata(false) if err != nil { return nil, err } rootPrincipals, err := rootMetadata.GetRootPrincipals() if err != nil { return nil, err } state.RootPublicKeys = rootPrincipals state.githubAppApprovalsTrusted = rootMetadata.IsGitHubAppApprovalTrusted() githubAppPrincipals, err := rootMetadata.GetGitHubAppPrincipals() if err == nil { state.githubAppKeys = githubAppPrincipals state.githubAppRoleName = tuf.GitHubAppRoleName } else if state.githubAppApprovalsTrusted { return nil, tuf.ErrGitHubAppInformationNotFoundInRoot } return state, nil } func verifyRootKeysMatch(keys1, keys2 []tuf.Principal) bool { if len(keys1) != len(keys2) { return false } sort.Slice(keys1, func(i, j int) bool { return keys1[i].ID() < keys1[j].ID() }) sort.Slice(keys2, func(i, j int) bool { return keys2[i].ID() < keys2[j].ID() }) return reflect.DeepEqual(keys1, keys2) } gittuf-0.9.0/internal/policy/policy_test.go000066400000000000000000000430721475150141000210260ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package policy import ( "context" "fmt" "testing" "github.com/gittuf/gittuf/internal/gitinterface" "github.com/gittuf/gittuf/internal/rsl" "github.com/gittuf/gittuf/internal/signerverifier/dsse" "github.com/gittuf/gittuf/internal/signerverifier/gpg" "github.com/gittuf/gittuf/internal/signerverifier/ssh" "github.com/gittuf/gittuf/internal/tuf" tufv01 "github.com/gittuf/gittuf/internal/tuf/v01" "github.com/stretchr/testify/assert" ) func TestLoadState(t *testing.T) { t.Run("loading while verifying multiple states", func(t *testing.T) { repo, state := createTestRepository(t, createTestStateWithPolicy) signer := setupSSHKeysForSigning(t, rootKeyBytes, rootPubKeyBytes) key := tufv01.NewKeyFromSSLibKey(signer.MetadataKey()) entry, err := rsl.GetLatestEntry(repo) if err != nil { t.Fatal(err) } loadedState, err := LoadState(context.Background(), repo, entry.(*rsl.ReferenceEntry)) if err != nil { t.Error(err) } assertStatesEqual(t, state, loadedState) targetsMetadata, err := state.GetTargetsMetadata(TargetsRoleName, false) if err != nil { 
t.Fatal(err) } if err := targetsMetadata.AddPrincipal(key); err != nil { t.Fatal(err) } if err := targetsMetadata.AddRule("test-rule-1", []string{key.KeyID}, []string{"test-rule-1"}, 1); err != nil { t.Fatal(err) } state.ruleNames.Add("test-rule-1") env, err := dsse.CreateEnvelope(targetsMetadata) if err != nil { t.Fatal(err) } env, err = dsse.SignEnvelope(context.Background(), env, signer) if err != nil { t.Fatal(err) } state.TargetsEnvelope = env if err := state.Commit(repo, "", false); err != nil { t.Fatal(err) } if err := Apply(context.Background(), repo, false); err != nil { t.Fatal(err) } if err := targetsMetadata.AddRule("test-rule-2", []string{key.KeyID}, []string{"test-rule-2"}, 1); err != nil { t.Fatal(err) } state.ruleNames.Add("test-rule-2") env, err = dsse.CreateEnvelope(targetsMetadata) if err != nil { t.Fatal(err) } env, err = dsse.SignEnvelope(context.Background(), env, signer) if err != nil { t.Fatal(err) } state.TargetsEnvelope = env if err := state.Commit(repo, "", false); err != nil { t.Fatal(err) } if err := Apply(context.Background(), repo, false); err != nil { t.Fatal(err) } entry, err = rsl.GetLatestEntry(repo) if err != nil { t.Fatal(err) } loadedState, err = LoadState(context.Background(), repo, entry.(*rsl.ReferenceEntry)) if err != nil { t.Error(err) } assertStatesEqual(t, state, loadedState) }) t.Run("fail loading while verifying multiple states, bad sig", func(t *testing.T) { repo, state := createTestRepository(t, createTestStateWithPolicy) signer := setupSSHKeysForSigning(t, rootKeyBytes, rootPubKeyBytes) key := tufv01.NewKeyFromSSLibKey(signer.MetadataKey()) entry, err := rsl.GetLatestEntry(repo) if err != nil { t.Fatal(err) } loadedState, err := LoadState(context.Background(), repo, entry.(*rsl.ReferenceEntry)) if err != nil { t.Error(err) } assertStatesEqual(t, state, loadedState) targetsMetadata, err := state.GetTargetsMetadata(TargetsRoleName, false) if err != nil { t.Fatal(err) } if err := targetsMetadata.AddPrincipal(key); err != nil { t.Fatal(err) } if err := targetsMetadata.AddRule("test-rule-1", []string{key.KeyID}, []string{"test-rule-1"}, 1); err != nil { t.Fatal(err) } state.ruleNames.Add("test-rule-1") env, err := dsse.CreateEnvelope(targetsMetadata) if err != nil { t.Fatal(err) } env, err = dsse.SignEnvelope(context.Background(), env, signer) if err != nil { t.Fatal(err) } state.TargetsEnvelope = env if err := state.Commit(repo, "", false); err != nil { t.Fatal(err) } if err := Apply(context.Background(), repo, false); err != nil { t.Fatal(err) } if err := targetsMetadata.AddRule("test-rule-2", []string{key.KeyID}, []string{"test-rule-2"}, 1); err != nil { t.Fatal(err) } state.ruleNames.Add("test-rule-2") env, err = dsse.CreateEnvelope(targetsMetadata) if err != nil { t.Fatal(err) } badSigner := setupSSHKeysForSigning(t, targets1KeyBytes, targets1PubKeyBytes) env, err = dsse.SignEnvelope(context.Background(), env, badSigner) if err != nil { t.Fatal(err) } state.TargetsEnvelope = env if err := state.Commit(repo, "", false); err != nil { t.Fatal(err) } policyStagingRefTip, err := repo.GetReference(PolicyStagingRef) if err != nil { t.Fatal(err) } if err := repo.SetReference(PolicyRef, policyStagingRefTip); err != nil { t.Fatal(err) } if err := rsl.NewReferenceEntry(PolicyRef, policyStagingRefTip).Commit(repo, false); err != nil { t.Fatal(err) } entry, err = rsl.GetLatestEntry(repo) if err != nil { t.Fatal(err) } _, err = LoadState(context.Background(), repo, entry.(*rsl.ReferenceEntry)) assert.ErrorIs(t, err, ErrVerifierConditionsUnmet) }) } func 
TestLoadCurrentState(t *testing.T) { repo, state := createTestRepository(t, createTestStateWithOnlyRoot) loadedState, err := LoadCurrentState(context.Background(), repo, PolicyRef) if err != nil { t.Error(err) } assertStatesEqual(t, state, loadedState) } func TestLoadFirstState(t *testing.T) { repo, firstState := createTestRepository(t, createTestStateWithPolicy) // Update policy, record in RSL secondState, err := LoadCurrentState(context.Background(), repo, PolicyRef) // secondState := state will modify state as well if err != nil { t.Fatal(err) } signer := setupSSHKeysForSigning(t, rootKeyBytes, rootPubKeyBytes) key := tufv01.NewKeyFromSSLibKey(signer.MetadataKey()) targetsMetadata, err := secondState.GetTargetsMetadata(TargetsRoleName, false) if err != nil { t.Fatal(err) } if err := targetsMetadata.AddPrincipal(key); err != nil { t.Fatal(err) } if err := targetsMetadata.AddRule("new-rule", []string{key.KeyID}, []string{"*"}, 1); err != nil { // just a dummy rule t.Fatal(err) } targetsEnv, err := dsse.CreateEnvelope(targetsMetadata) if err != nil { t.Fatal(err) } targetsEnv, err = dsse.SignEnvelope(context.Background(), targetsEnv, signer) if err != nil { t.Fatal(err) } secondState.TargetsEnvelope = targetsEnv if err := secondState.Commit(repo, "Second state", false); err != nil { t.Fatal(err) } loadedState, err := LoadFirstState(context.Background(), repo) if err != nil { t.Error(err) } assert.Equal(t, firstState, loadedState) } func TestLoadStateForEntry(t *testing.T) { repo, state := createTestRepository(t, createTestStateWithOnlyRoot) entry, _, err := rsl.GetLatestReferenceUpdaterEntry(repo, rsl.ForReference(PolicyRef)) if err != nil { t.Fatal(err) } loadedState, err := loadStateForEntry(repo, entry) if err != nil { t.Error(err) } assertStatesEqual(t, state, loadedState) } func TestStateVerify(t *testing.T) { t.Parallel() t.Run("only root", func(t *testing.T) { t.Parallel() state := createTestStateWithOnlyRoot(t) err := state.Verify(testCtx) assert.Nil(t, err) }) t.Run("only root, remove root keys", func(t *testing.T) { t.Parallel() state := createTestStateWithOnlyRoot(t) state.RootPublicKeys = nil err := state.Verify(testCtx) assert.ErrorIs(t, err, ErrUnableToMatchRootKeys) }) t.Run("with policy", func(t *testing.T) { t.Parallel() state := createTestStateWithPolicy(t) err := state.Verify(testCtx) assert.Nil(t, err) }) t.Run("with delegated policy", func(t *testing.T) { t.Parallel() state := createTestStateWithDelegatedPolicies(t) err := state.Verify(testCtx) assert.Nil(t, err) }) } func TestStateCommit(t *testing.T) { repo, _ := createTestRepository(t, createTestStateWithOnlyRoot) // Commit and Apply are called by the helper policyTip, err := repo.GetReference(PolicyRef) if err != nil { t.Fatal(err) } tmpEntry, err := rsl.GetLatestEntry(repo) if err != nil { t.Fatal(err) } entry := tmpEntry.(*rsl.ReferenceEntry) assert.Equal(t, entry.TargetID, policyTip) } func TestStateGetRootMetadata(t *testing.T) { t.Parallel() state := createTestStateWithOnlyRoot(t) rootMetadata, err := state.GetRootMetadata(true) assert.Nil(t, err) rootPrincipals, err := rootMetadata.GetRootPrincipals() assert.Nil(t, err) assert.Equal(t, "SHA256:ESJezAOo+BsiEpddzRXS6+wtF16FID4NCd+3gj96rFo", rootPrincipals[0].ID()) } func TestStateFindVerifiersForPath(t *testing.T) { t.Parallel() t.Run("with delegated policy", func(t *testing.T) { t.Parallel() state := createTestStateWithDelegatedPolicies(t) // changed from createTestStateWithPolicies to increase test // coverage to cover s.DelegationEnvelopes in PublicKeys() 
keyR := ssh.NewKeyFromBytes(t, rootPubKeyBytes) key := tufv01.NewKeyFromSSLibKey(keyR) tests := map[string]struct { path string verifiers []*SignatureVerifier }{ "verifiers for files 1": { path: "file:1/*", verifiers: []*SignatureVerifier{{ name: "1", principals: []tuf.Principal{key}, threshold: 1, }}, }, "verifiers for files": { path: "file:2/*", verifiers: []*SignatureVerifier{{ name: "2", principals: []tuf.Principal{key}, threshold: 1, }}, }, "verifiers for unprotected branch": { path: "git:refs/heads/unprotected", verifiers: []*SignatureVerifier{}, }, "verifiers for unprotected files": { path: "file:unprotected", verifiers: []*SignatureVerifier{}, }, } for name, test := range tests { verifiers, err := state.FindVerifiersForPath(test.path) assert.Nil(t, err, fmt.Sprintf("unexpected error in test '%s'", name)) assert.Equal(t, test.verifiers, verifiers, fmt.Sprintf("policy verifiers for path '%s' don't match expected verifiers in test '%s'", test.path, name)) } }) t.Run("without policy", func(t *testing.T) { t.Parallel() state := createTestStateWithOnlyRoot(t) verifiers, err := state.FindVerifiersForPath("test-path") assert.Nil(t, verifiers) assert.ErrorIs(t, err, ErrMetadataNotFound) }) } func TestGetStateForCommit(t *testing.T) { t.Parallel() repo, firstState := createTestRepository(t, createTestStateWithPolicy) // Create some commits refName := "refs/heads/main" treeBuilder := gitinterface.NewTreeBuilder(repo) emptyTreeHash, err := treeBuilder.WriteTreeFromEntries(nil) if err != nil { t.Fatal(err) } commitID, err := repo.Commit(emptyTreeHash, refName, "Initial commit", false) if err != nil { t.Fatal(err) } // No RSL entry for commit => no state yet state, err := GetStateForCommit(context.Background(), repo, commitID) assert.Nil(t, err) assert.Nil(t, state) // Record RSL entry for commit if err := rsl.NewReferenceEntry(refName, commitID).Commit(repo, false); err != nil { t.Fatal(err) } state, err = GetStateForCommit(context.Background(), repo, commitID) assert.Nil(t, err) assertStatesEqual(t, firstState, state) // Create new branch, record new commit there anotherRefName := "refs/heads/feature" if err := repo.SetReference(anotherRefName, commitID); err != nil { t.Fatal(err) } newCommitID, err := repo.Commit(emptyTreeHash, anotherRefName, "Second commit", false) if err != nil { t.Fatal(err) } if err := rsl.NewReferenceEntry(anotherRefName, newCommitID).Commit(repo, false); err != nil { t.Fatal(err) } state, err = GetStateForCommit(context.Background(), repo, newCommitID) assert.Nil(t, err) assertStatesEqual(t, firstState, state) // Update policy, record in RSL secondState, err := LoadCurrentState(context.Background(), repo, PolicyRef) // secondState := firstState will modify firstState as well if err != nil { t.Fatal(err) } targetsMetadata, err := secondState.GetTargetsMetadata(TargetsRoleName, false) if err != nil { t.Fatal(err) } keyR, err := gpg.LoadGPGKeyFromBytes(gpgPubKeyBytes) if err != nil { t.Fatal(err) } key := tufv01.NewKeyFromSSLibKey(keyR) if err := targetsMetadata.AddRule("new-rule", []string{key.KeyID}, []string{"*"}, 1); err != nil { // just a dummy rule t.Fatal(err) } signer := setupSSHKeysForSigning(t, rootKeyBytes, rootPubKeyBytes) targetsEnv, err := dsse.CreateEnvelope(targetsMetadata) if err != nil { t.Fatal(err) } targetsEnv, err = dsse.SignEnvelope(context.Background(), targetsEnv, signer) if err != nil { t.Fatal(err) } secondState.TargetsEnvelope = targetsEnv if err := secondState.Commit(repo, "Second state", false); err != nil { t.Fatal(err) } if err := 
Apply(context.Background(), repo, false); err != nil { t.Fatal(err) } // Merge feature branch commit into main if err := repo.CheckAndSetReference(refName, newCommitID, commitID); err != nil { t.Fatal(err) } // Record in RSL if err := rsl.NewReferenceEntry(refName, newCommitID).Commit(repo, false); err != nil { t.Fatal(err) } // Check that for this commit ID, the first state is returned and not the // second state, err = GetStateForCommit(context.Background(), repo, newCommitID) assert.Nil(t, err) assertStatesEqual(t, firstState, state) } func TestStateHasFileRule(t *testing.T) { t.Parallel() t.Run("with file rules", func(t *testing.T) { state := createTestStateWithDelegatedPolicies(t) hasFileRule := state.hasFileRule assert.True(t, hasFileRule) }) t.Run("with no file rules", func(t *testing.T) { t.Parallel() state := createTestStateWithOnlyRoot(t) hasFileRule := state.hasFileRule assert.False(t, hasFileRule) }) } func TestApply(t *testing.T) { repo, state := createTestRepository(t, createTestStateWithOnlyRoot) key := tufv01.NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, rootPubKeyBytes)) signer := setupSSHKeysForSigning(t, rootKeyBytes, rootPubKeyBytes) rootMetadata, err := state.GetRootMetadata(false) if err != nil { t.Fatal(err) } if err := rootMetadata.AddPrimaryRuleFilePrincipal(key); err != nil { t.Fatal(err) } rootEnv, err := dsse.CreateEnvelope(rootMetadata) if err != nil { t.Fatal(err) } rootEnv, err = dsse.SignEnvelope(context.Background(), rootEnv, signer) if err != nil { t.Fatal(err) } state.RootEnvelope = rootEnv if err := state.Commit(repo, "Added target key to root", false); err != nil { t.Fatal(err) } staging, err := LoadCurrentState(testCtx, repo, PolicyStagingRef) if err != nil { t.Fatal(err) } policy, err := LoadCurrentState(testCtx, repo, PolicyRef) if err != nil { t.Fatal(err) } // Currently the policy ref is behind the staging ref, since the staging ref currently has an extra target key assertStatesNotEqual(t, staging, policy) err = Apply(testCtx, repo, false) assert.Nil(t, err) staging, err = LoadCurrentState(testCtx, repo, PolicyStagingRef) if err != nil { t.Fatal(err) } policy, err = LoadCurrentState(testCtx, repo, PolicyRef) if err != nil { t.Fatal(err) } // After Apply, the policy ref was fast-forward merged with the staging ref assertStatesEqual(t, staging, policy) } func TestDiscard(t *testing.T) { t.Parallel() t.Run("discard changes when policy ref exists", func(t *testing.T) { t.Parallel() repo, state := createTestRepository(t, createTestStateWithPolicy) signer := setupSSHKeysForSigning(t, rootKeyBytes, rootPubKeyBytes) key := tufv01.NewKeyFromSSLibKey(signer.MetadataKey()) targetsMetadata, err := state.GetTargetsMetadata(TargetsRoleName, false) if err != nil { t.Fatal(err) } if err := targetsMetadata.AddPrincipal(key); err != nil { t.Fatal(err) } if err := targetsMetadata.AddRule("test-rule", []string{key.KeyID}, []string{"test-rule"}, 1); err != nil { t.Fatal(err) } env, err := dsse.CreateEnvelope(targetsMetadata) if err != nil { t.Fatal(err) } env, err = dsse.SignEnvelope(context.Background(), env, signer) if err != nil { t.Fatal(err) } state.TargetsEnvelope = env if err := state.Commit(repo, "", false); err != nil { t.Fatal(err) } policyTip, err := repo.GetReference(PolicyRef) if err != nil { t.Fatal(err) } stagingTip, err := repo.GetReference(PolicyStagingRef) if err != nil { t.Fatal(err) } assert.NotEqual(t, policyTip, stagingTip) err = Discard(repo) assert.Nil(t, err) policyTip, err = repo.GetReference(PolicyRef) if err != nil { t.Fatal(err) } stagingTip, 
err = repo.GetReference(PolicyStagingRef) if err != nil { t.Fatal(err) } assert.Equal(t, policyTip, stagingTip) }) t.Run("discard changes when policy ref does not exist", func(t *testing.T) { t.Parallel() tmpDir := t.TempDir() repo := gitinterface.CreateTestGitRepository(t, tmpDir, false) treeBuilder := gitinterface.NewTreeBuilder(repo) emptyTreeHash, err := treeBuilder.WriteTreeFromEntries(nil) if err != nil { t.Fatal(err) } commitID, err := repo.Commit(emptyTreeHash, PolicyStagingRef, "test commit", false) if err != nil { t.Fatal(err) } stagingTip, err := repo.GetReference(PolicyStagingRef) if err != nil { t.Fatal(err) } assert.Equal(t, commitID, stagingTip) err = Discard(repo) assert.Nil(t, err) _, err = repo.GetReference(PolicyStagingRef) assert.ErrorIs(t, err, gitinterface.ErrReferenceNotFound) }) } func assertStatesEqual(t *testing.T, stateA, stateB *State) { t.Helper() assert.Equal(t, stateA.RootEnvelope, stateB.RootEnvelope) assert.Equal(t, stateA.TargetsEnvelope, stateB.TargetsEnvelope) assert.Equal(t, stateA.DelegationEnvelopes, stateB.DelegationEnvelopes) assert.Equal(t, stateA.RootPublicKeys, stateB.RootPublicKeys) } func assertStatesNotEqual(t *testing.T, stateA, stateB *State) { t.Helper() // at least one of these has to be different assert.True(t, assert.NotEqual(t, stateA.RootEnvelope, stateB.RootEnvelope) || assert.NotEqual(t, stateA.TargetsEnvelope, stateB.TargetsEnvelope) || assert.NotEqual(t, stateA.DelegationEnvelopes, stateB.DelegationEnvelopes) || assert.NotEqual(t, stateA.RootPublicKeys, stateB.RootPublicKeys)) } gittuf-0.9.0/internal/policy/root.go000066400000000000000000000016121475150141000174450ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package policy import ( "time" "github.com/gittuf/gittuf/internal/tuf" tufv01 "github.com/gittuf/gittuf/internal/tuf/v01" tufv02 "github.com/gittuf/gittuf/internal/tuf/v02" ) // InitializeRootMetadata initializes a new instance of tuf.RootMetadata with // default values and a given key. The default values are version set to 1, // expiry date set to one year from now, and the provided key is added. 
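// An illustrative usage sketch, assuming key is a tuf.Principal obtained from
// one of the signerverifier helpers (e.g., an SSH or GPG key wrapper):
//
//	rootMetadata, err := InitializeRootMetadata(key)
//	if err != nil {
//		// the key could not be added as a root principal
//	}
//	_ = rootMetadata // sign it into a DSSE envelope and commit via State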
func InitializeRootMetadata(key tuf.Principal) (tuf.RootMetadata, error) { var rootMetadata tuf.RootMetadata if tufv02.AllowV02Metadata() { rootMetadata = tufv02.NewRootMetadata() } else { rootMetadata = tufv01.NewRootMetadata() } rootMetadata.SetExpires(time.Now().AddDate(1, 0, 0).Format(time.RFC3339)) if err := rootMetadata.AddRootPrincipal(key); err != nil { return nil, err } return rootMetadata, nil } gittuf-0.9.0/internal/policy/root_test.go000066400000000000000000000014731475150141000205110ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package policy import ( "testing" "github.com/gittuf/gittuf/internal/signerverifier/ssh" "github.com/gittuf/gittuf/internal/tuf" tufv01 "github.com/gittuf/gittuf/internal/tuf/v01" "github.com/stretchr/testify/assert" ) func TestInitializeRootMetadata(t *testing.T) { key := tufv01.NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, rootPubKeyBytes)) rootMetadata, err := InitializeRootMetadata(key) assert.Nil(t, err) allPrincipals := rootMetadata.GetPrincipals() assert.Equal(t, key, allPrincipals[key.KeyID]) threshold, err := rootMetadata.GetRootThreshold() assert.Nil(t, err) assert.Equal(t, 1, threshold) rootPrincipals, err := rootMetadata.GetRootPrincipals() assert.Nil(t, err) assert.Equal(t, []tuf.Principal{key}, rootPrincipals) } gittuf-0.9.0/internal/policy/searcher.go000066400000000000000000000274061475150141000202670ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package policy import ( "errors" "fmt" "log/slog" "github.com/gittuf/gittuf/internal/attestations" "github.com/gittuf/gittuf/internal/cache" "github.com/gittuf/gittuf/internal/gitinterface" "github.com/gittuf/gittuf/internal/rsl" ) // searcher defines the interface for finding policy and attestation entries in // the RSL. type searcher interface { FindFirstPolicyEntry() (rsl.ReferenceUpdaterEntry, error) FindLatestPolicyEntry() (rsl.ReferenceUpdaterEntry, error) FindPolicyEntryFor(rsl.Entry) (rsl.ReferenceUpdaterEntry, error) FindPolicyEntriesInRange(rsl.Entry, rsl.Entry) ([]rsl.ReferenceUpdaterEntry, error) FindAttestationsEntryFor(rsl.Entry) (rsl.ReferenceUpdaterEntry, error) FindLatestAttestationsEntry() (rsl.ReferenceUpdaterEntry, error) } func newSearcher(repo *gitinterface.Repository) searcher { persistentCache, err := cache.LoadPersistentCache(repo) if err == nil { slog.Debug("Persistent cache found, loading cache RSL searcher...") return newCacheSearcher(repo, persistentCache) } slog.Debug("Persistent cache not found, using regular RSL searcher...") return newRegularSearcher(repo) } // regularSearcher implements the searcher interface. It walks back the RSL from // to identify the requested policy or attestation entries. type regularSearcher struct { repo *gitinterface.Repository } // FindFirstPolicyEntry identifies the very first policy entry in the RSL. func (r *regularSearcher) FindFirstPolicyEntry() (rsl.ReferenceUpdaterEntry, error) { entry, _, err := rsl.GetFirstReferenceUpdaterEntryForRef(r.repo, PolicyRef) if err != nil { if errors.Is(err, rsl.ErrRSLEntryNotFound) { // we don't have a policy entry yet return nil, ErrPolicyNotFound } return nil, err } return entry, nil } // FindLatestPolicyEntry returns the latest policy entry in the RSL. 
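// An illustrative call pattern, assuming repo has an RSL:
//
//	searcher := newRegularSearcher(repo)
//	entry, err := searcher.FindLatestPolicyEntry()
//	if errors.Is(err, ErrPolicyNotFound) {
//		// no policy has been applied yet
//	}
//	_ = entry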
func (r *regularSearcher) FindLatestPolicyEntry() (rsl.ReferenceUpdaterEntry, error) { entry, _, err := rsl.GetLatestReferenceUpdaterEntry(r.repo, rsl.ForReference(PolicyRef)) if err != nil { if errors.Is(err, rsl.ErrRSLEntryNotFound) { // we don't have a policy entry return nil, ErrPolicyNotFound } return nil, err } return entry, nil } // FindPolicyEntryFor identifies the latest policy entry for the specified // entry. func (r *regularSearcher) FindPolicyEntryFor(entry rsl.Entry) (rsl.ReferenceUpdaterEntry, error) { // If the requested entry itself is for the policy ref, return as is if entry, isReferenceUpdaterEntry := entry.(rsl.ReferenceUpdaterEntry); isReferenceUpdaterEntry && entry.GetRefName() == PolicyRef { slog.Debug(fmt.Sprintf("Initial entry '%s' is for gittuf policy, setting that as current policy...", entry.GetID().String())) return entry, nil } policyEntry, _, err := rsl.GetLatestReferenceUpdaterEntry(r.repo, rsl.ForReference(PolicyRef), rsl.BeforeEntryID(entry.GetID())) if err != nil { if errors.Is(err, rsl.ErrRSLEntryNotFound) { slog.Debug(fmt.Sprintf("No policy found before initial entry '%s'", entry.GetID().String())) return nil, ErrPolicyNotFound } // Any other err must be returned return nil, err } return policyEntry, nil } // FindPolicyEntriesInRange returns all policy RSL entries in the specified // range. firstEntry and lastEntry are included if they are for the policy ref. func (r *regularSearcher) FindPolicyEntriesInRange(firstEntry, lastEntry rsl.Entry) ([]rsl.ReferenceUpdaterEntry, error) { allPolicyEntries, _, err := rsl.GetReferenceUpdaterEntriesInRangeForRef(r.repo, firstEntry.GetID(), lastEntry.GetID(), PolicyRef) if err != nil { return nil, err } return allPolicyEntries, nil } // FindAttestationsEntryFor identifies the latest attestations entry for the // specified entry. func (r *regularSearcher) FindAttestationsEntryFor(entry rsl.Entry) (rsl.ReferenceUpdaterEntry, error) { // If the requested entry itself is for the attestations ref, return as is if entry, isReferenceUpdaterEntry := entry.(rsl.ReferenceUpdaterEntry); isReferenceUpdaterEntry && entry.GetRefName() == attestations.Ref { slog.Debug(fmt.Sprintf("Initial entry '%s' is for attestations, setting that as current set of attestations...", entry.GetID().String())) return entry, nil } attestationsEntry, _, err := rsl.GetLatestReferenceUpdaterEntry(r.repo, rsl.ForReference(attestations.Ref), rsl.BeforeEntryID(entry.GetID())) if err != nil { if errors.Is(err, rsl.ErrRSLEntryNotFound) { // Attestations may not be used yet, they're not // compulsory slog.Debug(fmt.Sprintf("No attestations found before initial entry '%s'", entry.GetID().String())) return nil, attestations.ErrAttestationsNotFound } return nil, err } return attestationsEntry, nil } // FindLatestAttestationsEntry returns the latest RSL entry for the attestations // reference. func (r *regularSearcher) FindLatestAttestationsEntry() (rsl.ReferenceUpdaterEntry, error) { entry, _, err := rsl.GetLatestReferenceUpdaterEntry(r.repo, rsl.ForReference(attestations.Ref)) if err != nil { if errors.Is(err, rsl.ErrRSLEntryNotFound) { // we don't have an attestations entry return nil, attestations.ErrAttestationsNotFound } return nil, err } return entry, nil } func newRegularSearcher(repo *gitinterface.Repository) *regularSearcher { return ®ularSearcher{repo: repo} } // cacheSearcher implements the searcher interface. It checks the persistent // cache for results before falling back to the regular searcher if the // persistent cache yields no results. 
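// A construction sketch mirroring newSearcher above: the cache-backed searcher
// is only used when the persistent cache loads successfully.
//
//	persistentCache, err := cache.LoadPersistentCache(repo)
//	if err == nil {
//		searcher := newCacheSearcher(repo, persistentCache)
//		_ = searcher
//	}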
type cacheSearcher struct { repo *gitinterface.Repository persistentCache *cache.Persistent searcher *regularSearcher } // FindFirstPolicyEntry identifies the very first policy entry in the RSL. func (c *cacheSearcher) FindFirstPolicyEntry() (rsl.ReferenceUpdaterEntry, error) { if c.persistentCache == nil { return c.searcher.FindFirstPolicyEntry() } policyEntries := c.persistentCache.GetPolicyEntries() if len(policyEntries) == 0 { return nil, ErrPolicyNotFound } entry, err := loadRSLReferenceUpdaterEntry(c.repo, policyEntries[0].GetEntryID()) if err != nil { return c.searcher.FindFirstPolicyEntry() } return entry, nil } func (c *cacheSearcher) FindLatestPolicyEntry() (rsl.ReferenceUpdaterEntry, error) { if c.persistentCache == nil { return c.searcher.FindLatestPolicyEntry() } policyEntries := c.persistentCache.GetPolicyEntries() if len(policyEntries) == 0 { return nil, ErrPolicyNotFound } entry, err := loadRSLReferenceUpdaterEntry(c.repo, policyEntries[len(policyEntries)-1].GetEntryID()) if err != nil { return c.searcher.FindLatestPolicyEntry() } return entry, nil } // FindPolicyEntryFor identifies the latest policy entry for the specified // entry. func (c *cacheSearcher) FindPolicyEntryFor(entry rsl.Entry) (rsl.ReferenceUpdaterEntry, error) { if c.persistentCache == nil { slog.Debug("No persistent cache found, falling back to regular searcher...") return c.searcher.FindPolicyEntryFor(entry) } if entry.GetNumber() == 0 { // no number is set slog.Debug("Entry is not numbered, falling back to regular searcher...") return c.searcher.FindPolicyEntryFor(entry) } if entry, isReferenceUpdaterEntry := entry.(rsl.ReferenceUpdaterEntry); isReferenceUpdaterEntry && entry.GetRefName() == PolicyRef { slog.Debug("Requested entry is a policy entry, inserting into cache...") c.persistentCache.InsertPolicyEntryNumber(entry.GetNumber(), entry.GetID()) return entry, nil } policyEntryIndex := c.persistentCache.FindPolicyEntryNumberForEntry(entry.GetNumber()) if policyEntryIndex.GetEntryNumber() == 0 { return nil, ErrPolicyNotFound } policyEntry, err := loadRSLReferenceUpdaterEntry(c.repo, policyEntryIndex.GetEntryID()) if err != nil { return c.searcher.FindPolicyEntryFor(entry) } return policyEntry, nil } // FindPolicyEntriesInRange returns all policy RSL entries in the specified // range. firstEntry and lastEntry are included if they are for the policy ref. 
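// Behavior sketch: the cache is consulted only when both endpoints carry RSL
// numbers; otherwise the lookup falls back to the regular searcher.
//
//	entries, err := c.FindPolicyEntriesInRange(firstEntry, lastEntry)
//	_, _ = entries, err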
func (c *cacheSearcher) FindPolicyEntriesInRange(firstEntry, lastEntry rsl.Entry) ([]rsl.ReferenceUpdaterEntry, error) { if c.persistentCache == nil { return c.searcher.FindPolicyEntriesInRange(firstEntry, lastEntry) } if lastEntry.GetNumber() == 0 || firstEntry.GetNumber() == 0 { // first or last entry doesn't have a number return c.searcher.FindPolicyEntriesInRange(firstEntry, lastEntry) } if firstEntry, isReferenceUpdaterEntry := firstEntry.(rsl.ReferenceUpdaterEntry); isReferenceUpdaterEntry && firstEntry.GetRefName() == PolicyRef { slog.Debug("Requested first entry is a policy entry, inserting into cache...") c.persistentCache.InsertPolicyEntryNumber(firstEntry.GetNumber(), firstEntry.GetID()) } if lastEntry, isReferenceUpdaterEntry := lastEntry.(rsl.ReferenceUpdaterEntry); isReferenceUpdaterEntry && lastEntry.GetRefName() == PolicyRef { slog.Debug("Requested last entry is a policy entry, inserting into cache...") c.persistentCache.InsertPolicyEntryNumber(lastEntry.GetNumber(), lastEntry.GetID()) } policyIndices, err := c.persistentCache.FindPolicyEntriesInRange(firstEntry.GetNumber(), lastEntry.GetNumber()) if err != nil { return c.searcher.FindPolicyEntriesInRange(firstEntry, lastEntry) } entries := []rsl.ReferenceUpdaterEntry{} for _, index := range policyIndices { entry, err := loadRSLReferenceUpdaterEntry(c.repo, index.GetEntryID()) if err != nil { return c.searcher.FindPolicyEntriesInRange(firstEntry, lastEntry) } entries = append(entries, entry) } return entries, nil } // FindAttestationsEntryFor identifies the latest attestations entry for the // specified entry. func (c *cacheSearcher) FindAttestationsEntryFor(entry rsl.Entry) (rsl.ReferenceUpdaterEntry, error) { if c.persistentCache == nil { slog.Debug("No persistent cache found, falling back to regular searcher...") return c.searcher.FindAttestationsEntryFor(entry) } if entry.GetNumber() == 0 { // no number is set slog.Debug("Entry is not numbered, falling back to regular searcher...") return c.searcher.FindAttestationsEntryFor(entry) } if entry, isReferenceUpdaterEntry := entry.(rsl.ReferenceUpdaterEntry); isReferenceUpdaterEntry && entry.GetRefName() == attestations.Ref { slog.Debug("Requested entry is an attestations entry, inserting into cache...") c.persistentCache.InsertAttestationEntryNumber(entry.GetNumber(), entry.GetID()) return entry, nil } attestationsEntryIndex, _ := c.persistentCache.FindAttestationsEntryNumberForEntry(entry.GetNumber()) if attestationsEntryIndex.GetEntryNumber() == 0 { return nil, attestations.ErrAttestationsNotFound } attestationsEntry, err := loadRSLReferenceUpdaterEntry(c.repo, attestationsEntryIndex.GetEntryID()) if err != nil { return c.searcher.FindAttestationsEntryFor(entry) } return attestationsEntry, nil } func (c *cacheSearcher) FindLatestAttestationsEntry() (rsl.ReferenceUpdaterEntry, error) { if c.persistentCache == nil { return c.searcher.FindLatestAttestationsEntry() } attestationsEntries := c.persistentCache.GetAttestationsEntries() if len(attestationsEntries) == 0 { return nil, attestations.ErrAttestationsNotFound } entry, err := loadRSLReferenceUpdaterEntry(c.repo, attestationsEntries[len(attestationsEntries)-1].GetEntryID()) if err != nil { return c.searcher.FindLatestAttestationsEntry() } return entry, nil } func newCacheSearcher(repo *gitinterface.Repository, persistentCache *cache.Persistent) *cacheSearcher { return &cacheSearcher{ repo: repo, persistentCache: persistentCache, searcher: newRegularSearcher(repo), } } func loadRSLReferenceUpdaterEntry(repo 
*gitinterface.Repository, entryID gitinterface.Hash) (rsl.ReferenceUpdaterEntry, error) { entryT, err := rsl.GetEntry(repo, entryID) if err != nil { return nil, err } entry, isReferenceEntry := entryT.(*rsl.ReferenceEntry) if !isReferenceEntry { return nil, fmt.Errorf("not reference entry") } return entry, nil } gittuf-0.9.0/internal/policy/searcher_test.go000066400000000000000000000320371475150141000213220ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package policy import ( "testing" "github.com/gittuf/gittuf/internal/attestations" "github.com/gittuf/gittuf/internal/cache" "github.com/gittuf/gittuf/internal/gitinterface" "github.com/gittuf/gittuf/internal/rsl" "github.com/stretchr/testify/assert" ) func TestRegularSearcher(t *testing.T) { t.Run("policy exists", func(t *testing.T) { repo, _ := createTestRepository(t, createTestStateWithOnlyRoot) expectedPolicyEntry, err := rsl.GetLatestEntry(repo) if err != nil { t.Fatal(err) } // Add an entry after if err := rsl.NewReferenceEntry("refs/heads/main", gitinterface.ZeroHash).Commit(repo, false); err != nil { t.Fatal(err) } entry, err := rsl.GetLatestEntry(repo) if err != nil { t.Fatal(err) } searcher := newRegularSearcher(repo) policyEntry, err := searcher.FindPolicyEntryFor(entry) assert.Nil(t, err) assert.Equal(t, expectedPolicyEntry.GetID(), policyEntry.GetID()) // Try with annotation if err := rsl.NewAnnotationEntry([]gitinterface.Hash{entry.GetID()}, false, "Annotation\n").Commit(repo, false); err != nil { t.Fatal(err) } policyEntry, err = searcher.FindPolicyEntryFor(entry) assert.Nil(t, err) assert.Equal(t, expectedPolicyEntry.GetID(), policyEntry.GetID()) // Requested entry is policy entry if err := rsl.NewReferenceEntry(PolicyRef, gitinterface.ZeroHash).Commit(repo, false); err != nil { t.Fatal(err) } entry, err = rsl.GetLatestEntry(repo) if err != nil { t.Fatal(err) } policyEntry, err = searcher.FindPolicyEntryFor(entry) assert.Nil(t, err) assert.Equal(t, entry.GetID(), policyEntry.GetID()) }) t.Run("policy does not exist", func(t *testing.T) { tmpDir := t.TempDir() repo := gitinterface.CreateTestGitRepository(t, tmpDir, false) // Add an entry after if err := rsl.NewReferenceEntry("refs/heads/main", gitinterface.ZeroHash).Commit(repo, false); err != nil { t.Fatal(err) } entry, err := rsl.GetLatestEntry(repo) if err != nil { t.Fatal(err) } searcher := newRegularSearcher(repo) policyEntry, err := searcher.FindPolicyEntryFor(entry) assert.ErrorIs(t, err, ErrPolicyNotFound) assert.Nil(t, policyEntry) }) t.Run("first policy", func(t *testing.T) { repo, _ := createTestRepository(t, createTestStateWithOnlyRoot) expectedPolicyEntry, err := rsl.GetLatestEntry(repo) if err != nil { t.Fatal(err) } searcher := newRegularSearcher(repo) policyEntry, err := searcher.FindFirstPolicyEntry() assert.Nil(t, err) assert.Equal(t, expectedPolicyEntry.GetID(), policyEntry.GetID()) }) t.Run("policies in range", func(t *testing.T) { repo, _ := createTestRepository(t, createTestStateWithOnlyRoot) latestEntry, err := rsl.GetLatestEntry(repo) if err != nil { t.Fatal(err) } firstEntry := latestEntry expectedPolicyEntries := []rsl.ReferenceUpdaterEntry{latestEntry.(*rsl.ReferenceEntry)} searcher := newRegularSearcher(repo) policyEntries, err := searcher.FindPolicyEntriesInRange(firstEntry, latestEntry) assert.Nil(t, err) assert.Equal(t, expectedPolicyEntries, policyEntries) if err := rsl.NewReferenceEntry(PolicyRef, gitinterface.ZeroHash).Commit(repo, false); err != nil { t.Fatal(err) } latestEntry, err = 
rsl.GetLatestEntry(repo) if err != nil { t.Fatal(err) } expectedPolicyEntries = append(expectedPolicyEntries, latestEntry.(*rsl.ReferenceEntry)) policyEntries, err = searcher.FindPolicyEntriesInRange(firstEntry, latestEntry) assert.Nil(t, err) assert.Equal(t, expectedPolicyEntries, policyEntries) if err := rsl.NewReferenceEntry(PolicyStagingRef, gitinterface.ZeroHash).Commit(repo, false); err != nil { t.Fatal(err) } latestEntry, err = rsl.GetLatestEntry(repo) if err != nil { t.Fatal(err) } // expectedPolicyEntries does not change in this instance policyEntries, err = searcher.FindPolicyEntriesInRange(firstEntry, latestEntry) assert.Nil(t, err) assert.Equal(t, expectedPolicyEntries, policyEntries) }) t.Run("attestations exist", func(t *testing.T) { tmpDir := t.TempDir() repo := gitinterface.CreateTestGitRepository(t, tmpDir, false) currentAttestations := &attestations.Attestations{} if err := currentAttestations.Commit(repo, "Initial attestations\n", false); err != nil { t.Fatal(err) } expectedAttestationsEntry, err := rsl.GetLatestEntry(repo) if err != nil { t.Fatal(err) } // Add an entry after if err := rsl.NewReferenceEntry("refs/heads/main", gitinterface.ZeroHash).Commit(repo, false); err != nil { t.Fatal(err) } entry, err := rsl.GetLatestEntry(repo) if err != nil { t.Fatal(err) } searcher := newRegularSearcher(repo) attestationsEntry, err := searcher.FindAttestationsEntryFor(entry) assert.Nil(t, err) assert.Equal(t, expectedAttestationsEntry.GetID(), attestationsEntry.GetID()) // Try with annotation if err := rsl.NewAnnotationEntry([]gitinterface.Hash{entry.GetID()}, false, "Annotation\n").Commit(repo, false); err != nil { t.Fatal(err) } attestationsEntry, err = searcher.FindAttestationsEntryFor(entry) assert.Nil(t, err) assert.Equal(t, expectedAttestationsEntry.GetID(), attestationsEntry.GetID()) // Requested entry is attestations entry if err := rsl.NewReferenceEntry(attestations.Ref, gitinterface.ZeroHash).Commit(repo, false); err != nil { t.Fatal(err) } entry, err = rsl.GetLatestEntry(repo) if err != nil { t.Fatal(err) } attestationsEntry, err = searcher.FindAttestationsEntryFor(entry) assert.Nil(t, err) assert.Equal(t, entry.GetID(), attestationsEntry.GetID()) }) t.Run("attestations do not exist", func(t *testing.T) { tmpDir := t.TempDir() repo := gitinterface.CreateTestGitRepository(t, tmpDir, false) // Add an entry after if err := rsl.NewReferenceEntry("refs/heads/main", gitinterface.ZeroHash).Commit(repo, false); err != nil { t.Fatal(err) } entry, err := rsl.GetLatestEntry(repo) if err != nil { t.Fatal(err) } searcher := newRegularSearcher(repo) attestationsEntry, err := searcher.FindAttestationsEntryFor(entry) assert.ErrorIs(t, err, attestations.ErrAttestationsNotFound) assert.Nil(t, attestationsEntry) }) } func TestCacheSearcher(t *testing.T) { t.Run("policy exists", func(t *testing.T) { repo, _ := createTestRepository(t, createTestStateWithOnlyRoot) if err := cache.PopulatePersistentCache(repo); err != nil { t.Fatal(err) } persistentCache, err := cache.LoadPersistentCache(repo) if err != nil { t.Fatal(err) } expectedPolicyEntry, err := rsl.GetLatestEntry(repo) if err != nil { t.Fatal(err) } // Add an entry after if err := rsl.NewReferenceEntry("refs/heads/main", gitinterface.ZeroHash).Commit(repo, false); err != nil { t.Fatal(err) } entry, err := rsl.GetLatestEntry(repo) if err != nil { t.Fatal(err) } searcher := newCacheSearcher(repo, persistentCache) policyEntry, err := searcher.FindPolicyEntryFor(entry) assert.Nil(t, err) assert.Equal(t, expectedPolicyEntry.GetID(), 
policyEntry.GetID()) // Try with annotation if err := rsl.NewAnnotationEntry([]gitinterface.Hash{entry.GetID()}, false, "Annotation\n").Commit(repo, false); err != nil { t.Fatal(err) } policyEntry, err = searcher.FindPolicyEntryFor(entry) assert.Nil(t, err) assert.Equal(t, expectedPolicyEntry.GetID(), policyEntry.GetID()) // Requested entry is policy entry if err := rsl.NewReferenceEntry(PolicyRef, gitinterface.ZeroHash).Commit(repo, false); err != nil { t.Fatal(err) } entry, err = rsl.GetLatestEntry(repo) if err != nil { t.Fatal(err) } policyEntry, err = searcher.FindPolicyEntryFor(entry) assert.Nil(t, err) assert.Equal(t, entry.GetID(), policyEntry.GetID()) }) t.Run("policy does not exist", func(t *testing.T) { tmpDir := t.TempDir() repo := gitinterface.CreateTestGitRepository(t, tmpDir, false) // Add an entry after if err := rsl.NewReferenceEntry("refs/heads/main", gitinterface.ZeroHash).Commit(repo, false); err != nil { t.Fatal(err) } if err := cache.PopulatePersistentCache(repo); err != nil { t.Fatal(err) } persistentCache, err := cache.LoadPersistentCache(repo) if err != nil { t.Fatal(err) } entry, err := rsl.GetLatestEntry(repo) if err != nil { t.Fatal(err) } searcher := newCacheSearcher(repo, persistentCache) policyEntry, err := searcher.FindPolicyEntryFor(entry) assert.ErrorIs(t, err, ErrPolicyNotFound) assert.Nil(t, policyEntry) }) t.Run("first policy", func(t *testing.T) { repo, _ := createTestRepository(t, createTestStateWithOnlyRoot) if err := cache.PopulatePersistentCache(repo); err != nil { t.Fatal(err) } persistentCache, err := cache.LoadPersistentCache(repo) if err != nil { t.Fatal(err) } expectedPolicyEntry, err := rsl.GetLatestEntry(repo) if err != nil { t.Fatal(err) } searcher := newCacheSearcher(repo, persistentCache) policyEntry, err := searcher.FindFirstPolicyEntry() assert.Nil(t, err) assert.Equal(t, expectedPolicyEntry.GetID(), policyEntry.GetID()) }) t.Run("policies in range", func(t *testing.T) { repo, _ := createTestRepository(t, createTestStateWithOnlyRoot) if err := cache.PopulatePersistentCache(repo); err != nil { t.Fatal(err) } persistentCache, err := cache.LoadPersistentCache(repo) if err != nil { t.Fatal(err) } latestEntry, err := rsl.GetLatestEntry(repo) if err != nil { t.Fatal(err) } firstEntry := latestEntry expectedPolicyEntries := []rsl.ReferenceUpdaterEntry{latestEntry.(*rsl.ReferenceEntry)} searcher := newCacheSearcher(repo, persistentCache) policyEntries, err := searcher.FindPolicyEntriesInRange(firstEntry, latestEntry) assert.Nil(t, err) assert.Equal(t, expectedPolicyEntries, policyEntries) if err := rsl.NewReferenceEntry(PolicyRef, gitinterface.ZeroHash).Commit(repo, false); err != nil { t.Fatal(err) } latestEntry, err = rsl.GetLatestEntry(repo) if err != nil { t.Fatal(err) } expectedPolicyEntries = append(expectedPolicyEntries, latestEntry.(*rsl.ReferenceEntry)) policyEntries, err = searcher.FindPolicyEntriesInRange(firstEntry, latestEntry) assert.Nil(t, err) assert.Equal(t, expectedPolicyEntries, policyEntries) if err := rsl.NewReferenceEntry(PolicyStagingRef, gitinterface.ZeroHash).Commit(repo, false); err != nil { t.Fatal(err) } latestEntry, err = rsl.GetLatestEntry(repo) if err != nil { t.Fatal(err) } // expectedPolicyEntries does not change in this instance policyEntries, err = searcher.FindPolicyEntriesInRange(firstEntry, latestEntry) assert.Nil(t, err) assert.Equal(t, expectedPolicyEntries, policyEntries) }) t.Run("attestations exist", func(t *testing.T) { tmpDir := t.TempDir() repo := gitinterface.CreateTestGitRepository(t, tmpDir, 
false) currentAttestations := &attestations.Attestations{} if err := currentAttestations.Commit(repo, "Initial attestations\n", false); err != nil { t.Fatal(err) } expectedAttestationsEntry, err := rsl.GetLatestEntry(repo) if err != nil { t.Fatal(err) } // Add an entry after if err := rsl.NewReferenceEntry("refs/heads/main", gitinterface.ZeroHash).Commit(repo, false); err != nil { t.Fatal(err) } entry, err := rsl.GetLatestEntry(repo) if err != nil { t.Fatal(err) } if err := cache.PopulatePersistentCache(repo); err != nil { t.Fatal(err) } persistentCache, err := cache.LoadPersistentCache(repo) if err != nil { t.Fatal(err) } searcher := newCacheSearcher(repo, persistentCache) attestationsEntry, err := searcher.FindAttestationsEntryFor(entry) assert.Nil(t, err) assert.Equal(t, expectedAttestationsEntry.GetID(), attestationsEntry.GetID()) // Try with annotation if err := rsl.NewAnnotationEntry([]gitinterface.Hash{entry.GetID()}, false, "Annotation\n").Commit(repo, false); err != nil { t.Fatal(err) } attestationsEntry, err = searcher.FindAttestationsEntryFor(entry) assert.Nil(t, err) assert.Equal(t, expectedAttestationsEntry.GetID(), attestationsEntry.GetID()) // Requested entry is annotations entry if err := rsl.NewReferenceEntry(attestations.Ref, gitinterface.ZeroHash).Commit(repo, false); err != nil { t.Fatal(err) } entry, err = rsl.GetLatestEntry(repo) if err != nil { t.Fatal(err) } attestationsEntry, err = searcher.FindAttestationsEntryFor(entry) assert.Nil(t, err) assert.Equal(t, entry.GetID(), attestationsEntry.GetID()) }) t.Run("attestations do not exist", func(t *testing.T) { tmpDir := t.TempDir() repo := gitinterface.CreateTestGitRepository(t, tmpDir, false) // Add an entry after if err := rsl.NewReferenceEntry("refs/heads/main", gitinterface.ZeroHash).Commit(repo, false); err != nil { t.Fatal(err) } entry, err := rsl.GetLatestEntry(repo) if err != nil { t.Fatal(err) } if err := cache.PopulatePersistentCache(repo); err != nil { t.Fatal(err) } persistentCache, err := cache.LoadPersistentCache(repo) if err != nil { t.Fatal(err) } searcher := newCacheSearcher(repo, persistentCache) attestationsEntry, err := searcher.FindAttestationsEntryFor(entry) assert.ErrorIs(t, err, attestations.ErrAttestationsNotFound) assert.Nil(t, attestationsEntry) }) } gittuf-0.9.0/internal/policy/signature.go000066400000000000000000000215171475150141000204710ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package policy import ( "context" "errors" "fmt" "log/slog" "strings" "github.com/gittuf/gittuf/internal/common/set" "github.com/gittuf/gittuf/internal/gitinterface" "github.com/gittuf/gittuf/internal/signerverifier/common" "github.com/gittuf/gittuf/internal/signerverifier/dsse" "github.com/gittuf/gittuf/internal/signerverifier/gpg" "github.com/gittuf/gittuf/internal/signerverifier/sigstore" sigstoreverifieropts "github.com/gittuf/gittuf/internal/signerverifier/sigstore/options/verifier" "github.com/gittuf/gittuf/internal/signerverifier/ssh" sslibdsse "github.com/gittuf/gittuf/internal/third_party/go-securesystemslib/dsse" "github.com/gittuf/gittuf/internal/tuf" "github.com/secure-systems-lab/go-securesystemslib/signerverifier" ) type SignatureVerifier struct { repository *gitinterface.Repository name string principals []tuf.Principal threshold int verifyExhaustively bool // verifyExhaustively checks all possible signatures and returns all matched principals, even if threshold is already met } func (v *SignatureVerifier) Name() string { return v.name } func 
(v *SignatureVerifier) Threshold() int { return v.threshold } func (v *SignatureVerifier) TrustedPrincipalIDs() *set.Set[string] { principalIDs := set.NewSet[string]() for _, principal := range v.principals { principalIDs.Add(principal.ID()) } return principalIDs } // Verify is used to check for a threshold of signatures using the verifier. The // threshold of signatures may be met using a combination of at most one Git // signature and signatures embedded in a DSSE envelope. Verify does not inspect // the envelope's payload, but instead only verifies the signatures. The caller // must ensure the validity of the envelope's contents. func (v *SignatureVerifier) Verify(ctx context.Context, gitObjectID gitinterface.Hash, env *sslibdsse.Envelope) (*set.Set[string], error) { if v.threshold < 1 || len(v.principals) < 1 { return nil, ErrInvalidVerifier } // usedPrincipalIDs is ultimately returned to track the set of principals // who have been authenticated usedPrincipalIDs := set.NewSet[string]() // usedKeyIDs is tracked to ensure a key isn't duplicated between two // principals, allowing two principals to meet a threshold using the same // key usedKeyIDs := set.NewSet[string]() // gitObjectVerified is set to true if the gitObjectID's signature is // verified gitObjectVerified := false // First, verify the gitObject's signature if one is presented if gitObjectID != nil && !gitObjectID.IsZero() { slog.Debug(fmt.Sprintf("Verifying signature of Git object with ID '%s'...", gitObjectID.String())) for _, principal := range v.principals { // there are multiple keys we must try keys := principal.Keys() for _, key := range keys { err := v.repository.VerifySignature(ctx, gitObjectID, key) if err == nil { // Signature verification succeeded slog.Debug(fmt.Sprintf("Public key '%s' belonging to principal '%s' successfully used to verify signature of Git object '%s', counting '%s' towards threshold...", key.KeyID, principal.ID(), gitObjectID.String(), principal.ID())) usedPrincipalIDs.Add(principal.ID()) usedKeyIDs.Add(key.KeyID) gitObjectVerified = true // No need to try the other keys for this principal, break break } if errors.Is(err, gitinterface.ErrUnknownSigningMethod) { // TODO: this should be removed once we have unified signing // methods across metadata and git signatures continue } if !errors.Is(err, gitinterface.ErrIncorrectVerificationKey) { return nil, err } } if gitObjectVerified { // No need to try other principals, break break } } } // If we don't have to verify exhaustively and threshold is 1 and the Git // signature is verified, we can return if !v.verifyExhaustively && v.threshold == 1 && gitObjectVerified { return usedPrincipalIDs, nil } slog.Debug("Proceeding with verification of attestations...") if env != nil { // Second, verify signatures on the envelope // We have to verify the envelope independently for each principal // trusted in the verifier as a principal may have multiple keys // associated with them. 
for _, principal := range v.principals { if usedPrincipalIDs.Has(principal.ID()) { // Do not verify using this principal as they were verified for // the Git signature slog.Debug(fmt.Sprintf("Principal '%s' has already been counted towards the threshold, skipping...", principal.ID())) continue } principalVerifiers := []sslibdsse.Verifier{} keys := principal.Keys() for _, key := range keys { if usedKeyIDs.Has(key.KeyID) { // this key has been encountered before, possibly because // another Principal included this key slog.Debug(fmt.Sprintf("Key with ID '%s' has already been used to verify a signature, skipping...", key.KeyID)) continue } var ( dsseVerifier sslibdsse.Verifier err error ) switch key.KeyType { case ssh.KeyType: slog.Debug(fmt.Sprintf("Found SSH key '%s'...", key.KeyID)) dsseVerifier, err = ssh.NewVerifierFromKey(key) if err != nil { return nil, err } case gpg.KeyType: slog.Debug(fmt.Sprintf("Found GPG key '%s', cannot use for DSSE signature verification yet...", key.KeyID)) continue case sigstore.KeyType: slog.Debug(fmt.Sprintf("Found Sigstore key '%s'...", key.KeyID)) opts := []sigstoreverifieropts.Option{} config, err := v.repository.GetGitConfig() if err != nil { return nil, err } if rekorURL, has := config[sigstore.GitConfigRekor]; has { slog.Debug(fmt.Sprintf("Using '%s' as Rekor server...", rekorURL)) opts = append(opts, sigstoreverifieropts.WithRekorURL(rekorURL)) } dsseVerifier = sigstore.NewVerifierFromIdentityAndIssuer(key.KeyVal.Identity, key.KeyVal.Issuer, opts...) case signerverifier.ED25519KeyType: // These are only used to verify old policy metadata signed before the ssh-signer was added slog.Debug(fmt.Sprintf("Found legacy ED25519 key '%s' in custom securesystemslib format...", key.KeyID)) dsseVerifier, err = signerverifier.NewED25519SignerVerifierFromSSLibKey(key) if err != nil { return nil, err } case signerverifier.RSAKeyType: // These are only used to verify old policy metadata signed before the ssh-signer was added slog.Debug(fmt.Sprintf("Found legacy RSA key '%s' in custom securesystemslib format...", key.KeyID)) dsseVerifier, err = signerverifier.NewRSAPSSSignerVerifierFromSSLibKey(key) if err != nil { return nil, err } case signerverifier.ECDSAKeyType: // These are only used to verify old policy metadata signed before the ssh-signer was added slog.Debug(fmt.Sprintf("Found legacy ECDSA key '%s' in custom securesystemslib format...", key.KeyID)) dsseVerifier, err = signerverifier.NewECDSASignerVerifierFromSSLibKey(key) if err != nil { return nil, err } default: return nil, common.ErrUnknownKeyType } principalVerifiers = append(principalVerifiers, dsseVerifier) } // We have the principal's verifiers: use that to verify the envelope if len(principalVerifiers) == 0 { // TODO: remove this when we have signing method unification // across git and dsse continue } // We set threshold to 1 as we only need one of the keys for this // principal to be matched. If more than one key is matched and // returned in acceptedKeys, we count this only once towards the // principal and therefore the verifier's threshold. However, for // safety, we count both keys. If two principals share keys, this // can lead to a problem meeting thresholds. Arguably, they // shouldn't be sharing keys, so this seems reasonable. 
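// Note that dsse.VerifyEnvelope returns an error when fewer than the
// requested threshold (1 here) of the supplied verifiers match a signature;
// the specific "accepted signatures do not match threshold" error simply
// indicates that none of this principal's keys signed the envelope, so it is
// tolerated below rather than treated as a verification failure.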
acceptedKeys, err := dsse.VerifyEnvelope(ctx, env, principalVerifiers, 1) if err != nil && !strings.Contains(err.Error(), "accepted signatures do not match threshold") { return nil, err } for _, key := range acceptedKeys { // Mark all accepted keys as used: this doesn't count towards // the threshold directly, but if another principal has the same // key, they may not be counted towards the threshold slog.Debug(fmt.Sprintf("Public key '%s' belonging to principal '%s' successfully used to verify signature of attestation, counting '%s' towards threshold...", key.KeyID, principal.ID(), principal.ID())) usedKeyIDs.Add(key.KeyID) usedPrincipalIDs.Add(principal.ID()) } } } if v.verifyExhaustively || usedPrincipalIDs.Len() >= v.Threshold() { // TODO: double check that this is okay! return usedPrincipalIDs, nil } // Return usedPrincipalIDs so the consumer can decide what to do with the // principals that were used return usedPrincipalIDs, ErrVerifierConditionsUnmet } gittuf-0.9.0/internal/policy/signature_test.go000066400000000000000000000117361475150141000215320ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package policy import ( "fmt" "testing" "github.com/gittuf/gittuf/internal/common" "github.com/gittuf/gittuf/internal/gitinterface" "github.com/gittuf/gittuf/internal/signerverifier/dsse" "github.com/gittuf/gittuf/internal/signerverifier/gpg" sslibdsse "github.com/gittuf/gittuf/internal/third_party/go-securesystemslib/dsse" "github.com/gittuf/gittuf/internal/tuf" tufv01 "github.com/gittuf/gittuf/internal/tuf/v01" "github.com/stretchr/testify/assert" ) func TestSignatureVerifier(t *testing.T) { t.Parallel() tmpDir := t.TempDir() repo := gitinterface.CreateTestGitRepository(t, tmpDir, false) gpgKeyR, err := gpg.LoadGPGKeyFromBytes(gpgPubKeyBytes) if err != nil { t.Fatal(err) } gpgKey := tufv01.NewKeyFromSSLibKey(gpgKeyR) rootSigner := setupSSHKeysForSigning(t, rootKeyBytes, rootPubKeyBytes) rootPubKeyR := rootSigner.MetadataKey() rootPubKey := tufv01.NewKeyFromSSLibKey(rootPubKeyR) targetsSigner := setupSSHKeysForSigning(t, targets1KeyBytes, targets1PubKeyBytes) targetsPubKeyR := targetsSigner.MetadataKey() targetsPubKey := tufv01.NewKeyFromSSLibKey(targetsPubKeyR) commitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, "refs/heads/main", 1, gpgKeyBytes) commitID := commitIDs[0] tagID := common.CreateTestSignedTag(t, repo, "test-tag", commitID, gpgKeyBytes) attestation, err := dsse.CreateEnvelope(nil) if err != nil { t.Fatal(err) } attestation, err = dsse.SignEnvelope(testCtx, attestation, rootSigner) if err != nil { t.Fatal(err) } invalidAttestation, err := dsse.CreateEnvelope(nil) if err != nil { t.Fatal(err) } invalidAttestation, err = dsse.SignEnvelope(testCtx, invalidAttestation, targetsSigner) if err != nil { t.Fatal(err) } attestationWithTwoSigs, err := dsse.CreateEnvelope(nil) if err != nil { t.Fatal(err) } attestationWithTwoSigs, err = dsse.SignEnvelope(testCtx, attestationWithTwoSigs, rootSigner) if err != nil { t.Fatal(err) } attestationWithTwoSigs, err = dsse.SignEnvelope(testCtx, attestationWithTwoSigs, targetsSigner) if err != nil { t.Fatal(err) } tests := map[string]struct { principals []tuf.Principal threshold int gitObjectID gitinterface.Hash attestation *sslibdsse.Envelope expectedError error }{ "commit, no attestation, valid key, threshold 1": { principals: []tuf.Principal{gpgKey}, threshold: 1, gitObjectID: commitID, }, "commit, no attestation, valid key, threshold 2": { principals: []tuf.Principal{gpgKey}, threshold: 2, 
gitObjectID: commitID, expectedError: ErrVerifierConditionsUnmet, }, "commit, attestation, valid key, threshold 1": { principals: []tuf.Principal{gpgKey}, threshold: 1, gitObjectID: commitID, attestation: attestation, }, "commit, attestation, valid keys, threshold 2": { principals: []tuf.Principal{gpgKey, rootPubKey}, threshold: 2, gitObjectID: commitID, attestation: attestation, }, "commit, invalid signed attestation, threshold 2": { principals: []tuf.Principal{gpgKey, rootPubKey}, threshold: 2, gitObjectID: commitID, attestation: invalidAttestation, expectedError: ErrVerifierConditionsUnmet, }, "commit, attestation, valid keys, threshold 3": { principals: []tuf.Principal{gpgKey, rootPubKey, targetsPubKey}, threshold: 3, gitObjectID: commitID, attestation: attestationWithTwoSigs, }, "tag, no attestation, valid key, threshold 1": { principals: []tuf.Principal{gpgKey}, threshold: 1, gitObjectID: tagID, }, "tag, no attestation, valid key, threshold 2": { principals: []tuf.Principal{gpgKey}, threshold: 2, gitObjectID: tagID, expectedError: ErrVerifierConditionsUnmet, }, "tag, attestation, valid key, threshold 1": { principals: []tuf.Principal{gpgKey}, threshold: 1, gitObjectID: tagID, attestation: attestation, }, "tag, attestation, valid keys, threshold 2": { principals: []tuf.Principal{gpgKey, rootPubKey}, threshold: 2, gitObjectID: tagID, attestation: attestation, }, "tag, invalid signed attestation, threshold 2": { principals: []tuf.Principal{gpgKey, rootPubKey}, threshold: 2, gitObjectID: tagID, attestation: invalidAttestation, expectedError: ErrVerifierConditionsUnmet, }, "tag, attestation, valid keys, threshold 3": { principals: []tuf.Principal{gpgKey, rootPubKey, targetsPubKey}, threshold: 3, gitObjectID: tagID, attestation: attestationWithTwoSigs, }, } for name, test := range tests { verifier := &SignatureVerifier{ repository: repo, name: "test-verifier", principals: test.principals, threshold: test.threshold, } _, err := verifier.Verify(testCtx, test.gitObjectID, test.attestation) if test.expectedError == nil { assert.Nil(t, err, fmt.Sprintf("unexpected error in test '%s'", name)) } else { assert.ErrorIs(t, err, test.expectedError, fmt.Sprintf("incorrect error received in test '%s'", name)) } } } gittuf-0.9.0/internal/policy/targets.go000066400000000000000000000012341475150141000201330ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package policy import ( "time" "github.com/gittuf/gittuf/internal/tuf" tufv01 "github.com/gittuf/gittuf/internal/tuf/v01" tufv02 "github.com/gittuf/gittuf/internal/tuf/v02" ) // InitializeTargetsMetadata creates a new instance of TargetsMetadata. 
func InitializeTargetsMetadata() tuf.TargetsMetadata {
	var targetsMetadata tuf.TargetsMetadata
	if tufv02.AllowV02Metadata() {
		targetsMetadata = tufv02.NewTargetsMetadata()
	} else {
		targetsMetadata = tufv01.NewTargetsMetadata()
	}

	targetsMetadata.SetExpires(time.Now().AddDate(1, 0, 0).Format(time.RFC3339))

	return targetsMetadata
}
gittuf-0.9.0/internal/policy/targets_test.go000066400000000000000000000005641475150141000211770ustar00rootroot00000000000000// Copyright The gittuf Authors
// SPDX-License-Identifier: Apache-2.0

package policy

import (
	"testing"

	tufv01 "github.com/gittuf/gittuf/internal/tuf/v01"
	"github.com/stretchr/testify/assert"
)

func TestInitializeTargetsMetadata(t *testing.T) {
	targetsMetadata := InitializeTargetsMetadata()
	assert.Contains(t, targetsMetadata.GetRules(), tufv01.AllowRule())
}
gittuf-0.9.0/internal/policy/verify.go000066400000000000000000001251071475150141000177740ustar00rootroot00000000000000// Copyright The gittuf Authors
// SPDX-License-Identifier: Apache-2.0

package policy

import (
	"context"
	"encoding/json"
	"errors"
	"fmt"
	"log/slog"
	"strings"

	"github.com/gittuf/gittuf/internal/attestations"
	"github.com/gittuf/gittuf/internal/attestations/authorizations"
	"github.com/gittuf/gittuf/internal/attestations/github"
	githubv01 "github.com/gittuf/gittuf/internal/attestations/github/v01"
	"github.com/gittuf/gittuf/internal/cache"
	"github.com/gittuf/gittuf/internal/common/set"
	"github.com/gittuf/gittuf/internal/gitinterface"
	"github.com/gittuf/gittuf/internal/rsl"
	sslibdsse "github.com/gittuf/gittuf/internal/third_party/go-securesystemslib/dsse"
	"github.com/gittuf/gittuf/internal/tuf"
	tufv02 "github.com/gittuf/gittuf/internal/tuf/v02"
	ita "github.com/in-toto/attestation/go/v1"
)

var (
	ErrVerificationFailed             = errors.New("gittuf policy verification failed")
	ErrInvalidEntryNotSkipped         = errors.New("invalid entry found not marked as skipped")
	ErrLastGoodEntryIsSkipped         = errors.New("entry expected to be unskipped is marked as skipped")
	ErrNoVerifiers                    = errors.New("no verifiers present for verification")
	ErrInvalidVerifier                = errors.New("verifier has invalid parameters (is threshold 0?)")
	ErrVerifierConditionsUnmet        = errors.New("verifier's key and threshold constraints not met")
	ErrCannotVerifyMergeableForTagRef = errors.New("cannot verify mergeable into tag reference")
)

// PolicyVerifier implements various gittuf verification workflows.
type PolicyVerifier struct { //nolint:revive
	// We want to call this PolicyVerifier to avoid any confusion with
	// SignatureVerifier.
	repo                   *gitinterface.Repository
	searcher               searcher

	persistentCacheEnabled bool
	persistentCache        *cache.Persistent
}

func NewPolicyVerifier(repo *gitinterface.Repository) *PolicyVerifier {
	searcher := newSearcher(repo)

	verifier := &PolicyVerifier{
		repo:     repo,
		searcher: searcher,
	}
	if searcher, isCacheSearcher := searcher.(*cacheSearcher); isCacheSearcher {
		verifier.persistentCacheEnabled = true
		verifier.persistentCache = searcher.persistentCache
	}

	return verifier
}

// VerifyRef verifies the signature on the latest RSL entry for the target ref
// using the latest policy. The expected Git ID for the ref in the latest RSL
// entry is returned if the policy verification is successful.
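// A minimal invocation sketch, assuming a repository handle repo and a
// context ctx are already in hand (error handling elided):
//
//	verifier := NewPolicyVerifier(repo)
//	expectedTip, err := verifier.VerifyRef(ctx, "refs/heads/main")
//	if err != nil {
//		// verification against the latest policy failed
//	}
//	// callers typically compare expectedTip against the ref's actual tip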
func (v *PolicyVerifier) VerifyRef(ctx context.Context, target string) (gitinterface.Hash, error) { // Find latest entry for target slog.Debug(fmt.Sprintf("Identifying latest RSL entry for '%s'...", target)) latestEntry, _, err := rsl.GetLatestReferenceUpdaterEntry(v.repo, rsl.ForReference(target)) if err != nil { return gitinterface.ZeroHash, err } return latestEntry.GetTargetID(), v.VerifyRelativeForRef(ctx, latestEntry, latestEntry, target) } // VerifyRefFull verifies the entire RSL for the target ref from the first // entry. The expected Git ID for the ref in the latest RSL entry is returned if // the policy verification is successful. func (v *PolicyVerifier) VerifyRefFull(ctx context.Context, target string) (gitinterface.Hash, error) { // Trace RSL back to the start slog.Debug(fmt.Sprintf("Identifying first RSL entry for '%s'...", target)) var ( firstEntry rsl.ReferenceUpdaterEntry err error ) switch v.persistentCacheEnabled { case true: slog.Debug("Cache is enabled, checking for last verified entry...") entryNumber, entryID := v.persistentCache.GetLastVerifiedEntryForRef(target) if entryNumber != 0 { firstEntry, err = loadRSLReferenceUpdaterEntry(v.repo, entryID) if err != nil { return gitinterface.ZeroHash, err } // break because we've loaded the entry and don't need to fallthrough break } slog.Debug("Cache doesn't have last verified entry for ref...") fallthrough case false: firstEntry, _, err = rsl.GetFirstReferenceUpdaterEntryForRef(v.repo, target) if err != nil { return gitinterface.ZeroHash, err } } // Find latest entry for target slog.Debug(fmt.Sprintf("Identifying latest RSL entry for '%s'...", target)) latestEntry, _, err := rsl.GetLatestReferenceUpdaterEntry(v.repo, rsl.ForReference(target)) if err != nil { return gitinterface.ZeroHash, err } slog.Debug("Verifying all entries...") return latestEntry.GetTargetID(), v.VerifyRelativeForRef(ctx, firstEntry, latestEntry, target) } // VerifyRefFromEntry performs verification for the reference from a specific // RSL entry. The expected Git ID for the ref in the latest RSL entry is // returned if the policy verification is successful. func (v *PolicyVerifier) VerifyRefFromEntry(ctx context.Context, target string, entryID gitinterface.Hash) (gitinterface.Hash, error) { // Load starting point entry slog.Debug("Identifying starting RSL entry...") fromEntryT, err := rsl.GetEntry(v.repo, entryID) if err != nil { return gitinterface.ZeroHash, err } fromEntry, isRefEntry := fromEntryT.(*rsl.ReferenceEntry) if !isRefEntry { // TODO: we should instead find the latest reference entry // before the entryID and use that return gitinterface.ZeroHash, fmt.Errorf("starting entry is not an RSL reference entry") } // Find latest entry for target slog.Debug(fmt.Sprintf("Identifying latest RSL entry for '%s'...", target)) latestEntry, _, err := rsl.GetLatestReferenceUpdaterEntry(v.repo, rsl.ForReference(target)) if err != nil { return gitinterface.ZeroHash, err } // Do a relative verify from start entry to the latest entry slog.Debug("Verifying all entries...") return latestEntry.GetTargetID(), v.VerifyRelativeForRef(ctx, fromEntry, latestEntry, target) } // VerifyMergeable checks if the targetRef can be updated to reflect the changes // in featureRef. It checks if sufficient authorizations / approvals exist for // the merge to happen, indicated by the error being nil. Additionally, a // boolean value is also returned that indicates whether a final authorized // signature is still necessary via the RSL entry for the merge. 
// // Summary of return combinations: // (false, err) -> merge is not possible // (false, nil) -> merge is possible and can be performed by anyone // (true, nil) -> merge is possible but it MUST be performed by an authorized // person for the rule, i.e., an authorized person must sign the merge's RSL // entry func (v *PolicyVerifier) VerifyMergeable(ctx context.Context, targetRef, featureRef string) (bool, error) { if strings.HasPrefix(targetRef, gitinterface.TagRefPrefix) { return false, ErrCannotVerifyMergeableForTagRef } var fromID gitinterface.Hash slog.Debug(fmt.Sprintf("Identifying latest RSL entry for '%s'...", targetRef)) targetEntry, _, err := rsl.GetLatestReferenceUpdaterEntry(v.repo, rsl.ForReference(targetRef), rsl.IsUnskipped()) switch { case err == nil: fromID = targetEntry.GetTargetID() case errors.Is(err, rsl.ErrRSLEntryNotFound): fromID = gitinterface.ZeroHash default: return false, err } slog.Debug(fmt.Sprintf("Identifying latest RSL entry for '%s'...", featureRef)) featureEntry, _, err := rsl.GetLatestReferenceUpdaterEntry(v.repo, rsl.ForReference(featureRef), rsl.IsUnskipped()) if err != nil { return false, err } return v.verifyMergeable(ctx, targetRef, fromID, featureEntry.GetTargetID()) } // VerifyMergeableForCommit checks if the targetRef can be updated to reflect // the changes in featureID. It checks if sufficient authorizations / approvals // exist for the merge to happen, indicated by the error being nil. // Additionally, a boolean value is also returned that indicates whether a final // authorized signature is still necessary via the RSL entry for the merge. // Note: this function DOES NOT use the RSL to identify the tip of the feature // ref. // // Summary of return combinations: // (false, err) -> merge is not possible // (false, nil) -> merge is possible and can be performed by anyone // (true, nil) -> merge is possible but it MUST be performed by an authorized // person for the rule, i.e., an authorized person must sign the merge's RSL // entry func (v *PolicyVerifier) VerifyMergeableForCommit(ctx context.Context, targetRef string, featureID gitinterface.Hash) (bool, error) { if strings.HasPrefix(targetRef, gitinterface.TagRefPrefix) { return false, ErrCannotVerifyMergeableForTagRef } var fromID gitinterface.Hash slog.Debug(fmt.Sprintf("Identifying latest RSL entry for '%s'...", targetRef)) targetEntry, _, err := rsl.GetLatestReferenceUpdaterEntry(v.repo, rsl.ForReference(targetRef), rsl.IsUnskipped()) switch { case err == nil: fromID = targetEntry.GetTargetID() case errors.Is(err, rsl.ErrRSLEntryNotFound): fromID = gitinterface.ZeroHash default: return false, err } return v.verifyMergeable(ctx, targetRef, fromID, featureID) } func (v *PolicyVerifier) verifyMergeable(ctx context.Context, targetRef string, fromID, featureID gitinterface.Hash) (bool, error) { // We're specifically focused on commit merges here, this doesn't apply to // tags mergeTreeID, err := v.repo.GetMergeTree(fromID, featureID) if err != nil { return false, err } var ( currentPolicy *State currentAttestations *attestations.Attestations ) // Load latest policy slog.Debug("Loading latest policy...") initialPolicyEntry, err := v.searcher.FindLatestPolicyEntry() if err != nil { return false, err } state, err := LoadState(ctx, v.repo, initialPolicyEntry) if err != nil { return false, err } currentPolicy = state // Load latest attestations slog.Debug("Loading latest attestations...") initialAttestationsEntry, err := v.searcher.FindLatestAttestationsEntry() if err == nil { attestationsState, err 
:= attestations.LoadAttestationsForEntry(v.repo, initialAttestationsEntry) if err != nil { return false, err } currentAttestations = attestationsState } else if !errors.Is(err, attestations.ErrAttestationsNotFound) { // Attestations are not compulsory, so return err only // if it's some other error return false, err } authorizationAttestation, approverIDs, err := getApproverAttestationAndKeyIDsForIndex(ctx, v.repo, currentPolicy, currentAttestations, targetRef, fromID, mergeTreeID, false) if err != nil { return false, err } _, rslEntrySignatureNeededForThreshold, err := verifyGitObjectAndAttestations(ctx, currentPolicy, fmt.Sprintf("%s:%s", gitReferenceRuleScheme, targetRef), gitinterface.ZeroHash, authorizationAttestation, withApproverPrincipalIDs(approverIDs), withVerifyMergeable()) if err != nil { return false, fmt.Errorf("not enough approvals to meet Git namespace policies, %w", ErrVerificationFailed) } if !currentPolicy.hasFileRule { return rslEntrySignatureNeededForThreshold, nil } // Verify modified files commitIDs, err := v.repo.GetCommitsBetweenRange(featureID, fromID) if err != nil { return false, err } for _, commitID := range commitIDs { paths, err := v.repo.GetFilePathsChangedByCommit(commitID) if err != nil { return false, err } verifiedUsing := "" // this will be set after one successful verification of the commit to avoid repeated signature verification for _, path := range paths { // If we've already verified and identified commit signature, we can // just check if that verifier is trusted for the new path. If not // found, we don't make any assumptions about it being a failure in // case of name mismatches. So, the signature check proceeds as // usual. Also, we don't use verifyMergeable=true here. File // verification rules are not met using the signature on the RSL // entry, so we don't count threshold-1 here. verifiedUsing, _, err = verifyGitObjectAndAttestations(ctx, currentPolicy, fmt.Sprintf("%s:%s", fileRuleScheme, path), commitID, authorizationAttestation, withApproverPrincipalIDs(approverIDs), withTrustedVerifier(verifiedUsing)) if err != nil { return false, fmt.Errorf("verifying file namespace policies failed, %w", ErrVerificationFailed) } } } return rslEntrySignatureNeededForThreshold, nil } // VerifyRelativeForRef verifies the RSL between specified start and end entries // using the provided policy entry for the first entry. 
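// A minimal invocation sketch, assuming verifier, repo, and ctx are already
// in hand (error handling elided); the RSL helpers shown are the same ones
// used by VerifyRefFull above:
//
//	firstEntry, _, _ := rsl.GetFirstReferenceUpdaterEntryForRef(repo, "refs/heads/main")
//	lastEntry, _, _ := rsl.GetLatestReferenceUpdaterEntry(repo, rsl.ForReference("refs/heads/main"))
//	if err := verifier.VerifyRelativeForRef(ctx, firstEntry, lastEntry, "refs/heads/main"); err != nil {
//		// one or more entries in the range failed verification
//	}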
func (v *PolicyVerifier) VerifyRelativeForRef(ctx context.Context, firstEntry, lastEntry rsl.ReferenceUpdaterEntry, target string) error { /* require firstEntry != nil require lastEntry != nil require target != "" */ if v.persistentCacheEnabled { defer v.persistentCache.Commit(v.repo) //nolint:errcheck } var ( currentPolicy *State currentAttestations *attestations.Attestations err error ) // Load policy applicable at firstEntry slog.Debug(fmt.Sprintf("Loading policy applicable at first entry '%s'...", firstEntry.GetID().String())) initialPolicyEntry, err := v.searcher.FindPolicyEntryFor(firstEntry) if err == nil { state, err := LoadState(ctx, v.repo, initialPolicyEntry) if err != nil { return err } currentPolicy = state } else if !errors.Is(err, ErrPolicyNotFound) { // Searcher gives us nil when firstEntry is the very first entry // or close to it (i.e., before a policy was applied) return err } // require currentPolicy != nil || parent(firstEntry) == nil slog.Debug(fmt.Sprintf("Loading attestations applicable at first entry '%s'...", firstEntry.GetID().String())) initialAttestationsEntry, err := v.searcher.FindAttestationsEntryFor(firstEntry) if err == nil { attestationsState, err := attestations.LoadAttestationsForEntry(v.repo, initialAttestationsEntry) if err != nil { return err } currentAttestations = attestationsState } else if !errors.Is(err, attestations.ErrAttestationsNotFound) { // Attestations are not compulsory, so return err only // if it's some other error return err } // require currentAttestations != nil || (entry.Ref != attestations.Ref for entry in 0..firstEntry) // Enumerate RSL entries between firstEntry and lastEntry, ignoring irrelevant ones slog.Debug("Identifying all entries in range...") entries, annotations, err := rsl.GetReferenceUpdaterEntriesInRangeForRef(v.repo, firstEntry.GetID(), lastEntry.GetID(), target) if err != nil { return err } // require len(entries) != 0 // Verify each entry, looking for a fix when an invalid entry is encountered var invalidEntry rsl.ReferenceUpdaterEntry var verificationErr error for len(entries) != 0 { // invariant invalidEntry == nil || inRecoveryMode() == true if invalidEntry == nil { // Pop entry from queue entry := entries[0] entries = entries[1:] slog.Debug(fmt.Sprintf("Verifying entry '%s'...", entry.GetID().String())) switch entry := entry.(type) { case *rsl.PropagationEntry: slog.Debug(fmt.Sprintf("Entry '%s' is propagation entry, proceeding...", entry.GetID().String())) continue case *rsl.ReferenceEntry: slog.Debug("Checking if entry is for policy staging reference...") if entry.GetRefName() == PolicyStagingRef { continue } slog.Debug("Checking if entry is for policy reference...") if entry.GetRefName() == PolicyRef { if entry.GetID().Equal(firstEntry.GetID()) { // We've already loaded this policy continue } newPolicy, err := loadStateForEntry(v.repo, entry) if err != nil { return err } // require newPolicy != nil if currentPolicy != nil { // currentPolicy can be nil when // verifying from the beginning of the // RSL entry and we only have staging // refs slog.Debug("Verifying new policy using current policy...") if err := currentPolicy.VerifyNewState(ctx, newPolicy); err != nil { return err } slog.Debug("Updating current policy...") } else { slog.Debug("Setting current policy...") } currentPolicy = newPolicy if v.persistentCacheEnabled { v.persistentCache.InsertPolicyEntryNumber(entry.GetNumber(), entry.GetID()) } continue } slog.Debug("Checking if entry is for attestations reference...") if entry.GetRefName() == 
attestations.Ref { newAttestationsState, err := attestations.LoadAttestationsForEntry(v.repo, entry) if err != nil { return err } currentAttestations = newAttestationsState if v.persistentCacheEnabled { v.persistentCache.InsertAttestationEntryNumber(entry.GetNumber(), entry.GetID()) } continue } slog.Debug("Verifying changes...") if currentPolicy == nil { return ErrPolicyNotFound } if err := verifyEntry(ctx, v.repo, currentPolicy, currentAttestations, entry); err != nil { slog.Debug(fmt.Sprintf("Violation found: %s", err.Error())) slog.Debug("Checking if entry has been revoked...") // If the invalid entry is never marked as skipped, we return err if !entry.SkippedBy(annotations[entry.GetID().String()]) { return err } // The invalid entry's been marked as skipped but we still need // to see if another entry fixed state for non-gittuf users slog.Debug("Entry has been revoked, searching for fix entry...") invalidEntry = entry verificationErr = err if len(entries) == 0 { // Fix entry does not exist after revoking annotation return verificationErr } } else if v.persistentCacheEnabled { // Verification has passed, add to cache v.persistentCache.SetLastVerifiedEntryForRef(entry.GetRefName(), entry.GetNumber(), entry.GetID()) } continue } } // This is only reached when we have an invalid state. // First, the verification workflow determines the last good state for // the ref. This is needed to evaluate whether a fix for the invalid // state is available. After this is found, the workflow looks through // the remaining entries in the queue to find the fix. Until the fix is // found, entries encountered that are for other refs are added to a new // queue. Entries that are for the same ref but not the fix are // considered invalid. The workflow enters a valid state again when a) // the fix entry (which hasn't also been revoked) is found, and b) all // entries for the ref in the invalid range are marked as skipped by an // annotation. If these conditions don't both hold, the workflow returns // an error. After the fix is found, all remaining entries in the // original queue are also added to the new queue. The new queue then // takes the place of the original queue. This ensures that all entries // are processed even when an invalid state is reached. // 1. What's the last good state? slog.Debug("Identifying last valid state...") lastGoodEntry, lastGoodEntryAnnotations, err := rsl.GetLatestReferenceUpdaterEntry(v.repo, rsl.ForReference(invalidEntry.GetRefName()), rsl.BeforeEntryID(invalidEntry.GetID()), rsl.IsUnskipped(), rsl.IsReferenceEntry()) if err != nil { return err } slog.Debug("Verifying identified last valid entry has not been revoked...") if lastGoodEntry.(*rsl.ReferenceEntry).SkippedBy(lastGoodEntryAnnotations) { // this type assertion is fine because we use the rsl.IsReferenceEntry opt return ErrLastGoodEntryIsSkipped } // require lastGoodEntry != nil // TODO: what if the very first entry for a ref is a violation? // gittuf requires the fix to point to a commit that is tree-same as the // last good state lastGoodTreeID, err := v.repo.GetCommitTreeID(lastGoodEntry.GetTargetID()) if err != nil { return err } // 2. What entries do we have in the current verification set for the // ref? The first one that is tree-same as lastGoodEntry's commit is the // fix. 
Entries prior to that one in the queue are considered invalid // and must be skipped fixed := false var fixEntry *rsl.ReferenceEntry invalidIntermediateEntries := []*rsl.ReferenceEntry{} newEntryQueue := []rsl.ReferenceUpdaterEntry{} for len(entries) != 0 { newEntry := entries[0] entries = entries[1:] slog.Debug(fmt.Sprintf("Inspecting entry '%s' to see if it's a fix entry...", newEntry.GetID().String())) slog.Debug("Checking if entry is for the affected reference...") if newEntry.GetRefName() != invalidEntry.GetRefName() { // Unrelated entry that must be processed in the outer loop // Currently this is just policy entries newEntryQueue = append(newEntryQueue, newEntry) continue } switch newEntry := newEntry.(type) { case *rsl.PropagationEntry: // propagation entry cannot be a fix entry newEntryQueue = append(newEntryQueue, newEntry) continue case *rsl.ReferenceEntry: newCommitTreeID, err := v.repo.GetCommitTreeID(newEntry.GetTargetID()) if err != nil { return err } slog.Debug("Checking if entry is tree-same with last valid state...") if newCommitTreeID.Equal(lastGoodTreeID) { // Fix found, we append the rest of the current verification set // to the new entry queue // But first, we must check that this fix hasn't been skipped // If it has been skipped, it's not actually a fix and we need // to keep looking slog.Debug("Verifying potential fix entry has not been revoked...") if !newEntry.SkippedBy(annotations[newEntry.ID.String()]) { slog.Debug("Fix entry found, proceeding with regular verification workflow...") fixed = true fixEntry = newEntry newEntryQueue = append(newEntryQueue, entries...) } } if fixed { break } // newEntry is not tree-same / commit-same, so it is automatically // invalid, check that it's been marked as revoked slog.Debug("Checking non-fix entry has been revoked as well...") if !newEntry.SkippedBy(annotations[newEntry.ID.String()]) { invalidIntermediateEntries = append(invalidIntermediateEntries, newEntry) } } } if !fixed { // If we haven't found a fix, return the original error return verificationErr } if len(invalidIntermediateEntries) != 0 { // We may have found a fix but if an invalid intermediate entry // wasn't skipped, return error return ErrInvalidEntryNotSkipped } // Reset these trackers to continue verification with rest of the queue // We may encounter other issues invalidEntry = nil verificationErr = nil entries = newEntryQueue if v.persistentCacheEnabled { v.persistentCache.SetLastVerifiedEntryForRef(fixEntry.RefName, fixEntry.GetNumber(), fixEntry.GetID()) } } return nil } // VerifyNewState ensures that when a new policy is encountered, its root role // is signed by keys trusted in the current policy. func (s *State) VerifyNewState(ctx context.Context, newPolicy *State) error { rootVerifier, err := s.getRootVerifier() if err != nil { return err } _, err = rootVerifier.Verify(ctx, gitinterface.ZeroHash, newPolicy.RootEnvelope) return err } // verifyEntry is a helper to verify an entry's signature using the specified // policy. The specified policy is used for the RSL entry itself. However, for // commit signatures, verifyEntry checks when the commit was first introduced // via the RSL across all refs. Then, it uses the policy applicable at the // commit's first entry into the repository. If the commit is brand new to the // repository, the specified policy is used. 
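// For illustration, the rule lookups performed below build target paths from
// the scheme constants defined elsewhere in this package; assuming those
// schemes resolve to "git" and "file", the lookups take the form:
//
//	git:refs/heads/main
//	file:src/module/file.go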
func verifyEntry(ctx context.Context, repo *gitinterface.Repository, policy *State, attestationsState *attestations.Attestations, entry *rsl.ReferenceEntry) error { if entry.RefName == PolicyRef || entry.RefName == attestations.Ref { return nil } if strings.HasPrefix(entry.RefName, gitinterface.TagRefPrefix) { slog.Debug("Entry is for a Git tag, using tag verification workflow...") return verifyTagEntry(ctx, repo, policy, attestationsState, entry) } // Load the applicable reference authorization and approvals from trusted // code review systems slog.Debug("Searching for applicable reference authorizations and code reviews...") authorizationAttestation, approverKeyIDs, err := getApproverAttestationAndKeyIDs(ctx, repo, policy, attestationsState, entry) if err != nil { return err } // Verify Git namespace policies using the RSL entry and attestations if _, _, err := verifyGitObjectAndAttestations(ctx, policy, fmt.Sprintf("%s:%s", gitReferenceRuleScheme, entry.RefName), entry.ID, authorizationAttestation, withApproverPrincipalIDs(approverKeyIDs)); err != nil { return fmt.Errorf("verifying Git namespace policies failed, %w", ErrVerificationFailed) } // Check if policy has file rules at all for efficiency if !policy.hasFileRule { // No file rules to verify return nil } // Verify modified files // First, get all commits between the current and last entry for the ref. commitIDs, err := getCommits(repo, entry) // note: this is ordered by commit ID if err != nil { return err } for _, commitID := range commitIDs { paths, err := repo.GetFilePathsChangedByCommit(commitID) if err != nil { return err } verifiedUsing := "" // this will be set after one successful verification of the commit to avoid repeated signature verification for _, path := range paths { // If we've already verified and identified commit signature, we // can just check if that verifier is trusted for the new path. // If not found, we don't make any assumptions about it being a // failure in case of name mismatches. So, the signature check // proceeds as usual. 
verifiedUsing, _, err = verifyGitObjectAndAttestations(ctx, policy, fmt.Sprintf("%s:%s", fileRuleScheme, path), commitID, authorizationAttestation, withApproverPrincipalIDs(approverKeyIDs), withTrustedVerifier(verifiedUsing)) if err != nil { return fmt.Errorf("verifying file namespace policies failed, %w", ErrVerificationFailed) } } } return nil } func verifyTagEntry(ctx context.Context, repo *gitinterface.Repository, policy *State, attestationsState *attestations.Attestations, entry *rsl.ReferenceEntry) error { entryTagRef, err := repo.GetReference(entry.RefName) if err != nil { return err } tagTargetID, err := repo.GetTagTarget(entry.TargetID) if err != nil { return err } if !entry.TargetID.Equal(entryTagRef) && !entry.TargetID.Equal(tagTargetID) { return fmt.Errorf("verifying RSL entry failed, tag reference set to unexpected target") } authorizationAttestation, approverKeyIDs, err := getApproverAttestationAndKeyIDs(ctx, repo, policy, attestationsState, entry) if err != nil { return err } if _, _, err := verifyGitObjectAndAttestations(ctx, policy, fmt.Sprintf("%s:%s", gitReferenceRuleScheme, entry.RefName), entry.GetID(), authorizationAttestation, withApproverPrincipalIDs(approverKeyIDs), withTagObjectID(entry.TargetID)); err != nil { return fmt.Errorf("verifying tag entry failed, %w: %w", ErrVerificationFailed, err) } return nil } func getApproverAttestationAndKeyIDs(ctx context.Context, repo *gitinterface.Repository, policy *State, attestationsState *attestations.Attestations, entry *rsl.ReferenceEntry) (*sslibdsse.Envelope, *set.Set[string], error) { if attestationsState == nil { return nil, nil, nil } firstEntry := false slog.Debug(fmt.Sprintf("Searching for RSL entry for '%s' before entry '%s'...", entry.RefName, entry.ID.String())) priorRefEntry, _, err := rsl.GetLatestReferenceUpdaterEntry(repo, rsl.ForReference(entry.RefName), rsl.BeforeEntryID(entry.ID)) if err != nil { if !errors.Is(err, rsl.ErrRSLEntryNotFound) { return nil, nil, err } firstEntry = true } fromID := gitinterface.ZeroHash if !firstEntry { fromID = priorRefEntry.GetTargetID() } // We need to handle the case where we're approving a tag // For a tag, the expected toID in the approval is the commit the tag points to // Otherwise, the expected toID is the tree the commit points to var ( toID gitinterface.Hash isTag bool ) if strings.HasPrefix(entry.RefName, gitinterface.TagRefPrefix) { isTag = true toID, err = repo.GetTagTarget(entry.TargetID) } else { toID, err = repo.GetCommitTreeID(entry.TargetID) } if err != nil { return nil, nil, err } return getApproverAttestationAndKeyIDsForIndex(ctx, repo, policy, attestationsState, entry.RefName, fromID, toID, isTag) } func getApproverAttestationAndKeyIDsForIndex(ctx context.Context, repo *gitinterface.Repository, policy *State, attestationsState *attestations.Attestations, targetRef string, fromID, toID gitinterface.Hash, isTag bool) (*sslibdsse.Envelope, *set.Set[string], error) { if attestationsState == nil { return nil, nil, nil } slog.Debug(fmt.Sprintf("Finding reference authorization attestations for '%s' from '%s' to '%s'...", targetRef, fromID.String(), toID.String())) authorizationAttestation, err := attestationsState.GetReferenceAuthorizationFor(repo, targetRef, fromID.String(), toID.String()) if err != nil { if !errors.Is(err, authorizations.ErrAuthorizationNotFound) { return nil, nil, err } } approverIdentities := set.NewSet[string]() // When we add other code review systems, we can move this into a // generalized helper that inspects the attestations for each 
system trusted // in policy. // We only use this flow right now for non-tags as tags cannot be approved // on currently supported systems // TODO: support multiple apps / threshold per system if !isTag && policy.githubAppApprovalsTrusted { slog.Debug("GitHub pull request approvals are trusted, loading applicable attestations...") githubApprovalAttestation, err := attestationsState.GetGitHubPullRequestApprovalAttestationFor(repo, policy.githubAppRoleName, targetRef, fromID.String(), toID.String()) if err != nil { if !errors.Is(err, github.ErrPullRequestApprovalAttestationNotFound) { return nil, nil, err } } // if it exists if githubApprovalAttestation != nil { slog.Debug("GitHub pull request approval found, verifying attestation signature...") approvalVerifier := &SignatureVerifier{ repository: policy.repository, name: tuf.GitHubAppRoleName, principals: policy.githubAppKeys, threshold: 1, // TODO: support higher threshold } _, err := approvalVerifier.Verify(ctx, nil, githubApprovalAttestation) if err != nil { return nil, nil, fmt.Errorf("%w: failed to verify GitHub app approval attestation, signed by untrusted key", ErrVerificationFailed) } payloadBytes, err := githubApprovalAttestation.DecodeB64Payload() if err != nil { return nil, nil, err } // TODO: support multiple versions type tmpStatement struct { Type string `json:"_type"` Subject []*ita.ResourceDescriptor `json:"subject"` PredicateType string `json:"predicateType"` Predicate *githubv01.PullRequestApprovalAttestation `json:"predicate"` } stmt := new(tmpStatement) if err := json.Unmarshal(payloadBytes, stmt); err != nil { return nil, nil, err } for _, approver := range stmt.Predicate.GetApprovers() { approverIdentities.Add(approver) } } } return authorizationAttestation, approverIdentities, nil } // getCommits identifies the commits introduced to the entry's ref since the // last RSL entry for the same ref. These commits are then verified for file // policies. func getCommits(repo *gitinterface.Repository, entry *rsl.ReferenceEntry) ([]gitinterface.Hash, error) { firstEntry := false priorRefEntry, _, err := rsl.GetLatestReferenceUpdaterEntry(repo, rsl.ForReference(entry.RefName), rsl.BeforeEntryID(entry.ID)) if err != nil { if !errors.Is(err, rsl.ErrRSLEntryNotFound) { return nil, err } firstEntry = true } if firstEntry { return repo.GetCommitsBetweenRange(entry.TargetID, gitinterface.ZeroHash) } return repo.GetCommitsBetweenRange(entry.TargetID, priorRefEntry.GetTargetID()) } // verifyGitObjectAndAttestationsOptions contains the configurable options for // verifyGitObjectAndAttestations. type verifyGitObjectAndAttestationsOptions struct { approverPrincipalIDs *set.Set[string] verifyMergeable bool trustedVerifier string tagObjectID gitinterface.Hash } type verifyGitObjectAndAttestationsOption func(o *verifyGitObjectAndAttestationsOptions) // withApproverPrincipalIDs allows for optionally passing in approver IDs to // verifyGitObjectAndAttestations. These IDs may be obtained via a code review // tool such as GitHub pull request approvals. func withApproverPrincipalIDs(approverPrincipalIDs *set.Set[string]) verifyGitObjectAndAttestationsOption { return func(o *verifyGitObjectAndAttestationsOptions) { o.approverPrincipalIDs = approverPrincipalIDs } } // withVerifyMergeable indicates that the verification must check if a change // can be merged. 
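// This is intended for pre-merge checks where the RSL entry for the merge does not exist yet; in that case a rule's threshold may be met with one fewer signature, since the eventual RSL entry signature can supply the final approval.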
func withVerifyMergeable() verifyGitObjectAndAttestationsOption { return func(o *verifyGitObjectAndAttestationsOptions) { o.verifyMergeable = true } } // withTrustedVerifier is used to specify the name of a verifier that has // already been used to verify in the past. If the newly discovered set of // verifiers includes the trusted verifier, then we can return early. func withTrustedVerifier(name string) verifyGitObjectAndAttestationsOption { return func(o *verifyGitObjectAndAttestationsOptions) { o.trustedVerifier = name } } // withTagObjectID is used to set the Git ID of a tag object. When this is set, // the tag object's signature is also verified in addition to the RSL entry for // the tag. func withTagObjectID(objID gitinterface.Hash) verifyGitObjectAndAttestationsOption { return func(o *verifyGitObjectAndAttestationsOptions) { o.tagObjectID = objID } } func verifyGitObjectAndAttestations(ctx context.Context, policy *State, target string, gitID gitinterface.Hash, authorizationAttestation *sslibdsse.Envelope, opts ...verifyGitObjectAndAttestationsOption) (string, bool, error) { options := &verifyGitObjectAndAttestationsOptions{tagObjectID: gitinterface.ZeroHash} for _, fn := range opts { fn(options) } verifiers, err := policy.FindVerifiersForPath(target) if err != nil { return "", false, err } if len(verifiers) == 0 { // This target is not protected by gittuf policy return "", false, nil } if options.trustedVerifier != "" { for _, verifier := range verifiers { if verifier.Name() == options.trustedVerifier { return options.trustedVerifier, false, nil } } } appName := "" if policy.githubAppApprovalsTrusted { appName = policy.githubAppRoleName } verifiedUsing, acceptedPrincipalIDs, rslSignatureNeededForThreshold, err := verifyGitObjectAndAttestationsUsingVerifiers(ctx, verifiers, gitID, authorizationAttestation, appName, options.approverPrincipalIDs, options.verifyMergeable) if err != nil { return "", false, err } if !options.tagObjectID.IsZero() { // Verify tag object's signature as well tagObjVerified := false for _, verifier := range verifiers { // explicitly not looking at the attestation // that applies to the _push_ // thus, we also set threshold to 1 verifier.threshold = 1 _, err := verifier.Verify(ctx, options.tagObjectID, nil) if err == nil { // Signature verification succeeded tagObjVerified = true // TODO: should we check if a different verifier / signer was // matched for the tag object compared with the RSL entry? 
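// A single matching signature suffices for the tag object itself, so we stop at the first verifier that validates it.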
break } else if !errors.Is(err, ErrVerifierConditionsUnmet) { // Unexpected error return "", false, err } // Haven't found a valid verifier, continue with next verifier } if !tagObjVerified { return "", false, fmt.Errorf("verifying tag object's signature failed") } } verifiedPrincipalIDs := 0 if acceptedPrincipalIDs != nil { verifiedPrincipalIDs = acceptedPrincipalIDs.Len() } globalRules := policy.globalRules for _, rule := range globalRules { // We check every global rule slog.Debug(fmt.Sprintf("Checking if global rule '%s' applies...", rule.GetName())) switch rule := rule.(type) { case tuf.GlobalRuleThreshold: if !rule.Matches(target) { break } // The global rule applies to the namespace under verification slog.Debug(fmt.Sprintf("Verifying threshold global rule '%s'...", rule.GetName())) requiredThreshold := rule.GetThreshold() if rslSignatureNeededForThreshold && options.verifyMergeable { // Since we're verifying if it's mergeable and we already know // that the RSL signature is needed to meet threshold, we can // reduce the global constraint threshold as well slog.Debug("Reducing required global threshold by 1 (verifying if change is mergeable and RSL signature is required)...") requiredThreshold-- } if verifiedPrincipalIDs < requiredThreshold { // Check if the verifiedPrincipalIDs meets the required global // threshold slog.Debug(fmt.Sprintf("Global rule '%s' not met, required threshold '%d', only have '%d'", rule.GetName(), rule.GetThreshold(), verifiedPrincipalIDs)) return "", false, ErrVerifierConditionsUnmet } slog.Debug(fmt.Sprintf("Successfully verified global rule '%s'", rule.GetName())) case tuf.GlobalRuleBlockForcePushes: // TODO: we use policy.repository, not ideal... if !rule.Matches(target) { break } // The global rule applies to the namespace under verification slog.Debug(fmt.Sprintf("Verifying block force pushes global rule '%s'...", rule.GetName())) if options.verifyMergeable { // Cannot check for force pushes for a proposed change slog.Debug("Cannot verify block force pushes global rule when verifying if a change is mergeable") break } // TODO: should we not look up the entry afresh in the RSL here? // the in-memory cache _should_ make this okay, but something to // consider... // gitID _must_ be for an RSL reference entry, and we must find // its predecessor entry. // Why? Because the rule type only accepts git:<> as patterns. // If we have another object here, we've gone wrong somewhere.
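// The check below therefore (1) loads the RSL entry identified by gitID, (2) finds the latest unskipped reference entry for the same ref before it, and (3) requires the previous tip to be an ancestor of the new tip (via KnowsCommit); if it is not, history was rewritten and the rule is violated.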
currentEntry, err := rsl.GetEntry(policy.repository, gitID) if err != nil { slog.Debug(fmt.Sprintf("unable to load RSL entry for '%s': %v", gitID.String(), err)) return "", false, err } currentEntryRef, isReferenceEntry := currentEntry.(*rsl.ReferenceEntry) if !isReferenceEntry { slog.Debug(fmt.Sprintf("Expected '%s' to be RSL reference entry, aborting verification of block force pushes global rule...", gitID.String())) return "", false, rsl.ErrInvalidRSLEntry } previousEntryRef, _, err := rsl.GetLatestReferenceUpdaterEntry(policy.repository, rsl.BeforeEntryID(currentEntry.GetID()), rsl.ForReference(currentEntryRef.RefName), rsl.IsUnskipped()) if err != nil { if errors.Is(err, rsl.ErrRSLEntryNotFound) { slog.Debug(fmt.Sprintf("Entry '%s' is the first one for reference '%s', cannot check if it's a force push", currentEntryRef.GetID().String(), currentEntryRef.RefName)) break } return "", false, err } knows, err := policy.repository.KnowsCommit(currentEntryRef.TargetID, previousEntryRef.GetTargetID()) if err != nil { return "", false, err } if !knows { slog.Debug(fmt.Sprintf("Current entry's commit '%s' is not a descendant of prior entry's commit '%s'", currentEntryRef.TargetID.String(), previousEntryRef.GetTargetID().String())) return "", false, ErrVerifierConditionsUnmet } slog.Debug(fmt.Sprintf("Successfully verified global rule '%s' as '%s' is a descendant of '%s'", rule.GetName(), currentEntryRef.TargetID.String(), previousEntryRef.GetTargetID().String())) default: slog.Debug("Unknown global rule type, aborting verification...") return "", false, tuf.ErrUnknownGlobalRuleType } } return verifiedUsing, rslSignatureNeededForThreshold, nil } func verifyGitObjectAndAttestationsUsingVerifiers(ctx context.Context, verifiers []*SignatureVerifier, gitID gitinterface.Hash, authorizationAttestation *sslibdsse.Envelope, appName string, approverIDs *set.Set[string], verifyMergeable bool) (string, *set.Set[string], bool, error) { if len(verifiers) == 0 { return "", nil, false, ErrNoVerifiers } var ( verifiedUsing string acceptedPrincipalIDs *set.Set[string] rslEntrySignatureNeededForThreshold bool ) for _, verifier := range verifiers { trustedPrincipalIDs := verifier.TrustedPrincipalIDs() usedPrincipalIDs, err := verifier.Verify(ctx, gitID, authorizationAttestation) if err == nil { // We meet requirements just from the authorization attestation's sigs verifiedUsing = verifier.Name() acceptedPrincipalIDs = usedPrincipalIDs break } else if !errors.Is(err, ErrVerifierConditionsUnmet) { return "", nil, false, err } if approverIDs != nil { slog.Debug("Using approvers from code review tool attestations...") // Unify the principalIDs we've already used with that listed in // approval attestation // We ensure that someone who has signed an attestation and is listed in // the approval attestation is only counted once for _, approverID := range approverIDs.Contents() { // For each approver ID from the app attestation, we try to see // if it matches a principal in the current verifiers. 
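// A match requires the principal to be a tufv02.Person that declares an associated identity for this app (keyed by appName) whose value equals the approver ID reported by the review system; key-only principals cannot be matched this way.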
for _, principal := range verifier.principals { slog.Debug(fmt.Sprintf("Checking if approver identity '%s' matches '%s'...", approverID, principal.ID())) if usedPrincipalIDs.Has(principal.ID()) { // This principal has already been counted towards the // threshold slog.Debug(fmt.Sprintf("Principal '%s' has already been counted towards threshold, skipping...", principal.ID())) continue } // We can only match against a principal if it has a notion // of associated identities // Right now, this is just tufv02.Person if principal, isV02 := principal.(*tufv02.Person); isV02 { if associatedIdentity, has := principal.AssociatedIdentities[appName]; has && associatedIdentity == approverID { // The approver ID from the issuer (appName) matches // the principal's associated identity for the same // issuer! slog.Debug(fmt.Sprintf("Principal '%s' has associated identity '%s', counting principal towards threshold...", principal.ID(), approverID)) usedPrincipalIDs.Add(principal.ID()) break } } } } } // Get a list of used principals that are also trusted by the verifier trustedUsedPrincipalIDs := trustedPrincipalIDs.Intersection(usedPrincipalIDs) if trustedUsedPrincipalIDs.Len() >= verifier.Threshold() { // With approvals, we now meet threshold! slog.Debug(fmt.Sprintf("Counted '%d' principals towards threshold '%d' for '%s', threshold met!", trustedUsedPrincipalIDs.Len(), verifier.Threshold(), verifier.Name())) verifiedUsing = verifier.Name() acceptedPrincipalIDs = trustedUsedPrincipalIDs break } // If verifyMergeable is true, we only need to meet threshold - 1 if verifyMergeable && verifier.Threshold() > 1 { if trustedUsedPrincipalIDs.Len() >= verifier.Threshold()-1 { slog.Debug(fmt.Sprintf("Counted '%d' principals towards threshold '%d' for '%s', policies can be met if the merge is by authorized person!", trustedUsedPrincipalIDs.Len(), verifier.Threshold(), verifier.Name())) verifiedUsing = verifier.Name() acceptedPrincipalIDs = trustedPrincipalIDs rslEntrySignatureNeededForThreshold = true break } } } if verifiedUsing != "" { return verifiedUsing, acceptedPrincipalIDs, rslEntrySignatureNeededForThreshold, nil } return "", nil, false, ErrVerifierConditionsUnmet } gittuf-0.9.0/internal/policy/verify_test.go000066400000000000000000004212631475150141000210350ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package policy import ( "os" "path/filepath" "sort" "testing" "github.com/gittuf/gittuf/internal/attestations" authorizationsv01 "github.com/gittuf/gittuf/internal/attestations/authorizations/v01" "github.com/gittuf/gittuf/internal/common" "github.com/gittuf/gittuf/internal/dev" "github.com/gittuf/gittuf/internal/gitinterface" "github.com/gittuf/gittuf/internal/rsl" "github.com/gittuf/gittuf/internal/signerverifier/dsse" sslibdsse "github.com/gittuf/gittuf/internal/third_party/go-securesystemslib/dsse" "github.com/gittuf/gittuf/internal/tuf" tufv01 "github.com/gittuf/gittuf/internal/tuf/v01" tufv02 "github.com/gittuf/gittuf/internal/tuf/v02" "github.com/stretchr/testify/assert" ) // FIXME: the verification tests do not check for expected failures. More // broadly, we need to rework the test setup here starting with // createTestRepository and the state creation helpers. 
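// The tests below repeatedly follow the same pattern: create commits on a
// ref, record the new tip in the RSL, and run a PolicyVerifier over the
// result. The helper below is a minimal sketch of that pattern for reference;
// it is not part of the original test suite and simply reuses the
// createTestRepository, common, and rsl test helpers used throughout this
// file. n must be >= 1.
func verifyRefAfterTestCommits(t *testing.T, n int) error {
	t.Helper()

	repo, _ := createTestRepository(t, createTestStateWithPolicy)
	refName := "refs/heads/main"

	// Create n commits signed with the authorized test key and point refName
	// at the latest one
	commitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, refName, n, gpgKeyBytes)

	// Record the push in the RSL, signed with the same key
	entry := rsl.NewReferenceEntry(refName, commitIDs[len(commitIDs)-1])
	common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes)

	// Verify the ref against the applicable policy states
	verifier := NewPolicyVerifier(repo)
	_, err := verifier.VerifyRef(testCtx, refName)
	return err
}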
func TestVerifyRef(t *testing.T) { repo, _ := createTestRepository(t, createTestStateWithPolicy) refName := "refs/heads/main" commitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 1, gpgKeyBytes) entry := rsl.NewReferenceEntry(refName, commitIDs[0]) common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes) verifier := NewPolicyVerifier(repo) currentTip, err := verifier.VerifyRef(testCtx, refName) assert.Nil(t, err) assert.Equal(t, commitIDs[0], currentTip) } func TestVerifyRefFull(t *testing.T) { // FIXME: currently this test is identical to the one for VerifyRef. // This is because it's not trivial to create a bunch of test policy / RSL // states cleanly. We need something that is easy to maintain and add cases // to. repo, _ := createTestRepository(t, createTestStateWithPolicy) refName := "refs/heads/main" commitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 1, gpgKeyBytes) entry := rsl.NewReferenceEntry(refName, commitIDs[0]) common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes) verifier := NewPolicyVerifier(repo) currentTip, err := verifier.VerifyRefFull(testCtx, refName) assert.Nil(t, err) assert.Equal(t, commitIDs[0], currentTip) } func TestVerifyRefFromEntry(t *testing.T) { repo, _ := createTestRepository(t, createTestStateWithPolicy) refName := "refs/heads/main" // Policy violation commitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 3, gpgUnauthorizedKeyBytes) entry := rsl.NewReferenceEntry(refName, commitIDs[2]) common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgUnauthorizedKeyBytes) // Not policy violation by itself commitIDs = common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 3, gpgKeyBytes) entry = rsl.NewReferenceEntry(refName, commitIDs[2]) entryID := common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes) // Not policy violation by itself commitIDs = common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 2, gpgKeyBytes) entry = rsl.NewReferenceEntry(refName, commitIDs[1]) common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes) verifier := NewPolicyVerifier(repo) // Verification passes because it's from a non-violating state only currentTip, err := verifier.VerifyRefFromEntry(testCtx, refName, entryID) assert.Nil(t, err) assert.Equal(t, commitIDs[1], currentTip) } func TestVerifyRelativeForRefUsingPersons(t *testing.T) { t.Setenv(tufv02.AllowV02MetadataKey, "1") t.Setenv(dev.DevModeKey, "1") t.Run("no recovery", func(t *testing.T) { repo, _ := createTestRepository(t, createTestStateWithPolicyUsingPersons) refName := "refs/heads/main" commitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 1, gpgKeyBytes) firstEntry := rsl.NewReferenceEntry(refName, commitIDs[0]) firstEntryID := common.CreateTestRSLReferenceEntryCommit(t, repo, firstEntry, gpgKeyBytes) firstEntry.ID = firstEntryID commitIDs = common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 1, gpgKeyBytes) entry := rsl.NewReferenceEntry(refName, commitIDs[0]) entryID := common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes) entry.ID = entryID verifier := NewPolicyVerifier(repo) err := verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName) assert.Nil(t, err) err = verifier.VerifyRelativeForRef(testCtx, entry, firstEntry, refName) assert.ErrorIs(t, err, rsl.ErrRSLEntryNotFound) }) t.Run("no recovery, first entry is the very first entry", func(t *testing.T) { repo, _ := createTestRepository(t, createTestStateWithPolicyUsingPersons) refName := "refs/heads/main" firstEntry, 
_, err := rsl.GetFirstEntry(repo) if err != nil { t.Fatal(err) } commitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 1, gpgKeyBytes) entry := rsl.NewReferenceEntry(refName, commitIDs[0]) entryID := common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes) entry.ID = entryID verifier := NewPolicyVerifier(repo) err = verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName) assert.Nil(t, err) err = verifier.VerifyRelativeForRef(testCtx, entry, firstEntry, refName) assert.ErrorIs(t, err, rsl.ErrRSLEntryNotFound) }) t.Run("no recovery, first entry is the very first entry but policy is not applied", func(t *testing.T) { repo, _ := createTestRepository(t, createTestStateWithPolicyUsingPersons) currentRSLTip, err := repo.GetReference(rsl.Ref) if err != nil { t.Fatal(err) } currentRSLTipParentIDs, err := repo.GetCommitParentIDs(currentRSLTip) if err != nil { t.Fatal(err) } if err := repo.SetReference(rsl.Ref, currentRSLTipParentIDs[0]); err != nil { // Set to parent -> this is policy staging t.Fatal(err) } refName := "refs/heads/main" firstEntry, _, err := rsl.GetFirstEntry(repo) if err != nil { t.Fatal(err) } commitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 1, gpgKeyBytes) entry := rsl.NewReferenceEntry(refName, commitIDs[0]) entryID := common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes) entry.ID = entryID verifier := NewPolicyVerifier(repo) err = verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName) assert.ErrorIs(t, err, ErrPolicyNotFound) }) t.Run("with recovery, commit-same, recovered by authorized user", func(t *testing.T) { repo, _ := createTestRepository(t, createTestStateWithPolicyUsingPersons) refName := "refs/heads/main" commitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 1, gpgKeyBytes) firstEntry := rsl.NewReferenceEntry(refName, commitIDs[0]) firstEntryID := common.CreateTestRSLReferenceEntryCommit(t, repo, firstEntry, gpgKeyBytes) firstEntry.ID = firstEntryID commitIDs = common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 1, gpgKeyBytes) entry := rsl.NewReferenceEntry(refName, commitIDs[0]) entryID := common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes) entry.ID = entryID verifier := NewPolicyVerifier(repo) err := verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName) assert.Nil(t, err) validCommitID := commitIDs[0] // track this for later commitIDs = common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 5, gpgUnauthorizedKeyBytes) entry = rsl.NewReferenceEntry(refName, commitIDs[len(commitIDs)-1]) entryID = common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgUnauthorizedKeyBytes) entry.ID = entryID // It's in an invalid state right now, error out verifier = NewPolicyVerifier(repo) err = verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName) assert.ErrorIs(t, err, ErrVerificationFailed) // Fix using the known-good commit if err := repo.SetReference(refName, validCommitID); err != nil { t.Fatal(err) } // Create a skip annotation for the invalid entry annotation := rsl.NewAnnotationEntry([]gitinterface.Hash{entryID}, true, "invalid entry") annotationID := common.CreateTestRSLAnnotationEntryCommit(t, repo, annotation, gpgKeyBytes) annotation.ID = annotationID // Create a new entry moving branch back to valid commit entry = rsl.NewReferenceEntry(refName, validCommitID) entryID = common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes) entry.ID = entryID // No error anymore verifier = NewPolicyVerifier(repo) err = 
verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName) assert.Nil(t, err) }) t.Run("with recovery, commit-same, recovered by unauthorized user", func(t *testing.T) { repo, _ := createTestRepository(t, createTestStateWithPolicyUsingPersons) refName := "refs/heads/main" commitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 1, gpgKeyBytes) firstEntry := rsl.NewReferenceEntry(refName, commitIDs[0]) firstEntryID := common.CreateTestRSLReferenceEntryCommit(t, repo, firstEntry, gpgKeyBytes) firstEntry.ID = firstEntryID commitIDs = common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 1, gpgKeyBytes) entry := rsl.NewReferenceEntry(refName, commitIDs[0]) entryID := common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes) entry.ID = entryID verifier := NewPolicyVerifier(repo) err := verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName) assert.Nil(t, err) validCommitID := commitIDs[0] // track this for later commitIDs = common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 5, gpgUnauthorizedKeyBytes) entry = rsl.NewReferenceEntry(refName, commitIDs[len(commitIDs)-1]) entryID = common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgUnauthorizedKeyBytes) entry.ID = entryID // It's in an invalid state right now, error out verifier = NewPolicyVerifier(repo) err = verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName) assert.ErrorIs(t, err, ErrVerificationFailed) // Fix using the known-good commit if err := repo.SetReference(refName, validCommitID); err != nil { t.Fatal(err) } // Create a skip annotation for the invalid entry annotation := rsl.NewAnnotationEntry([]gitinterface.Hash{entryID}, true, "invalid entry") annotationID := common.CreateTestRSLAnnotationEntryCommit(t, repo, annotation, gpgUnauthorizedKeyBytes) annotation.ID = annotationID // Create a new entry moving branch back to valid commit entry = rsl.NewReferenceEntry(refName, validCommitID) entryID = common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgUnauthorizedKeyBytes) entry.ID = entryID // No error anymore verifier = NewPolicyVerifier(repo) err = verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName) assert.Nil(t, err) }) t.Run("with recovery, tree-same, recovered by authorized user", func(t *testing.T) { repo, _ := createTestRepository(t, createTestStateWithPolicyUsingPersons) refName := "refs/heads/main" commitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 1, gpgKeyBytes) firstEntry := rsl.NewReferenceEntry(refName, commitIDs[0]) firstEntryID := common.CreateTestRSLReferenceEntryCommit(t, repo, firstEntry, gpgKeyBytes) firstEntry.ID = firstEntryID commitIDs = common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 1, gpgKeyBytes) entry := rsl.NewReferenceEntry(refName, commitIDs[0]) entryID := common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes) entry.ID = entryID verifier := NewPolicyVerifier(repo) err := verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName) assert.Nil(t, err) validCommitID := commitIDs[0] // track this for later commitIDs = common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 5, gpgUnauthorizedKeyBytes) entry = rsl.NewReferenceEntry(refName, commitIDs[len(commitIDs)-1]) entryID = common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgUnauthorizedKeyBytes) entry.ID = entryID // It's in an invalid state right now, error out verifier = NewPolicyVerifier(repo) err = verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName) assert.ErrorIs(t, err, ErrVerificationFailed) // 
Fix using the known-good commit's tree validTreeID, err := repo.GetCommitTreeID(validCommitID) if err != nil { t.Fatal(err) } newCommitID, err := repo.CommitUsingSpecificKey(validTreeID, refName, "Revert invalid commit\n", gpgKeyBytes) if err != nil { t.Fatal(err) } // Create a skip annotation for the invalid entry annotation := rsl.NewAnnotationEntry([]gitinterface.Hash{entryID}, true, "invalid entry") annotationID := common.CreateTestRSLAnnotationEntryCommit(t, repo, annotation, gpgKeyBytes) annotation.ID = annotationID // Create a new entry moving branch back to valid commit entry = rsl.NewReferenceEntry(refName, newCommitID) entryID = common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes) entry.ID = entryID // No error anymore verifier = NewPolicyVerifier(repo) err = verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName) assert.Nil(t, err) }) t.Run("with recovery, tree-same, recovered by unauthorized user", func(t *testing.T) { repo, _ := createTestRepository(t, createTestStateWithPolicyUsingPersons) refName := "refs/heads/main" commitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 1, gpgKeyBytes) firstEntry := rsl.NewReferenceEntry(refName, commitIDs[0]) firstEntryID := common.CreateTestRSLReferenceEntryCommit(t, repo, firstEntry, gpgKeyBytes) firstEntry.ID = firstEntryID commitIDs = common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 1, gpgKeyBytes) entry := rsl.NewReferenceEntry(refName, commitIDs[0]) entryID := common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes) entry.ID = entryID verifier := NewPolicyVerifier(repo) err := verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName) assert.Nil(t, err) validCommitID := commitIDs[0] // track this for later commitIDs = common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 5, gpgUnauthorizedKeyBytes) entry = rsl.NewReferenceEntry(refName, commitIDs[len(commitIDs)-1]) entryID = common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgUnauthorizedKeyBytes) entry.ID = entryID // It's in an invalid state right now, error out verifier = NewPolicyVerifier(repo) err = verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName) assert.ErrorIs(t, err, ErrVerificationFailed) // Fix using the known-good commit's tree validTreeID, err := repo.GetCommitTreeID(validCommitID) if err != nil { t.Fatal(err) } newCommitID, err := repo.CommitUsingSpecificKey(validTreeID, refName, "Revert invalid commit\n", gpgUnauthorizedKeyBytes) if err != nil { t.Fatal(err) } // Create a skip annotation for the invalid entry annotation := rsl.NewAnnotationEntry([]gitinterface.Hash{entryID}, true, "invalid entry") annotationID := common.CreateTestRSLAnnotationEntryCommit(t, repo, annotation, gpgUnauthorizedKeyBytes) annotation.ID = annotationID // Create a new entry moving branch back to valid commit entry = rsl.NewReferenceEntry(refName, newCommitID) entryID = common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgUnauthorizedKeyBytes) entry.ID = entryID // No error anymore verifier = NewPolicyVerifier(repo) err = verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName) assert.Nil(t, err) }) t.Run("with recovery, commit-same, multiple invalid entries, recovered by authorized user", func(t *testing.T) { repo, _ := createTestRepository(t, createTestStateWithPolicyUsingPersons) refName := "refs/heads/main" commitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 1, gpgKeyBytes) firstEntry := rsl.NewReferenceEntry(refName, commitIDs[0]) firstEntryID := 
common.CreateTestRSLReferenceEntryCommit(t, repo, firstEntry, gpgKeyBytes) firstEntry.ID = firstEntryID commitIDs = common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 1, gpgKeyBytes) entry := rsl.NewReferenceEntry(refName, commitIDs[0]) entryID := common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes) entry.ID = entryID verifier := NewPolicyVerifier(repo) err := verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName) assert.Nil(t, err) validCommitID := commitIDs[0] // track this for later commitIDs = common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 1, gpgUnauthorizedKeyBytes) entry = rsl.NewReferenceEntry(refName, commitIDs[0]) entryID = common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgUnauthorizedKeyBytes) entry.ID = entryID // It's in an invalid state right now, error out verifier = NewPolicyVerifier(repo) err = verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName) assert.ErrorIs(t, err, ErrVerificationFailed) invalidEntryIDs := []gitinterface.Hash{entryID} commitIDs = common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 5, gpgUnauthorizedKeyBytes) entry = rsl.NewReferenceEntry(refName, commitIDs[len(commitIDs)-1]) entryID = common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgUnauthorizedKeyBytes) entry.ID = entryID // It's still in an invalid state right now, error out verifier = NewPolicyVerifier(repo) err = verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName) assert.ErrorIs(t, err, ErrVerificationFailed) invalidEntryIDs = append(invalidEntryIDs, entryID) // Fix using the known-good commit if err := repo.SetReference(refName, validCommitID); err != nil { t.Fatal(err) } // Create a skip annotation for the invalid entries annotation := rsl.NewAnnotationEntry(invalidEntryIDs, true, "invalid entry") annotationID := common.CreateTestRSLAnnotationEntryCommit(t, repo, annotation, gpgKeyBytes) annotation.ID = annotationID // Create a new entry moving branch back to valid commit entry = rsl.NewReferenceEntry(refName, validCommitID) entryID = common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes) entry.ID = entryID // No error anymore verifier = NewPolicyVerifier(repo) err = verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName) assert.Nil(t, err) }) t.Run("with recovery, commit-same, unskipped invalid entries, recovered by authorized user", func(t *testing.T) { repo, _ := createTestRepository(t, createTestStateWithPolicyUsingPersons) refName := "refs/heads/main" commitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 1, gpgKeyBytes) firstEntry := rsl.NewReferenceEntry(refName, commitIDs[0]) firstEntryID := common.CreateTestRSLReferenceEntryCommit(t, repo, firstEntry, gpgKeyBytes) firstEntry.ID = firstEntryID commitIDs = common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 1, gpgKeyBytes) entry := rsl.NewReferenceEntry(refName, commitIDs[0]) entryID := common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes) entry.ID = entryID verifier := NewPolicyVerifier(repo) err := verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName) assert.Nil(t, err) validCommitID := commitIDs[0] // track this for later commitIDs = common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 1, gpgUnauthorizedKeyBytes) entry = rsl.NewReferenceEntry(refName, commitIDs[0]) entryID = common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgUnauthorizedKeyBytes) entry.ID = entryID // It's in an invalid state right now, error out verifier = NewPolicyVerifier(repo) err = 
verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName) assert.ErrorIs(t, err, ErrVerificationFailed) invalidEntryIDs := []gitinterface.Hash{entryID} commitIDs = common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 5, gpgUnauthorizedKeyBytes) entry = rsl.NewReferenceEntry(refName, commitIDs[len(commitIDs)-1]) entryID = common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgUnauthorizedKeyBytes) entry.ID = entryID // It's still in an invalid state right now, error out verifier = NewPolicyVerifier(repo) err = verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName) assert.ErrorIs(t, err, ErrVerificationFailed) // Fix using the known-good commit if err := repo.SetReference(refName, validCommitID); err != nil { t.Fatal(err) } // Create a skip annotation for only one invalid entry annotation := rsl.NewAnnotationEntry(invalidEntryIDs, true, "invalid entry") annotationID := common.CreateTestRSLAnnotationEntryCommit(t, repo, annotation, gpgKeyBytes) annotation.ID = annotationID // Create a new entry moving branch back to valid commit entry = rsl.NewReferenceEntry(refName, validCommitID) entryID = common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes) entry.ID = entryID // An invalid entry is not marked as skipped verifier = NewPolicyVerifier(repo) err = verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName) assert.ErrorIs(t, err, ErrInvalidEntryNotSkipped) }) t.Run("with recovery, commit-same, recovered by authorized user, last good state is due to recovery", func(t *testing.T) { repo, _ := createTestRepository(t, createTestStateWithPolicyUsingPersons) refName := "refs/heads/main" commitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 1, gpgKeyBytes) firstEntry := rsl.NewReferenceEntry(refName, commitIDs[0]) firstEntryID := common.CreateTestRSLReferenceEntryCommit(t, repo, firstEntry, gpgKeyBytes) firstEntry.ID = firstEntryID commitIDs = common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 1, gpgKeyBytes) entry := rsl.NewReferenceEntry(refName, commitIDs[0]) entryID := common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes) entry.ID = entryID verifier := NewPolicyVerifier(repo) err := verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName) assert.Nil(t, err) validCommitID := commitIDs[0] // track this for later commitIDs = common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 5, gpgUnauthorizedKeyBytes) entry = rsl.NewReferenceEntry(refName, commitIDs[len(commitIDs)-1]) entryID = common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgUnauthorizedKeyBytes) entry.ID = entryID // It's in an invalid state right now, error out verifier = NewPolicyVerifier(repo) err = verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName) assert.ErrorIs(t, err, ErrVerificationFailed) // Fix using the known-good commit if err := repo.SetReference(refName, validCommitID); err != nil { t.Fatal(err) } // Create a skip annotation for the invalid entry annotation := rsl.NewAnnotationEntry([]gitinterface.Hash{entryID}, true, "invalid entry") annotationID := common.CreateTestRSLAnnotationEntryCommit(t, repo, annotation, gpgKeyBytes) annotation.ID = annotationID // Create a new entry moving branch back to valid commit entry = rsl.NewReferenceEntry(refName, validCommitID) entryID = common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes) entry.ID = entryID // No error anymore verifier = NewPolicyVerifier(repo) err = verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName) assert.Nil(t, err) // 
Send it into invalid state again commitIDs = common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 5, gpgUnauthorizedKeyBytes) entry = rsl.NewReferenceEntry(refName, commitIDs[len(commitIDs)-1]) entryID = common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgUnauthorizedKeyBytes) entry.ID = entryID // It's in an invalid state right now, error out verifier = NewPolicyVerifier(repo) err = verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName) assert.ErrorIs(t, err, ErrVerificationFailed) // Fix using the known-good commit if err := repo.SetReference(refName, validCommitID); err != nil { t.Fatal(err) } // Create a skip annotation for the invalid entry annotation = rsl.NewAnnotationEntry([]gitinterface.Hash{entryID}, true, "invalid entry") annotationID = common.CreateTestRSLAnnotationEntryCommit(t, repo, annotation, gpgKeyBytes) annotation.ID = annotationID // Create a new entry moving branch back to valid commit entry = rsl.NewReferenceEntry(refName, validCommitID) entryID = common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes) entry.ID = entryID // No error anymore verifier = NewPolicyVerifier(repo) err = verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName) assert.Nil(t, err) }) t.Run("with recovery, error because recovery goes back too far, recovered by authorized user", func(t *testing.T) { repo, _ := createTestRepository(t, createTestStateWithPolicyUsingPersons) refName := "refs/heads/main" commitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 1, gpgKeyBytes) firstEntry := rsl.NewReferenceEntry(refName, commitIDs[0]) firstEntryID := common.CreateTestRSLReferenceEntryCommit(t, repo, firstEntry, gpgKeyBytes) firstEntry.ID = firstEntryID commitIDs = common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 5, gpgKeyBytes) entry := rsl.NewReferenceEntry(refName, commitIDs[len(commitIDs)-1]) entryID := common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes) entry.ID = entryID verifier := NewPolicyVerifier(repo) err := verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName) assert.Nil(t, err) invalidLastGoodCommitID := commitIDs[len(commitIDs)-1] // Add more commits, change the number of commits to have different trees commitIDs = common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 4, gpgKeyBytes) entry = rsl.NewReferenceEntry(refName, commitIDs[len(commitIDs)-1]) entryID = common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes) entry.ID = entryID verifier = NewPolicyVerifier(repo) err = verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName) assert.Nil(t, err) commitIDs = common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 3, gpgUnauthorizedKeyBytes) entry = rsl.NewReferenceEntry(refName, commitIDs[len(commitIDs)-1]) entryID = common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgUnauthorizedKeyBytes) entry.ID = entryID // It's in an invalid state right now, error out verifier = NewPolicyVerifier(repo) err = verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName) assert.ErrorIs(t, err, ErrVerificationFailed) // Fix using the invalid last good commit if err := repo.SetReference(refName, invalidLastGoodCommitID); err != nil { t.Fatal(err) } // Create a skip annotation for the invalid entry annotation := rsl.NewAnnotationEntry([]gitinterface.Hash{entryID}, true, "invalid entry") annotationID := common.CreateTestRSLAnnotationEntryCommit(t, repo, annotation, gpgKeyBytes) annotation.ID = annotationID // Create a new entry moving branch back to invalid last good commit 
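// Note that this recovery target predates the most recent known-good state recorded in the RSL (the entry added after the later set of authorized commits), so the recovery itself rewinds history too far.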
entry = rsl.NewReferenceEntry(refName, invalidLastGoodCommitID) entryID = common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes) entry.ID = entryID // Recovery went back too far, so verification still fails verifier = NewPolicyVerifier(repo) err = verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName) assert.ErrorIs(t, err, ErrVerificationFailed) }) t.Run("with recovery but recovered entry is also skipped, tree-same, recovered by authorized user", func(t *testing.T) { repo, _ := createTestRepository(t, createTestStateWithPolicyUsingPersons) refName := "refs/heads/main" commitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 1, gpgKeyBytes) firstEntry := rsl.NewReferenceEntry(refName, commitIDs[0]) firstEntryID := common.CreateTestRSLReferenceEntryCommit(t, repo, firstEntry, gpgKeyBytes) firstEntry.ID = firstEntryID commitIDs = common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 1, gpgKeyBytes) entry := rsl.NewReferenceEntry(refName, commitIDs[0]) entryID := common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes) entry.ID = entryID verifier := NewPolicyVerifier(repo) err := verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName) assert.Nil(t, err) validCommitID := commitIDs[0] // track this for later commitIDs = common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 5, gpgUnauthorizedKeyBytes) entry = rsl.NewReferenceEntry(refName, commitIDs[len(commitIDs)-1]) entryID = common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgUnauthorizedKeyBytes) entry.ID = entryID // It's in an invalid state right now, error out verifier = NewPolicyVerifier(repo) err = verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName) assert.ErrorIs(t, err, ErrVerificationFailed) // Fix using the known-good commit's tree validTreeID, err := repo.GetCommitTreeID(validCommitID) if err != nil { t.Fatal(err) } newCommitID, err := repo.CommitUsingSpecificKey(validTreeID, refName, "Revert invalid commit\n", gpgKeyBytes) if err != nil { t.Fatal(err) } // Create a skip annotation for the invalid entry annotation := rsl.NewAnnotationEntry([]gitinterface.Hash{entryID}, true, "invalid entry") annotationID := common.CreateTestRSLAnnotationEntryCommit(t, repo, annotation, gpgKeyBytes) annotation.ID = annotationID // Create a new entry moving branch back to valid commit entry = rsl.NewReferenceEntry(refName, newCommitID) entryID = common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes) entry.ID = entryID // No error anymore verifier = NewPolicyVerifier(repo) err = verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName) assert.Nil(t, err) // Skip the recovery entry as well annotation = rsl.NewAnnotationEntry([]gitinterface.Hash{entryID}, true, "invalid entry") annotationID = common.CreateTestRSLAnnotationEntryCommit(t, repo, annotation, gpgKeyBytes) annotation.ID = annotationID verifier = NewPolicyVerifier(repo) err = verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName) assert.ErrorIs(t, err, ErrVerificationFailed) }) t.Run("with annotation but no fix entry", func(t *testing.T) { repo, _ := createTestRepository(t, createTestStateWithPolicyUsingPersons) refName := "refs/heads/main" commitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 1, gpgKeyBytes) firstEntry := rsl.NewReferenceEntry(refName, commitIDs[0]) firstEntryID := common.CreateTestRSLReferenceEntryCommit(t, repo, firstEntry, gpgKeyBytes) firstEntry.ID = firstEntryID commitIDs = common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 1, gpgKeyBytes) entry := 
rsl.NewReferenceEntry(refName, commitIDs[0]) entryID := common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes) entry.ID = entryID verifier := NewPolicyVerifier(repo) err := verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName) assert.Nil(t, err) commitIDs = common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 5, gpgUnauthorizedKeyBytes) entry = rsl.NewReferenceEntry(refName, commitIDs[len(commitIDs)-1]) entryID = common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgUnauthorizedKeyBytes) entry.ID = entryID // It's in an invalid state right now, error out verifier = NewPolicyVerifier(repo) err = verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName) assert.ErrorIs(t, err, ErrVerificationFailed) // Create a skip annotation for the invalid entry annotation := rsl.NewAnnotationEntry([]gitinterface.Hash{entryID}, true, "invalid entry") annotationID := common.CreateTestRSLAnnotationEntryCommit(t, repo, annotation, gpgKeyBytes) annotation.ID = annotationID // No fix entry, error out verifier = NewPolicyVerifier(repo) err = verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName) assert.ErrorIs(t, err, ErrVerificationFailed) }) } func TestVerifyMergeable(t *testing.T) { refName := "refs/heads/main" featureRefName := "refs/heads/feature" t.Setenv(dev.DevModeKey, "1") t.Setenv(tufv02.AllowV02MetadataKey, "1") t.Run("base commit zero, mergeable using GitHub approval, RSL entry signature required", func(t *testing.T) { repo, state := createTestRepository(t, createTestStateWithThresholdPolicyAndGitHubAppTrust) // We need to change the directory for this test because we `checkout` // for older Git versions, modifying the worktree. This chdir ensures // that the temporary directory is used as the worktree. 
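// This subtest stages a GitHub pull request approval attestation signed by the app key trusted in the test policy, then checks that VerifyMergeable reports an RSL entry signature is still required to meet the rule's threshold.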
pwd, err := os.Getwd() if err != nil { t.Fatal(err) } if err := os.Chdir(filepath.Join(repo.GetGitDir(), "..")); err != nil { t.Fatal(err) } defer os.Chdir(pwd) //nolint:errcheck commitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, featureRefName, 1, gpgKeyBytes) entry := rsl.NewReferenceEntry(featureRefName, commitIDs[0]) entryID := common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes) entry.ID = entryID commitTreeID, err := repo.GetCommitTreeID(commitIDs[0]) if err != nil { t.Fatal(err) } // Set up approval attestation with "john.doe" githubAppApproval, err := attestations.NewGitHubPullRequestApprovalAttestation(refName, gitinterface.ZeroHash.String(), commitTreeID.String(), []string{"john.doe"}, nil) if err != nil { t.Fatal(err) } // This signer for the GitHub app is trusted in the root setup by the // policy state creator helper signer := setupSSHKeysForSigning(t, targets1KeyBytes, targets1PubKeyBytes) env, err := dsse.CreateEnvelope(githubAppApproval) if err != nil { t.Fatal(err) } env, err = dsse.SignEnvelope(testCtx, env, signer) if err != nil { t.Fatal(err) } currentAttestations, err := attestations.LoadCurrentAttestations(repo) if err != nil { t.Fatal(err) } if err := currentAttestations.SetGitHubPullRequestApprovalAttestation(repo, env, "https://github.com", 1, state.githubAppRoleName, refName, gitinterface.ZeroHash.String(), commitTreeID.String()); err != nil { t.Fatal(err) } if err := currentAttestations.Commit(repo, "Add GitHub pull request approval", false); err != nil { t.Fatal(err) } verifier := NewPolicyVerifier(repo) rslSignatureRequired, err := verifier.VerifyMergeable(testCtx, refName, featureRefName) assert.Nil(t, err) assert.True(t, rslSignatureRequired) }) t.Run("base commit zero, mergeable using mixed approvals, RSL entry signature required", func(t *testing.T) { repo, state := createTestRepository(t, createTestStateWithThresholdPolicyAndGitHubAppTrustForMixedAttestations) // We need to change the directory for this test because we `checkout` // for older Git versions, modifying the worktree. This chdir ensures // that the temporary directory is used as the worktree. 
pwd, err := os.Getwd() if err != nil { t.Fatal(err) } if err := os.Chdir(filepath.Join(repo.GetGitDir(), "..")); err != nil { t.Fatal(err) } defer os.Chdir(pwd) //nolint:errcheck commitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, featureRefName, 1, gpgKeyBytes) entry := rsl.NewReferenceEntry(featureRefName, commitIDs[0]) entryID := common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes) entry.ID = entryID commitTreeID, err := repo.GetCommitTreeID(commitIDs[0]) if err != nil { t.Fatal(err) } currentAttestations, err := attestations.LoadCurrentAttestations(repo) if err != nil { t.Fatal(err) } // Set up approval attestation with "jill.doe" githubAppApproval, err := attestations.NewGitHubPullRequestApprovalAttestation(refName, gitinterface.ZeroHash.String(), commitTreeID.String(), []string{"jill.doe"}, nil) if err != nil { t.Fatal(err) } // This signer for the GitHub app is trusted in the root setup by the // policy state creator helper signer := setupSSHKeysForSigning(t, targets1KeyBytes, targets1PubKeyBytes) env, err := dsse.CreateEnvelope(githubAppApproval) if err != nil { t.Fatal(err) } env, err = dsse.SignEnvelope(testCtx, env, signer) if err != nil { t.Fatal(err) } if err := currentAttestations.SetGitHubPullRequestApprovalAttestation(repo, env, "https://github.com", 1, state.githubAppRoleName, refName, gitinterface.ZeroHash.String(), commitTreeID.String()); err != nil { t.Fatal(err) } // Set up reference authorization from "john.doe" refAuthorization, err := attestations.NewReferenceAuthorizationForCommit(refName, gitinterface.ZeroHash.String(), commitTreeID.String()) if err != nil { t.Fatal(err) } // This signer is for the SSH keys associated with john.doe signer = setupSSHKeysForSigning(t, targets2KeyBytes, targets2PubKeyBytes) env, err = dsse.CreateEnvelope(refAuthorization) if err != nil { t.Fatal(err) } env, err = dsse.SignEnvelope(testCtx, env, signer) if err != nil { t.Fatal(err) } if err := currentAttestations.SetReferenceAuthorization(repo, env, refName, gitinterface.ZeroHash.String(), commitTreeID.String()); err != nil { t.Fatal(err) } if err := currentAttestations.Commit(repo, "Add GitHub pull request approval and reference authorization", false); err != nil { t.Fatal(err) } verifier := NewPolicyVerifier(repo) rslSignatureRequired, err := verifier.VerifyMergeable(testCtx, refName, featureRefName) assert.Nil(t, err) assert.True(t, rslSignatureRequired) }) t.Run("base commit zero, mergeable using GitHub approval, RSL entry signature not required", func(t *testing.T) { repo, state := createTestRepository(t, createTestStateWithThresholdPolicyAndGitHubAppTrust) // We need to change the directory for this test because we `checkout` // for older Git versions, modifying the worktree. This chdir ensures // that the temporary directory is used as the worktree. 
pwd, err := os.Getwd() if err != nil { t.Fatal(err) } if err := os.Chdir(filepath.Join(repo.GetGitDir(), "..")); err != nil { t.Fatal(err) } defer os.Chdir(pwd) //nolint:errcheck commitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, featureRefName, 1, gpgKeyBytes) entry := rsl.NewReferenceEntry(featureRefName, commitIDs[0]) entryID := common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes) entry.ID = entryID commitTreeID, err := repo.GetCommitTreeID(commitIDs[0]) if err != nil { t.Fatal(err) } // Add approval with "jane.doe" and "john.doe" githubAppApproval, err := attestations.NewGitHubPullRequestApprovalAttestation(refName, gitinterface.ZeroHash.String(), commitTreeID.String(), []string{"jane.doe", "john.doe"}, nil) if err != nil { t.Fatal(err) } // This signer for the GitHub app is trusted in the root setup by the // policy state creator helper signer := setupSSHKeysForSigning(t, targets1KeyBytes, targets1PubKeyBytes) env, err := dsse.CreateEnvelope(githubAppApproval) if err != nil { t.Fatal(err) } env, err = dsse.SignEnvelope(testCtx, env, signer) if err != nil { t.Fatal(err) } currentAttestations, err := attestations.LoadCurrentAttestations(repo) if err != nil { t.Fatal(err) } if err := currentAttestations.SetGitHubPullRequestApprovalAttestation(repo, env, "https://github.com", 1, state.githubAppRoleName, refName, gitinterface.ZeroHash.String(), commitTreeID.String()); err != nil { t.Fatal(err) } if err := currentAttestations.Commit(repo, "Add GitHub pull request approval", false); err != nil { t.Fatal(err) } verifier := NewPolicyVerifier(repo) rslSignatureRequired, err := verifier.VerifyMergeable(testCtx, refName, featureRefName) assert.Nil(t, err) assert.False(t, rslSignatureRequired) }) t.Run("base commit zero, not mergeable", func(t *testing.T) { repo, state := createTestRepository(t, createTestStateWithThresholdPolicyAndGitHubAppTrust) // We need to change the directory for this test because we `checkout` // for older Git versions, modifying the worktree. This chdir ensures // that the temporary directory is used as the worktree. 
pwd, err := os.Getwd() if err != nil { t.Fatal(err) } if err := os.Chdir(filepath.Join(repo.GetGitDir(), "..")); err != nil { t.Fatal(err) } defer os.Chdir(pwd) //nolint:errcheck commitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, featureRefName, 1, gpgKeyBytes) entry := rsl.NewReferenceEntry(featureRefName, commitIDs[0]) entryID := common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes) entry.ID = entryID commitTreeID, err := repo.GetCommitTreeID(commitIDs[0]) if err != nil { t.Fatal(err) } // Add approval with "alice" and "bob" // These are untrusted identities githubAppApproval, err := attestations.NewGitHubPullRequestApprovalAttestation(refName, gitinterface.ZeroHash.String(), commitTreeID.String(), []string{"alice", "bob"}, nil) if err != nil { t.Fatal(err) } // This signer for the GitHub app is trusted in the root setup by the // policy state creator helper signer := setupSSHKeysForSigning(t, targets1KeyBytes, targets1PubKeyBytes) env, err := dsse.CreateEnvelope(githubAppApproval) if err != nil { t.Fatal(err) } env, err = dsse.SignEnvelope(testCtx, env, signer) if err != nil { t.Fatal(err) } currentAttestations, err := attestations.LoadCurrentAttestations(repo) if err != nil { t.Fatal(err) } if err := currentAttestations.SetGitHubPullRequestApprovalAttestation(repo, env, "https://github.com", 1, state.githubAppRoleName, refName, gitinterface.ZeroHash.String(), commitTreeID.String()); err != nil { t.Fatal(err) } if err := currentAttestations.Commit(repo, "Add GitHub pull request approval", false); err != nil { t.Fatal(err) } verifier := NewPolicyVerifier(repo) rslSignatureRequired, err := verifier.VerifyMergeable(testCtx, refName, featureRefName) assert.ErrorIs(t, err, ErrVerificationFailed) assert.False(t, rslSignatureRequired) }) t.Run("base commit not zero, mergeable using GitHub approval, RSL entry signature required", func(t *testing.T) { repo, state := createTestRepository(t, createTestStateWithThresholdPolicyAndGitHubAppTrust) // We need to change the directory for this test because we `checkout` // for older Git versions, modifying the worktree. This chdir ensures // that the temporary directory is used as the worktree. 
pwd, err := os.Getwd() if err != nil { t.Fatal(err) } if err := os.Chdir(filepath.Join(repo.GetGitDir(), "..")); err != nil { t.Fatal(err) } defer os.Chdir(pwd) //nolint:errcheck baseCommitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 2, gpgKeyBytes) baseEntry := rsl.NewReferenceEntry(refName, baseCommitIDs[1]) baseEntryID := common.CreateTestRSLReferenceEntryCommit(t, repo, baseEntry, gpgKeyBytes) baseEntry.ID = baseEntryID if err := repo.SetReference("HEAD", baseCommitIDs[1]); err != nil { t.Fatal(err) } repo.RestoreWorktree(t) // Set feature to the same commit as main if err := repo.SetReference(featureRefName, baseCommitIDs[1]); err != nil { t.Fatal(err) } featureCommitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, featureRefName, 2, gpgKeyBytes) featureEntry := rsl.NewReferenceEntry(featureRefName, featureCommitIDs[1]) // latest commit featureEntryID := common.CreateTestRSLReferenceEntryCommit(t, repo, featureEntry, gpgKeyBytes) featureEntry.ID = featureEntryID commitTreeID, err := repo.GetCommitTreeID(featureCommitIDs[1]) // latest commit if err != nil { t.Fatal(err) } // Set up approval attestation with "john.doe" githubAppApproval, err := attestations.NewGitHubPullRequestApprovalAttestation(refName, baseCommitIDs[1].String(), commitTreeID.String(), []string{"john.doe"}, nil) if err != nil { t.Fatal(err) } // This signer for the GitHub app is trusted in the root setup by the // policy state creator helper signer := setupSSHKeysForSigning(t, targets1KeyBytes, targets1PubKeyBytes) env, err := dsse.CreateEnvelope(githubAppApproval) if err != nil { t.Fatal(err) } env, err = dsse.SignEnvelope(testCtx, env, signer) if err != nil { t.Fatal(err) } currentAttestations, err := attestations.LoadCurrentAttestations(repo) if err != nil { t.Fatal(err) } if err := currentAttestations.SetGitHubPullRequestApprovalAttestation(repo, env, "https://github.com", 1, state.githubAppRoleName, refName, baseCommitIDs[1].String(), commitTreeID.String()); err != nil { t.Fatal(err) } if err := currentAttestations.Commit(repo, "Add GitHub pull request approval", false); err != nil { t.Fatal(err) } verifier := NewPolicyVerifier(repo) rslSignatureRequired, err := verifier.VerifyMergeable(testCtx, refName, featureRefName) assert.Nil(t, err) assert.True(t, rslSignatureRequired) }) t.Run("base commit not zero, mergeable using mixed approvals, RSL entry signature required", func(t *testing.T) { repo, state := createTestRepository(t, createTestStateWithThresholdPolicyAndGitHubAppTrustForMixedAttestations) // We need to change the directory for this test because we `checkout` // for older Git versions, modifying the worktree. This chdir ensures // that the temporary directory is used as the worktree. 
pwd, err := os.Getwd() if err != nil { t.Fatal(err) } if err := os.Chdir(filepath.Join(repo.GetGitDir(), "..")); err != nil { t.Fatal(err) } defer os.Chdir(pwd) //nolint:errcheck baseCommitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 2, gpgKeyBytes) baseEntry := rsl.NewReferenceEntry(refName, baseCommitIDs[1]) baseEntryID := common.CreateTestRSLReferenceEntryCommit(t, repo, baseEntry, gpgKeyBytes) baseEntry.ID = baseEntryID if err := repo.SetReference("HEAD", baseCommitIDs[1]); err != nil { t.Fatal(err) } repo.RestoreWorktree(t) // Set feature to the same commit as main if err := repo.SetReference(featureRefName, baseCommitIDs[1]); err != nil { t.Fatal(err) } featureCommitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, featureRefName, 2, gpgKeyBytes) featureEntry := rsl.NewReferenceEntry(featureRefName, featureCommitIDs[1]) // latest commit featureEntryID := common.CreateTestRSLReferenceEntryCommit(t, repo, featureEntry, gpgKeyBytes) featureEntry.ID = featureEntryID commitTreeID, err := repo.GetCommitTreeID(featureCommitIDs[1]) // latest commit if err != nil { t.Fatal(err) } currentAttestations, err := attestations.LoadCurrentAttestations(repo) if err != nil { t.Fatal(err) } // Set up approval attestation with "jill.doe" githubAppApproval, err := attestations.NewGitHubPullRequestApprovalAttestation(refName, baseCommitIDs[1].String(), commitTreeID.String(), []string{"jill.doe"}, nil) if err != nil { t.Fatal(err) } // This signer for the GitHub app is trusted in the root setup by the // policy state creator helper signer := setupSSHKeysForSigning(t, targets1KeyBytes, targets1PubKeyBytes) env, err := dsse.CreateEnvelope(githubAppApproval) if err != nil { t.Fatal(err) } env, err = dsse.SignEnvelope(testCtx, env, signer) if err != nil { t.Fatal(err) } if err := currentAttestations.SetGitHubPullRequestApprovalAttestation(repo, env, "https://github.com", 1, state.githubAppRoleName, refName, baseCommitIDs[1].String(), commitTreeID.String()); err != nil { t.Fatal(err) } // Set up reference authorization from "john.doe" refAuthorization, err := attestations.NewReferenceAuthorizationForCommit(refName, baseCommitIDs[1].String(), commitTreeID.String()) if err != nil { t.Fatal(err) } // This is the key associated with john.doe signer = setupSSHKeysForSigning(t, targets2KeyBytes, targets2PubKeyBytes) env, err = dsse.CreateEnvelope(refAuthorization) if err != nil { t.Fatal(err) } env, err = dsse.SignEnvelope(testCtx, env, signer) if err != nil { t.Fatal(err) } if err := currentAttestations.SetReferenceAuthorization(repo, env, refName, baseCommitIDs[1].String(), commitTreeID.String()); err != nil { t.Fatal(err) } if err := currentAttestations.Commit(repo, "Add GitHub pull request approval and reference authorization", false); err != nil { t.Fatal(err) } verifier := NewPolicyVerifier(repo) rslSignatureRequired, err := verifier.VerifyMergeable(testCtx, refName, featureRefName) assert.Nil(t, err) assert.True(t, rslSignatureRequired) }) t.Run("base commit not zero, mergeable using GitHub approval, RSL entry signature not required", func(t *testing.T) { repo, state := createTestRepository(t, createTestStateWithThresholdPolicyAndGitHubAppTrust) // We need to change the directory for this test because we `checkout` // for older Git versions, modifying the worktree. This chdir ensures // that the temporary directory is used as the worktree. 
pwd, err := os.Getwd() if err != nil { t.Fatal(err) } if err := os.Chdir(filepath.Join(repo.GetGitDir(), "..")); err != nil { t.Fatal(err) } defer os.Chdir(pwd) //nolint:errcheck baseCommitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 2, gpgKeyBytes) baseEntry := rsl.NewReferenceEntry(refName, baseCommitIDs[1]) baseEntryID := common.CreateTestRSLReferenceEntryCommit(t, repo, baseEntry, gpgKeyBytes) baseEntry.ID = baseEntryID if err := repo.SetReference("HEAD", baseCommitIDs[1]); err != nil { t.Fatal(err) } repo.RestoreWorktree(t) // Set feature to the same commit as main if err := repo.SetReference(featureRefName, baseCommitIDs[1]); err != nil { t.Fatal(err) } featureCommitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, featureRefName, 2, gpgKeyBytes) featureEntry := rsl.NewReferenceEntry(featureRefName, featureCommitIDs[1]) // latest commit featureEntryID := common.CreateTestRSLReferenceEntryCommit(t, repo, featureEntry, gpgKeyBytes) featureEntry.ID = featureEntryID commitTreeID, err := repo.GetCommitTreeID(featureCommitIDs[1]) // latest commit if err != nil { t.Fatal(err) } // Add approval with "jane.doe" and "john.doe" githubAppApproval, err := attestations.NewGitHubPullRequestApprovalAttestation(refName, baseCommitIDs[1].String(), commitTreeID.String(), []string{"john.doe", "jane.doe"}, nil) if err != nil { t.Fatal(err) } // This signer for the GitHub app is trusted in the root setup by the // policy state creator helper signer := setupSSHKeysForSigning(t, targets1KeyBytes, targets1PubKeyBytes) env, err := dsse.CreateEnvelope(githubAppApproval) if err != nil { t.Fatal(err) } env, err = dsse.SignEnvelope(testCtx, env, signer) if err != nil { t.Fatal(err) } currentAttestations, err := attestations.LoadCurrentAttestations(repo) if err != nil { t.Fatal(err) } if err := currentAttestations.SetGitHubPullRequestApprovalAttestation(repo, env, "https://github.com", 1, state.githubAppRoleName, refName, baseCommitIDs[1].String(), commitTreeID.String()); err != nil { t.Fatal(err) } if err := currentAttestations.Commit(repo, "Add GitHub pull request approval", false); err != nil { t.Fatal(err) } verifier := NewPolicyVerifier(repo) rslSignatureRequired, err := verifier.VerifyMergeable(testCtx, refName, featureRefName) assert.Nil(t, err) assert.False(t, rslSignatureRequired) }) t.Run("base commit not zero, not mergeable", func(t *testing.T) { repo, state := createTestRepository(t, createTestStateWithThresholdPolicyAndGitHubAppTrust) // We need to change the directory for this test because we `checkout` // for older Git versions, modifying the worktree. This chdir ensures // that the temporary directory is used as the worktree. 
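		// Reader note: the approvals below come from "alice" and "bob", who
		// are not trusted approvers under this policy, so VerifyMergeable is
		// expected to fail with ErrVerificationFailed and to report that no
		// RSL entry signature is required.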
		pwd, err := os.Getwd()
		if err != nil {
			t.Fatal(err)
		}
		if err := os.Chdir(filepath.Join(repo.GetGitDir(), "..")); err != nil {
			t.Fatal(err)
		}
		defer os.Chdir(pwd) //nolint:errcheck

		baseCommitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 2, gpgKeyBytes)
		baseEntry := rsl.NewReferenceEntry(refName, baseCommitIDs[1])
		baseEntryID := common.CreateTestRSLReferenceEntryCommit(t, repo, baseEntry, gpgKeyBytes)
		baseEntry.ID = baseEntryID

		if err := repo.SetReference("HEAD", baseCommitIDs[1]); err != nil {
			t.Fatal(err)
		}
		repo.RestoreWorktree(t)

		// Set feature to the same commit as main
		if err := repo.SetReference(featureRefName, baseCommitIDs[1]); err != nil {
			t.Fatal(err)
		}
		featureCommitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, featureRefName, 2, gpgKeyBytes)
		featureEntry := rsl.NewReferenceEntry(featureRefName, featureCommitIDs[1]) // latest commit
		featureEntryID := common.CreateTestRSLReferenceEntryCommit(t, repo, featureEntry, gpgKeyBytes)
		featureEntry.ID = featureEntryID

		commitTreeID, err := repo.GetCommitTreeID(featureCommitIDs[1]) // latest commit
		if err != nil {
			t.Fatal(err)
		}

		// Add approval with "alice" and "bob"
		// These are untrusted approvals
		githubAppApproval, err := attestations.NewGitHubPullRequestApprovalAttestation(refName, baseCommitIDs[1].String(), commitTreeID.String(), []string{"alice", "bob"}, nil)
		if err != nil {
			t.Fatal(err)
		}

		// This signer for the GitHub app is trusted in the root setup by the
		// policy state creator helper
		signer := setupSSHKeysForSigning(t, targets1KeyBytes, targets1PubKeyBytes)

		env, err := dsse.CreateEnvelope(githubAppApproval)
		if err != nil {
			t.Fatal(err)
		}
		env, err = dsse.SignEnvelope(testCtx, env, signer)
		if err != nil {
			t.Fatal(err)
		}

		currentAttestations, err := attestations.LoadCurrentAttestations(repo)
		if err != nil {
			t.Fatal(err)
		}
		if err := currentAttestations.SetGitHubPullRequestApprovalAttestation(repo, env, "https://github.com", 1, state.githubAppRoleName, refName, baseCommitIDs[1].String(), commitTreeID.String()); err != nil {
			t.Fatal(err)
		}
		if err := currentAttestations.Commit(repo, "Add GitHub pull request approval", false); err != nil {
			t.Fatal(err)
		}

		verifier := NewPolicyVerifier(repo)
		rslSignatureRequired, err := verifier.VerifyMergeable(testCtx, refName, featureRefName)
		assert.ErrorIs(t, err, ErrVerificationFailed)
		assert.False(t, rslSignatureRequired)
	})

	t.Run("unprotected base branch", func(t *testing.T) {
		refName := "refs/heads/unprotected" // overriding refName
		repo, _ := createTestRepository(t, createTestStateWithPolicy)

		baseCommitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 2, gpgKeyBytes)
		baseEntry := rsl.NewReferenceEntry(refName, baseCommitIDs[1])
		baseEntryID := common.CreateTestRSLReferenceEntryCommit(t, repo, baseEntry, gpgKeyBytes)
		baseEntry.ID = baseEntryID

		if err := repo.SetReference("HEAD", baseCommitIDs[1]); err != nil {
			t.Fatal(err)
		}

		// Set feature to the same commit as base
		if err := repo.SetReference(featureRefName, baseCommitIDs[1]); err != nil {
			t.Fatal(err)
		}
		featureCommitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, featureRefName, 2, gpgKeyBytes)
		featureEntry := rsl.NewReferenceEntry(featureRefName, featureCommitIDs[1]) // latest commit
		featureEntryID := common.CreateTestRSLReferenceEntryCommit(t, repo, featureEntry, gpgKeyBytes)
		featureEntry.ID = featureEntryID

		verifier := NewPolicyVerifier(repo)
		rslSignatureRequired, err := verifier.VerifyMergeable(testCtx, refName, featureRefName)
		assert.Nil(t, err)
		assert.False(t, rslSignatureRequired)
	})
}

func TestVerifyMergeableForCommit(t *testing.T) {
	refName := "refs/heads/main"
	featureRefName := "refs/heads/feature"

	t.Run("base commit zero, mergeable using GitHub approval, RSL entry signature required", func(t *testing.T) {
		repo, state := createTestRepository(t, createTestStateWithThresholdPolicyAndGitHubAppTrust)

		// We need to change the directory for this test because we `checkout`
		// for older Git versions, modifying the worktree. This chdir ensures
		// that the temporary directory is used as the worktree.
		pwd, err := os.Getwd()
		if err != nil {
			t.Fatal(err)
		}
		if err := os.Chdir(filepath.Join(repo.GetGitDir(), "..")); err != nil {
			t.Fatal(err)
		}
		defer os.Chdir(pwd) //nolint:errcheck

		commitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, featureRefName, 1, gpgKeyBytes)
		featureID := commitIDs[0]

		commitTreeID, err := repo.GetCommitTreeID(featureID)
		if err != nil {
			t.Fatal(err)
		}

		// Set up approval attestation with "john.doe"
		githubAppApproval, err := attestations.NewGitHubPullRequestApprovalAttestation(refName, gitinterface.ZeroHash.String(), commitTreeID.String(), []string{"john.doe"}, nil)
		if err != nil {
			t.Fatal(err)
		}

		// This signer for the GitHub app is trusted in the root setup by the
		// policy state creator helper
		signer := setupSSHKeysForSigning(t, targets1KeyBytes, targets1PubKeyBytes)

		env, err := dsse.CreateEnvelope(githubAppApproval)
		if err != nil {
			t.Fatal(err)
		}
		env, err = dsse.SignEnvelope(testCtx, env, signer)
		if err != nil {
			t.Fatal(err)
		}

		currentAttestations, err := attestations.LoadCurrentAttestations(repo)
		if err != nil {
			t.Fatal(err)
		}
		if err := currentAttestations.SetGitHubPullRequestApprovalAttestation(repo, env, "https://github.com", 1, state.githubAppRoleName, refName, gitinterface.ZeroHash.String(), commitTreeID.String()); err != nil {
			t.Fatal(err)
		}
		if err := currentAttestations.Commit(repo, "Add GitHub pull request approval", false); err != nil {
			t.Fatal(err)
		}

		verifier := NewPolicyVerifier(repo)
		rslSignatureRequired, err := verifier.VerifyMergeableForCommit(testCtx, refName, featureID)
		assert.Nil(t, err)
		assert.True(t, rslSignatureRequired)
	})

	t.Run("base commit zero, mergeable using mixed approvals, RSL entry signature required", func(t *testing.T) {
		repo, state := createTestRepository(t, createTestStateWithThresholdPolicyAndGitHubAppTrustForMixedAttestations)

		// We need to change the directory for this test because we `checkout`
		// for older Git versions, modifying the worktree. This chdir ensures
		// that the temporary directory is used as the worktree.
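		// Reader note: as in the ref-based variant above, this subtest
		// appears to mix an app-recorded approval from "jill.doe" with a
		// reference authorization from john.doe. VerifyMergeableForCommit is
		// given a commit ID rather than a feature ref, so these zero-base
		// subtests never record an RSL entry for the feature branch.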
pwd, err := os.Getwd() if err != nil { t.Fatal(err) } if err := os.Chdir(filepath.Join(repo.GetGitDir(), "..")); err != nil { t.Fatal(err) } defer os.Chdir(pwd) //nolint:errcheck commitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, featureRefName, 1, gpgKeyBytes) featureID := commitIDs[0] commitTreeID, err := repo.GetCommitTreeID(featureID) if err != nil { t.Fatal(err) } currentAttestations, err := attestations.LoadCurrentAttestations(repo) if err != nil { t.Fatal(err) } // Set up approval attestation with "jill.doe" githubAppApproval, err := attestations.NewGitHubPullRequestApprovalAttestation(refName, gitinterface.ZeroHash.String(), commitTreeID.String(), []string{"jill.doe"}, nil) if err != nil { t.Fatal(err) } // This signer for the GitHub app is trusted in the root setup by the // policy state creator helper signer := setupSSHKeysForSigning(t, targets1KeyBytes, targets1PubKeyBytes) env, err := dsse.CreateEnvelope(githubAppApproval) if err != nil { t.Fatal(err) } env, err = dsse.SignEnvelope(testCtx, env, signer) if err != nil { t.Fatal(err) } if err := currentAttestations.SetGitHubPullRequestApprovalAttestation(repo, env, "https://github.com", 1, state.githubAppRoleName, refName, gitinterface.ZeroHash.String(), commitTreeID.String()); err != nil { t.Fatal(err) } // Set up reference authorization from "john.doe" refAuthorization, err := attestations.NewReferenceAuthorizationForCommit(refName, gitinterface.ZeroHash.String(), commitTreeID.String()) if err != nil { t.Fatal(err) } // This signer is for the SSH keys associated with john.doe signer = setupSSHKeysForSigning(t, targets2KeyBytes, targets2PubKeyBytes) env, err = dsse.CreateEnvelope(refAuthorization) if err != nil { t.Fatal(err) } env, err = dsse.SignEnvelope(testCtx, env, signer) if err != nil { t.Fatal(err) } if err := currentAttestations.SetReferenceAuthorization(repo, env, refName, gitinterface.ZeroHash.String(), commitTreeID.String()); err != nil { t.Fatal(err) } if err := currentAttestations.Commit(repo, "Add GitHub pull request approval and reference authorization", false); err != nil { t.Fatal(err) } verifier := NewPolicyVerifier(repo) rslSignatureRequired, err := verifier.VerifyMergeableForCommit(testCtx, refName, featureID) assert.Nil(t, err) assert.True(t, rslSignatureRequired) }) t.Run("base commit zero, mergeable using GitHub approval, RSL entry signature not required", func(t *testing.T) { repo, state := createTestRepository(t, createTestStateWithThresholdPolicyAndGitHubAppTrust) // We need to change the directory for this test because we `checkout` // for older Git versions, modifying the worktree. This chdir ensures // that the temporary directory is used as the worktree. 
pwd, err := os.Getwd() if err != nil { t.Fatal(err) } if err := os.Chdir(filepath.Join(repo.GetGitDir(), "..")); err != nil { t.Fatal(err) } defer os.Chdir(pwd) //nolint:errcheck commitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, featureRefName, 1, gpgKeyBytes) featureID := commitIDs[0] commitTreeID, err := repo.GetCommitTreeID(featureID) if err != nil { t.Fatal(err) } // Add approval with "jane.doe" and "john.doe" githubAppApproval, err := attestations.NewGitHubPullRequestApprovalAttestation(refName, gitinterface.ZeroHash.String(), commitTreeID.String(), []string{"jane.doe", "john.doe"}, nil) if err != nil { t.Fatal(err) } // This signer for the GitHub app is trusted in the root setup by the // policy state creator helper signer := setupSSHKeysForSigning(t, targets1KeyBytes, targets1PubKeyBytes) env, err := dsse.CreateEnvelope(githubAppApproval) if err != nil { t.Fatal(err) } env, err = dsse.SignEnvelope(testCtx, env, signer) if err != nil { t.Fatal(err) } currentAttestations, err := attestations.LoadCurrentAttestations(repo) if err != nil { t.Fatal(err) } if err := currentAttestations.SetGitHubPullRequestApprovalAttestation(repo, env, "https://github.com", 1, state.githubAppRoleName, refName, gitinterface.ZeroHash.String(), commitTreeID.String()); err != nil { t.Fatal(err) } if err := currentAttestations.Commit(repo, "Add GitHub pull request approval", false); err != nil { t.Fatal(err) } verifier := NewPolicyVerifier(repo) rslSignatureRequired, err := verifier.VerifyMergeableForCommit(testCtx, refName, featureID) assert.Nil(t, err) assert.False(t, rslSignatureRequired) }) t.Run("base commit zero, not mergeable", func(t *testing.T) { repo, state := createTestRepository(t, createTestStateWithThresholdPolicyAndGitHubAppTrust) // We need to change the directory for this test because we `checkout` // for older Git versions, modifying the worktree. This chdir ensures // that the temporary directory is used as the worktree. 
		pwd, err := os.Getwd()
		if err != nil {
			t.Fatal(err)
		}
		if err := os.Chdir(filepath.Join(repo.GetGitDir(), "..")); err != nil {
			t.Fatal(err)
		}
		defer os.Chdir(pwd) //nolint:errcheck

		commitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, featureRefName, 1, gpgKeyBytes)
		featureID := commitIDs[0]

		commitTreeID, err := repo.GetCommitTreeID(featureID)
		if err != nil {
			t.Fatal(err)
		}

		// Add approval with "alice" and "bob"
		// These are untrusted identities
		githubAppApproval, err := attestations.NewGitHubPullRequestApprovalAttestation(refName, gitinterface.ZeroHash.String(), commitTreeID.String(), []string{"alice", "bob"}, nil)
		if err != nil {
			t.Fatal(err)
		}

		// This signer for the GitHub app is trusted in the root setup by the
		// policy state creator helper
		signer := setupSSHKeysForSigning(t, targets1KeyBytes, targets1PubKeyBytes)

		env, err := dsse.CreateEnvelope(githubAppApproval)
		if err != nil {
			t.Fatal(err)
		}
		env, err = dsse.SignEnvelope(testCtx, env, signer)
		if err != nil {
			t.Fatal(err)
		}

		currentAttestations, err := attestations.LoadCurrentAttestations(repo)
		if err != nil {
			t.Fatal(err)
		}
		if err := currentAttestations.SetGitHubPullRequestApprovalAttestation(repo, env, "https://github.com", 1, state.githubAppRoleName, refName, gitinterface.ZeroHash.String(), commitTreeID.String()); err != nil {
			t.Fatal(err)
		}
		if err := currentAttestations.Commit(repo, "Add GitHub pull request approval", false); err != nil {
			t.Fatal(err)
		}

		verifier := NewPolicyVerifier(repo)
		rslSignatureRequired, err := verifier.VerifyMergeableForCommit(testCtx, refName, featureID)
		assert.ErrorIs(t, err, ErrVerificationFailed)
		assert.False(t, rslSignatureRequired)
	})

	t.Run("base commit not zero, mergeable using GitHub approval, RSL entry signature required", func(t *testing.T) {
		repo, state := createTestRepository(t, createTestStateWithThresholdPolicyAndGitHubAppTrust)

		// We need to change the directory for this test because we `checkout`
		// for older Git versions, modifying the worktree. This chdir ensures
		// that the temporary directory is used as the worktree.
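		// Reader note: the non-zero-base subtests that follow first give the
		// base branch its own commits and RSL entry, move HEAD and restore
		// the worktree, and only then build the feature branch on top of the
		// base tip before checking mergeability.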
pwd, err := os.Getwd() if err != nil { t.Fatal(err) } if err := os.Chdir(filepath.Join(repo.GetGitDir(), "..")); err != nil { t.Fatal(err) } defer os.Chdir(pwd) //nolint:errcheck baseCommitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 2, gpgKeyBytes) baseEntry := rsl.NewReferenceEntry(refName, baseCommitIDs[1]) baseEntryID := common.CreateTestRSLReferenceEntryCommit(t, repo, baseEntry, gpgKeyBytes) baseEntry.ID = baseEntryID if err := repo.SetReference("HEAD", baseCommitIDs[1]); err != nil { t.Fatal(err) } repo.RestoreWorktree(t) // Set feature to the same commit as main if err := repo.SetReference(featureRefName, baseCommitIDs[1]); err != nil { t.Fatal(err) } featureCommitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, featureRefName, 2, gpgKeyBytes) featureID := featureCommitIDs[1] commitTreeID, err := repo.GetCommitTreeID(featureID) // latest commit if err != nil { t.Fatal(err) } // Set up approval attestation with "john.doe" githubAppApproval, err := attestations.NewGitHubPullRequestApprovalAttestation(refName, baseCommitIDs[1].String(), commitTreeID.String(), []string{"john.doe"}, nil) if err != nil { t.Fatal(err) } // This signer for the GitHub app is trusted in the root setup by the // policy state creator helper signer := setupSSHKeysForSigning(t, targets1KeyBytes, targets1PubKeyBytes) env, err := dsse.CreateEnvelope(githubAppApproval) if err != nil { t.Fatal(err) } env, err = dsse.SignEnvelope(testCtx, env, signer) if err != nil { t.Fatal(err) } currentAttestations, err := attestations.LoadCurrentAttestations(repo) if err != nil { t.Fatal(err) } if err := currentAttestations.SetGitHubPullRequestApprovalAttestation(repo, env, "https://github.com", 1, state.githubAppRoleName, refName, baseCommitIDs[1].String(), commitTreeID.String()); err != nil { t.Fatal(err) } if err := currentAttestations.Commit(repo, "Add GitHub pull request approval", false); err != nil { t.Fatal(err) } verifier := NewPolicyVerifier(repo) rslSignatureRequired, err := verifier.VerifyMergeableForCommit(testCtx, refName, featureID) assert.Nil(t, err) assert.True(t, rslSignatureRequired) }) t.Run("base commit not zero, mergeable using mixed approvals, RSL entry signature required", func(t *testing.T) { repo, state := createTestRepository(t, createTestStateWithThresholdPolicyAndGitHubAppTrustForMixedAttestations) // We need to change the directory for this test because we `checkout` // for older Git versions, modifying the worktree. This chdir ensures // that the temporary directory is used as the worktree. 
pwd, err := os.Getwd() if err != nil { t.Fatal(err) } if err := os.Chdir(filepath.Join(repo.GetGitDir(), "..")); err != nil { t.Fatal(err) } defer os.Chdir(pwd) //nolint:errcheck baseCommitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 2, gpgKeyBytes) baseEntry := rsl.NewReferenceEntry(refName, baseCommitIDs[1]) baseEntryID := common.CreateTestRSLReferenceEntryCommit(t, repo, baseEntry, gpgKeyBytes) baseEntry.ID = baseEntryID if err := repo.SetReference("HEAD", baseCommitIDs[1]); err != nil { t.Fatal(err) } repo.RestoreWorktree(t) // Set feature to the same commit as main if err := repo.SetReference(featureRefName, baseCommitIDs[1]); err != nil { t.Fatal(err) } featureCommitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, featureRefName, 2, gpgKeyBytes) featureID := featureCommitIDs[1] commitTreeID, err := repo.GetCommitTreeID(featureID) // latest commit if err != nil { t.Fatal(err) } currentAttestations, err := attestations.LoadCurrentAttestations(repo) if err != nil { t.Fatal(err) } // Set up approval attestation with "jill.doe" githubAppApproval, err := attestations.NewGitHubPullRequestApprovalAttestation(refName, baseCommitIDs[1].String(), commitTreeID.String(), []string{"jill.doe"}, nil) if err != nil { t.Fatal(err) } // This signer for the GitHub app is trusted in the root setup by the // policy state creator helper signer := setupSSHKeysForSigning(t, targets1KeyBytes, targets1PubKeyBytes) env, err := dsse.CreateEnvelope(githubAppApproval) if err != nil { t.Fatal(err) } env, err = dsse.SignEnvelope(testCtx, env, signer) if err != nil { t.Fatal(err) } if err := currentAttestations.SetGitHubPullRequestApprovalAttestation(repo, env, "https://github.com", 1, state.githubAppRoleName, refName, baseCommitIDs[1].String(), commitTreeID.String()); err != nil { t.Fatal(err) } // Set up reference authorization from "john.doe" refAuthorization, err := attestations.NewReferenceAuthorizationForCommit(refName, baseCommitIDs[1].String(), commitTreeID.String()) if err != nil { t.Fatal(err) } // This is the key associated with john.doe signer = setupSSHKeysForSigning(t, targets2KeyBytes, targets2PubKeyBytes) env, err = dsse.CreateEnvelope(refAuthorization) if err != nil { t.Fatal(err) } env, err = dsse.SignEnvelope(testCtx, env, signer) if err != nil { t.Fatal(err) } if err := currentAttestations.SetReferenceAuthorization(repo, env, refName, baseCommitIDs[1].String(), commitTreeID.String()); err != nil { t.Fatal(err) } if err := currentAttestations.Commit(repo, "Add GitHub pull request approval and reference authorization", false); err != nil { t.Fatal(err) } verifier := NewPolicyVerifier(repo) rslSignatureRequired, err := verifier.VerifyMergeableForCommit(testCtx, refName, featureID) assert.Nil(t, err) assert.True(t, rslSignatureRequired) }) t.Run("base commit not zero, mergeable using GitHub approval, RSL entry signature not required", func(t *testing.T) { repo, state := createTestRepository(t, createTestStateWithThresholdPolicyAndGitHubAppTrust) // We need to change the directory for this test because we `checkout` // for older Git versions, modifying the worktree. This chdir ensures // that the temporary directory is used as the worktree. 
pwd, err := os.Getwd() if err != nil { t.Fatal(err) } if err := os.Chdir(filepath.Join(repo.GetGitDir(), "..")); err != nil { t.Fatal(err) } defer os.Chdir(pwd) //nolint:errcheck baseCommitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 2, gpgKeyBytes) baseEntry := rsl.NewReferenceEntry(refName, baseCommitIDs[1]) baseEntryID := common.CreateTestRSLReferenceEntryCommit(t, repo, baseEntry, gpgKeyBytes) baseEntry.ID = baseEntryID if err := repo.SetReference("HEAD", baseCommitIDs[1]); err != nil { t.Fatal(err) } repo.RestoreWorktree(t) // Set feature to the same commit as main if err := repo.SetReference(featureRefName, baseCommitIDs[1]); err != nil { t.Fatal(err) } featureCommitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, featureRefName, 2, gpgKeyBytes) featureID := featureCommitIDs[1] commitTreeID, err := repo.GetCommitTreeID(featureID) // latest commit if err != nil { t.Fatal(err) } // Add approval with "jane.doe" and "john.doe" githubAppApproval, err := attestations.NewGitHubPullRequestApprovalAttestation(refName, baseCommitIDs[1].String(), commitTreeID.String(), []string{"john.doe", "jane.doe"}, nil) if err != nil { t.Fatal(err) } // This signer for the GitHub app is trusted in the root setup by the // policy state creator helper signer := setupSSHKeysForSigning(t, targets1KeyBytes, targets1PubKeyBytes) env, err := dsse.CreateEnvelope(githubAppApproval) if err != nil { t.Fatal(err) } env, err = dsse.SignEnvelope(testCtx, env, signer) if err != nil { t.Fatal(err) } currentAttestations, err := attestations.LoadCurrentAttestations(repo) if err != nil { t.Fatal(err) } if err := currentAttestations.SetGitHubPullRequestApprovalAttestation(repo, env, "https://github.com", 1, state.githubAppRoleName, refName, baseCommitIDs[1].String(), commitTreeID.String()); err != nil { t.Fatal(err) } if err := currentAttestations.Commit(repo, "Add GitHub pull request approval", false); err != nil { t.Fatal(err) } verifier := NewPolicyVerifier(repo) rslSignatureRequired, err := verifier.VerifyMergeableForCommit(testCtx, refName, featureID) assert.Nil(t, err) assert.False(t, rslSignatureRequired) }) t.Run("base commit not zero, not mergeable", func(t *testing.T) { repo, state := createTestRepository(t, createTestStateWithThresholdPolicyAndGitHubAppTrust) // We need to change the directory for this test because we `checkout` // for older Git versions, modifying the worktree. This chdir ensures // that the temporary directory is used as the worktree. 
pwd, err := os.Getwd() if err != nil { t.Fatal(err) } if err := os.Chdir(filepath.Join(repo.GetGitDir(), "..")); err != nil { t.Fatal(err) } defer os.Chdir(pwd) //nolint:errcheck baseCommitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 2, gpgKeyBytes) baseEntry := rsl.NewReferenceEntry(refName, baseCommitIDs[1]) baseEntryID := common.CreateTestRSLReferenceEntryCommit(t, repo, baseEntry, gpgKeyBytes) baseEntry.ID = baseEntryID if err := repo.SetReference("HEAD", baseCommitIDs[1]); err != nil { t.Fatal(err) } repo.RestoreWorktree(t) // Set feature to the same commit as main if err := repo.SetReference(featureRefName, baseCommitIDs[1]); err != nil { t.Fatal(err) } featureCommitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, featureRefName, 2, gpgKeyBytes) featureID := featureCommitIDs[1] commitTreeID, err := repo.GetCommitTreeID(featureID) // latest commit if err != nil { t.Fatal(err) } // Add approval with "alice" and "bob" // These are untrusted approvals githubAppApproval, err := attestations.NewGitHubPullRequestApprovalAttestation(refName, baseCommitIDs[1].String(), commitTreeID.String(), []string{"alice", "bob"}, nil) if err != nil { t.Fatal(err) } // This signer for the GitHub app is trusted in the root setup by the // policy state creator helper signer := setupSSHKeysForSigning(t, targets1KeyBytes, targets1PubKeyBytes) env, err := dsse.CreateEnvelope(githubAppApproval) if err != nil { t.Fatal(err) } env, err = dsse.SignEnvelope(testCtx, env, signer) if err != nil { t.Fatal(err) } currentAttestations, err := attestations.LoadCurrentAttestations(repo) if err != nil { t.Fatal(err) } if err := currentAttestations.SetGitHubPullRequestApprovalAttestation(repo, env, "https://github.com", 1, state.githubAppRoleName, refName, baseCommitIDs[1].String(), commitTreeID.String()); err != nil { t.Fatal(err) } if err := currentAttestations.Commit(repo, "Add GitHub pull request approval", false); err != nil { t.Fatal(err) } verifier := NewPolicyVerifier(repo) rslSignatureRequired, err := verifier.VerifyMergeableForCommit(testCtx, refName, featureID) assert.ErrorIs(t, err, ErrVerificationFailed) assert.False(t, rslSignatureRequired) }) t.Run("unprotected base branch", func(t *testing.T) { refName := "refs/heads/unprotected" // overriding refName repo, _ := createTestRepository(t, createTestStateWithPolicy) // We need to change the directory for this test because we `checkout` // for older Git versions, modifying the worktree. This chdir ensures // that the temporary directory is used as the worktree. 
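		// Reader note: refs/heads/unprotected is not covered by any rule in
		// the policy used here, so the check below is expected to succeed
		// trivially and no RSL entry signature is required.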
		pwd, err := os.Getwd()
		if err != nil {
			t.Fatal(err)
		}
		if err := os.Chdir(filepath.Join(repo.GetGitDir(), "..")); err != nil {
			t.Fatal(err)
		}
		defer os.Chdir(pwd) //nolint:errcheck

		baseCommitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 2, gpgKeyBytes)
		baseEntry := rsl.NewReferenceEntry(refName, baseCommitIDs[1])
		baseEntryID := common.CreateTestRSLReferenceEntryCommit(t, repo, baseEntry, gpgKeyBytes)
		baseEntry.ID = baseEntryID

		if err := repo.SetReference("HEAD", baseCommitIDs[1]); err != nil {
			t.Fatal(err)
		}

		// Set feature to the same commit as base
		if err := repo.SetReference(featureRefName, baseCommitIDs[1]); err != nil {
			t.Fatal(err)
		}
		featureCommitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, featureRefName, 2, gpgKeyBytes)
		featureID := featureCommitIDs[1]

		repo.RestoreWorktree(t)

		verifier := NewPolicyVerifier(repo)
		rslSignatureRequired, err := verifier.VerifyMergeableForCommit(testCtx, refName, featureID)
		assert.Nil(t, err)
		assert.False(t, rslSignatureRequired)
	})
}

func TestVerifyRelativeForRef(t *testing.T) {
	t.Run("no recovery", func(t *testing.T) {
		repo, _ := createTestRepository(t, createTestStateWithPolicy)
		refName := "refs/heads/main"

		commitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 1, gpgKeyBytes)
		firstEntry := rsl.NewReferenceEntry(refName, commitIDs[0])
		firstEntryID := common.CreateTestRSLReferenceEntryCommit(t, repo, firstEntry, gpgKeyBytes)
		firstEntry.ID = firstEntryID

		commitIDs = common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 1, gpgKeyBytes)
		entry := rsl.NewReferenceEntry(refName, commitIDs[0])
		entryID := common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes)
		entry.ID = entryID

		verifier := NewPolicyVerifier(repo)
		err := verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName)
		assert.Nil(t, err)

		err = verifier.VerifyRelativeForRef(testCtx, entry, firstEntry, refName)
		assert.ErrorIs(t, err, rsl.ErrRSLEntryNotFound)
	})

	t.Run("no recovery, first entry is the very first entry", func(t *testing.T) {
		repo, _ := createTestRepository(t, createTestStateWithPolicy)
		refName := "refs/heads/main"

		firstEntry, _, err := rsl.GetFirstEntry(repo)
		if err != nil {
			t.Fatal(err)
		}

		commitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 1, gpgKeyBytes)
		entry := rsl.NewReferenceEntry(refName, commitIDs[0])
		entryID := common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes)
		entry.ID = entryID

		verifier := NewPolicyVerifier(repo)
		err = verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName)
		assert.Nil(t, err)

		err = verifier.VerifyRelativeForRef(testCtx, entry, firstEntry, refName)
		assert.ErrorIs(t, err, rsl.ErrRSLEntryNotFound)
	})

	t.Run("no recovery, first entry is the very first entry but policy is not applied", func(t *testing.T) {
		repo, _ := createTestRepository(t, createTestStateWithPolicy)

		currentRSLTip, err := repo.GetReference(rsl.Ref)
		if err != nil {
			t.Fatal(err)
		}
		currentRSLTipParentIDs, err := repo.GetCommitParentIDs(currentRSLTip)
		if err != nil {
			t.Fatal(err)
		}
		if err := repo.SetReference(rsl.Ref, currentRSLTipParentIDs[0]); err != nil { // Set to parent -> this is policy staging
			t.Fatal(err)
		}

		refName := "refs/heads/main"

		firstEntry, _, err := rsl.GetFirstEntry(repo)
		if err != nil {
			t.Fatal(err)
		}

		commitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 1, gpgKeyBytes)
		entry := rsl.NewReferenceEntry(refName, commitIDs[0])
		entryID := common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes)
		entry.ID = entryID

		verifier := NewPolicyVerifier(repo)
		err =
verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName) assert.ErrorIs(t, err, ErrPolicyNotFound) }) t.Run("with recovery, commit-same, recovered by authorized user", func(t *testing.T) { repo, _ := createTestRepository(t, createTestStateWithPolicy) refName := "refs/heads/main" commitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 1, gpgKeyBytes) firstEntry := rsl.NewReferenceEntry(refName, commitIDs[0]) firstEntryID := common.CreateTestRSLReferenceEntryCommit(t, repo, firstEntry, gpgKeyBytes) firstEntry.ID = firstEntryID commitIDs = common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 1, gpgKeyBytes) entry := rsl.NewReferenceEntry(refName, commitIDs[0]) entryID := common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes) entry.ID = entryID verifier := NewPolicyVerifier(repo) err := verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName) assert.Nil(t, err) validCommitID := commitIDs[0] // track this for later commitIDs = common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 5, gpgUnauthorizedKeyBytes) entry = rsl.NewReferenceEntry(refName, commitIDs[len(commitIDs)-1]) entryID = common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgUnauthorizedKeyBytes) entry.ID = entryID // It's in an invalid state right now, error out verifier = NewPolicyVerifier(repo) err = verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName) assert.ErrorIs(t, err, ErrVerificationFailed) // Fix using the known-good commit if err := repo.SetReference(refName, validCommitID); err != nil { t.Fatal(err) } // Create a skip annotation for the invalid entry annotation := rsl.NewAnnotationEntry([]gitinterface.Hash{entryID}, true, "invalid entry") annotationID := common.CreateTestRSLAnnotationEntryCommit(t, repo, annotation, gpgKeyBytes) annotation.ID = annotationID // Create a new entry moving branch back to valid commit entry = rsl.NewReferenceEntry(refName, validCommitID) entryID = common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes) entry.ID = entryID // No error anymore verifier = NewPolicyVerifier(repo) err = verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName) assert.Nil(t, err) }) t.Run("with recovery, commit-same, recovered by unauthorized user", func(t *testing.T) { repo, _ := createTestRepository(t, createTestStateWithPolicy) refName := "refs/heads/main" commitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 1, gpgKeyBytes) firstEntry := rsl.NewReferenceEntry(refName, commitIDs[0]) firstEntryID := common.CreateTestRSLReferenceEntryCommit(t, repo, firstEntry, gpgKeyBytes) firstEntry.ID = firstEntryID commitIDs = common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 1, gpgKeyBytes) entry := rsl.NewReferenceEntry(refName, commitIDs[0]) entryID := common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes) entry.ID = entryID verifier := NewPolicyVerifier(repo) err := verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName) assert.Nil(t, err) validCommitID := commitIDs[0] // track this for later commitIDs = common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 5, gpgUnauthorizedKeyBytes) entry = rsl.NewReferenceEntry(refName, commitIDs[len(commitIDs)-1]) entryID = common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgUnauthorizedKeyBytes) entry.ID = entryID // It's in an invalid state right now, error out verifier = NewPolicyVerifier(repo) err = verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName) assert.ErrorIs(t, err, ErrVerificationFailed) // Fix using the known-good 
commit if err := repo.SetReference(refName, validCommitID); err != nil { t.Fatal(err) } // Create a skip annotation for the invalid entry annotation := rsl.NewAnnotationEntry([]gitinterface.Hash{entryID}, true, "invalid entry") annotationID := common.CreateTestRSLAnnotationEntryCommit(t, repo, annotation, gpgUnauthorizedKeyBytes) annotation.ID = annotationID // Create a new entry moving branch back to valid commit entry = rsl.NewReferenceEntry(refName, validCommitID) entryID = common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgUnauthorizedKeyBytes) entry.ID = entryID // No error anymore verifier = NewPolicyVerifier(repo) err = verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName) assert.Nil(t, err) }) t.Run("with recovery, tree-same, recovered by authorized user", func(t *testing.T) { repo, _ := createTestRepository(t, createTestStateWithPolicy) refName := "refs/heads/main" commitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 1, gpgKeyBytes) firstEntry := rsl.NewReferenceEntry(refName, commitIDs[0]) firstEntryID := common.CreateTestRSLReferenceEntryCommit(t, repo, firstEntry, gpgKeyBytes) firstEntry.ID = firstEntryID commitIDs = common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 1, gpgKeyBytes) entry := rsl.NewReferenceEntry(refName, commitIDs[0]) entryID := common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes) entry.ID = entryID verifier := NewPolicyVerifier(repo) err := verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName) assert.Nil(t, err) validCommitID := commitIDs[0] // track this for later commitIDs = common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 5, gpgUnauthorizedKeyBytes) entry = rsl.NewReferenceEntry(refName, commitIDs[len(commitIDs)-1]) entryID = common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgUnauthorizedKeyBytes) entry.ID = entryID // It's in an invalid state right now, error out verifier = NewPolicyVerifier(repo) err = verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName) assert.ErrorIs(t, err, ErrVerificationFailed) // Fix using the known-good commit's tree validTreeID, err := repo.GetCommitTreeID(validCommitID) if err != nil { t.Fatal(err) } newCommitID, err := repo.CommitUsingSpecificKey(validTreeID, refName, "Revert invalid commit\n", gpgKeyBytes) if err != nil { t.Fatal(err) } // Create a skip annotation for the invalid entry annotation := rsl.NewAnnotationEntry([]gitinterface.Hash{entryID}, true, "invalid entry") annotationID := common.CreateTestRSLAnnotationEntryCommit(t, repo, annotation, gpgKeyBytes) annotation.ID = annotationID // Create a new entry moving branch back to valid commit entry = rsl.NewReferenceEntry(refName, newCommitID) entryID = common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes) entry.ID = entryID // No error anymore verifier = NewPolicyVerifier(repo) err = verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName) assert.Nil(t, err) }) t.Run("with recovery, tree-same, recovered by unauthorized user", func(t *testing.T) { repo, _ := createTestRepository(t, createTestStateWithPolicy) refName := "refs/heads/main" commitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 1, gpgKeyBytes) firstEntry := rsl.NewReferenceEntry(refName, commitIDs[0]) firstEntryID := common.CreateTestRSLReferenceEntryCommit(t, repo, firstEntry, gpgKeyBytes) firstEntry.ID = firstEntryID commitIDs = common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 1, gpgKeyBytes) entry := rsl.NewReferenceEntry(refName, commitIDs[0]) entryID := 
common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes) entry.ID = entryID verifier := NewPolicyVerifier(repo) err := verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName) assert.Nil(t, err) validCommitID := commitIDs[0] // track this for later commitIDs = common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 5, gpgUnauthorizedKeyBytes) entry = rsl.NewReferenceEntry(refName, commitIDs[len(commitIDs)-1]) entryID = common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgUnauthorizedKeyBytes) entry.ID = entryID // It's in an invalid state right now, error out verifier = NewPolicyVerifier(repo) err = verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName) assert.ErrorIs(t, err, ErrVerificationFailed) // Fix using the known-good commit's tree validTreeID, err := repo.GetCommitTreeID(validCommitID) if err != nil { t.Fatal(err) } newCommitID, err := repo.CommitUsingSpecificKey(validTreeID, refName, "Revert invalid commit\n", gpgUnauthorizedKeyBytes) if err != nil { t.Fatal(err) } // Create a skip annotation for the invalid entry annotation := rsl.NewAnnotationEntry([]gitinterface.Hash{entryID}, true, "invalid entry") annotationID := common.CreateTestRSLAnnotationEntryCommit(t, repo, annotation, gpgUnauthorizedKeyBytes) annotation.ID = annotationID // Create a new entry moving branch back to valid commit entry = rsl.NewReferenceEntry(refName, newCommitID) entryID = common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgUnauthorizedKeyBytes) entry.ID = entryID // No error anymore verifier = NewPolicyVerifier(repo) err = verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName) assert.Nil(t, err) }) t.Run("with recovery, commit-same, multiple invalid entries, recovered by authorized user", func(t *testing.T) { repo, _ := createTestRepository(t, createTestStateWithPolicy) refName := "refs/heads/main" commitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 1, gpgKeyBytes) firstEntry := rsl.NewReferenceEntry(refName, commitIDs[0]) firstEntryID := common.CreateTestRSLReferenceEntryCommit(t, repo, firstEntry, gpgKeyBytes) firstEntry.ID = firstEntryID commitIDs = common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 1, gpgKeyBytes) entry := rsl.NewReferenceEntry(refName, commitIDs[0]) entryID := common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes) entry.ID = entryID verifier := NewPolicyVerifier(repo) err := verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName) assert.Nil(t, err) validCommitID := commitIDs[0] // track this for later commitIDs = common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 1, gpgUnauthorizedKeyBytes) entry = rsl.NewReferenceEntry(refName, commitIDs[0]) entryID = common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgUnauthorizedKeyBytes) entry.ID = entryID // It's in an invalid state right now, error out verifier = NewPolicyVerifier(repo) err = verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName) assert.ErrorIs(t, err, ErrVerificationFailed) invalidEntryIDs := []gitinterface.Hash{entryID} commitIDs = common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 5, gpgUnauthorizedKeyBytes) entry = rsl.NewReferenceEntry(refName, commitIDs[len(commitIDs)-1]) entryID = common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgUnauthorizedKeyBytes) entry.ID = entryID // It's still in an invalid state right now, error out verifier = NewPolicyVerifier(repo) err = verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName) assert.ErrorIs(t, err, ErrVerificationFailed) 
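		// Reader note: the second unauthorized entry is collected below so
		// that the skip annotation covers every invalid entry; the next
		// subtest shows that leaving one unskipped surfaces
		// ErrInvalidEntryNotSkipped instead.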
invalidEntryIDs = append(invalidEntryIDs, entryID) // Fix using the known-good commit if err := repo.SetReference(refName, validCommitID); err != nil { t.Fatal(err) } // Create a skip annotation for the invalid entries annotation := rsl.NewAnnotationEntry(invalidEntryIDs, true, "invalid entry") annotationID := common.CreateTestRSLAnnotationEntryCommit(t, repo, annotation, gpgKeyBytes) annotation.ID = annotationID // Create a new entry moving branch back to valid commit entry = rsl.NewReferenceEntry(refName, validCommitID) entryID = common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes) entry.ID = entryID // No error anymore verifier = NewPolicyVerifier(repo) err = verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName) assert.Nil(t, err) }) t.Run("with recovery, commit-same, unskipped invalid entries, recovered by authorized user", func(t *testing.T) { repo, _ := createTestRepository(t, createTestStateWithPolicy) refName := "refs/heads/main" commitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 1, gpgKeyBytes) firstEntry := rsl.NewReferenceEntry(refName, commitIDs[0]) firstEntryID := common.CreateTestRSLReferenceEntryCommit(t, repo, firstEntry, gpgKeyBytes) firstEntry.ID = firstEntryID commitIDs = common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 1, gpgKeyBytes) entry := rsl.NewReferenceEntry(refName, commitIDs[0]) entryID := common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes) entry.ID = entryID verifier := NewPolicyVerifier(repo) err := verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName) assert.Nil(t, err) validCommitID := commitIDs[0] // track this for later commitIDs = common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 1, gpgUnauthorizedKeyBytes) entry = rsl.NewReferenceEntry(refName, commitIDs[0]) entryID = common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgUnauthorizedKeyBytes) entry.ID = entryID // It's in an invalid state right now, error out verifier = NewPolicyVerifier(repo) err = verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName) assert.ErrorIs(t, err, ErrVerificationFailed) invalidEntryIDs := []gitinterface.Hash{entryID} commitIDs = common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 5, gpgUnauthorizedKeyBytes) entry = rsl.NewReferenceEntry(refName, commitIDs[len(commitIDs)-1]) entryID = common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgUnauthorizedKeyBytes) entry.ID = entryID // It's still in an invalid state right now, error out verifier = NewPolicyVerifier(repo) err = verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName) assert.ErrorIs(t, err, ErrVerificationFailed) // Fix using the known-good commit if err := repo.SetReference(refName, validCommitID); err != nil { t.Fatal(err) } // Create a skip annotation for only one invalid entry annotation := rsl.NewAnnotationEntry(invalidEntryIDs, true, "invalid entry") annotationID := common.CreateTestRSLAnnotationEntryCommit(t, repo, annotation, gpgKeyBytes) annotation.ID = annotationID // Create a new entry moving branch back to valid commit entry = rsl.NewReferenceEntry(refName, validCommitID) entryID = common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes) entry.ID = entryID // An invalid entry is not marked as skipped verifier = NewPolicyVerifier(repo) err = verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName) assert.ErrorIs(t, err, ErrInvalidEntryNotSkipped) }) t.Run("with recovery, commit-same, recovered by authorized user, last good state is due to recovery", func(t 
*testing.T) { repo, _ := createTestRepository(t, createTestStateWithPolicy) refName := "refs/heads/main" commitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 1, gpgKeyBytes) firstEntry := rsl.NewReferenceEntry(refName, commitIDs[0]) firstEntryID := common.CreateTestRSLReferenceEntryCommit(t, repo, firstEntry, gpgKeyBytes) firstEntry.ID = firstEntryID commitIDs = common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 1, gpgKeyBytes) entry := rsl.NewReferenceEntry(refName, commitIDs[0]) entryID := common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes) entry.ID = entryID verifier := NewPolicyVerifier(repo) err := verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName) assert.Nil(t, err) validCommitID := commitIDs[0] // track this for later commitIDs = common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 5, gpgUnauthorizedKeyBytes) entry = rsl.NewReferenceEntry(refName, commitIDs[len(commitIDs)-1]) entryID = common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgUnauthorizedKeyBytes) entry.ID = entryID // It's in an invalid state right now, error out verifier = NewPolicyVerifier(repo) err = verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName) assert.ErrorIs(t, err, ErrVerificationFailed) // Fix using the known-good commit if err := repo.SetReference(refName, validCommitID); err != nil { t.Fatal(err) } // Create a skip annotation for the invalid entry annotation := rsl.NewAnnotationEntry([]gitinterface.Hash{entryID}, true, "invalid entry") annotationID := common.CreateTestRSLAnnotationEntryCommit(t, repo, annotation, gpgKeyBytes) annotation.ID = annotationID // Create a new entry moving branch back to valid commit entry = rsl.NewReferenceEntry(refName, validCommitID) entryID = common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes) entry.ID = entryID // No error anymore verifier = NewPolicyVerifier(repo) err = verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName) assert.Nil(t, err) // Send it into invalid state again commitIDs = common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 5, gpgUnauthorizedKeyBytes) entry = rsl.NewReferenceEntry(refName, commitIDs[len(commitIDs)-1]) entryID = common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgUnauthorizedKeyBytes) entry.ID = entryID // It's in an invalid state right now, error out verifier = NewPolicyVerifier(repo) err = verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName) assert.ErrorIs(t, err, ErrVerificationFailed) // Fix using the known-good commit if err := repo.SetReference(refName, validCommitID); err != nil { t.Fatal(err) } // Create a skip annotation for the invalid entry annotation = rsl.NewAnnotationEntry([]gitinterface.Hash{entryID}, true, "invalid entry") annotationID = common.CreateTestRSLAnnotationEntryCommit(t, repo, annotation, gpgKeyBytes) annotation.ID = annotationID // Create a new entry moving branch back to valid commit entry = rsl.NewReferenceEntry(refName, validCommitID) entryID = common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes) entry.ID = entryID // No error anymore verifier = NewPolicyVerifier(repo) err = verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName) assert.Nil(t, err) }) t.Run("with recovery, error because recovery goes back too far, recovered by authorized user", func(t *testing.T) { repo, _ := createTestRepository(t, createTestStateWithPolicy) refName := "refs/heads/main" commitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 1, gpgKeyBytes) firstEntry := 
			rsl.NewReferenceEntry(refName, commitIDs[0])
		firstEntryID := common.CreateTestRSLReferenceEntryCommit(t, repo, firstEntry, gpgKeyBytes)
		firstEntry.ID = firstEntryID

		commitIDs = common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 5, gpgKeyBytes)
		entry := rsl.NewReferenceEntry(refName, commitIDs[len(commitIDs)-1])
		entryID := common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes)
		entry.ID = entryID

		verifier := NewPolicyVerifier(repo)
		err := verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName)
		assert.Nil(t, err)

		invalidLastGoodCommitID := commitIDs[len(commitIDs)-1]

		// Add more commits, change the number of commits to have different trees
		commitIDs = common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 4, gpgKeyBytes)
		entry = rsl.NewReferenceEntry(refName, commitIDs[len(commitIDs)-1])
		entryID = common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes)
		entry.ID = entryID

		verifier = NewPolicyVerifier(repo)
		err = verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName)
		assert.Nil(t, err)

		commitIDs = common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 3, gpgUnauthorizedKeyBytes)
		entry = rsl.NewReferenceEntry(refName, commitIDs[len(commitIDs)-1])
		entryID = common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgUnauthorizedKeyBytes)
		entry.ID = entryID

		// It's in an invalid state right now, error out
		verifier = NewPolicyVerifier(repo)
		err = verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName)
		assert.ErrorIs(t, err, ErrVerificationFailed)

		// Fix using the invalid last good commit
		if err := repo.SetReference(refName, invalidLastGoodCommitID); err != nil {
			t.Fatal(err)
		}

		// Create a skip annotation for the invalid entry
		annotation := rsl.NewAnnotationEntry([]gitinterface.Hash{entryID}, true, "invalid entry")
		annotationID := common.CreateTestRSLAnnotationEntryCommit(t, repo, annotation, gpgKeyBytes)
		annotation.ID = annotationID

		// Create a new entry moving branch back to invalid last good commit
		entry = rsl.NewReferenceEntry(refName, invalidLastGoodCommitID)
		entryID = common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes)
		entry.ID = entryID

		// The recovery goes back past the last valid state, so verification
		// must still fail
		verifier = NewPolicyVerifier(repo)
		err = verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName)
		assert.ErrorIs(t, err, ErrVerificationFailed)
	})

	t.Run("with recovery but recovered entry is also skipped, tree-same, recovered by authorized user", func(t *testing.T) {
		repo, _ := createTestRepository(t, createTestStateWithPolicy)
		refName := "refs/heads/main"

		commitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 1, gpgKeyBytes)
		firstEntry := rsl.NewReferenceEntry(refName, commitIDs[0])
		firstEntryID := common.CreateTestRSLReferenceEntryCommit(t, repo, firstEntry, gpgKeyBytes)
		firstEntry.ID = firstEntryID

		commitIDs = common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 1, gpgKeyBytes)
		entry := rsl.NewReferenceEntry(refName, commitIDs[0])
		entryID := common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes)
		entry.ID = entryID

		verifier := NewPolicyVerifier(repo)
		err := verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName)
		assert.Nil(t, err)

		validCommitID := commitIDs[0] // track this for later

		commitIDs = common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 5, gpgUnauthorizedKeyBytes)
		entry = rsl.NewReferenceEntry(refName, commitIDs[len(commitIDs)-1])
		entryID = common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgUnauthorizedKeyBytes)
		entry.ID = entryID

		// It's in an invalid state right now, error out
verifier = NewPolicyVerifier(repo) err = verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName) assert.ErrorIs(t, err, ErrVerificationFailed) // Fix using the known-good commit's tree validTreeID, err := repo.GetCommitTreeID(validCommitID) if err != nil { t.Fatal(err) } newCommitID, err := repo.CommitUsingSpecificKey(validTreeID, refName, "Revert invalid commit\n", gpgKeyBytes) if err != nil { t.Fatal(err) } // Create a skip annotation for the invalid entry annotation := rsl.NewAnnotationEntry([]gitinterface.Hash{entryID}, true, "invalid entry") annotationID := common.CreateTestRSLAnnotationEntryCommit(t, repo, annotation, gpgKeyBytes) annotation.ID = annotationID // Create a new entry moving branch back to valid commit entry = rsl.NewReferenceEntry(refName, newCommitID) entryID = common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes) entry.ID = entryID // No error anymore verifier = NewPolicyVerifier(repo) err = verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName) assert.Nil(t, err) // Skip the recovery entry as well annotation = rsl.NewAnnotationEntry([]gitinterface.Hash{entryID}, true, "invalid entry") annotationID = common.CreateTestRSLAnnotationEntryCommit(t, repo, annotation, gpgKeyBytes) annotation.ID = annotationID verifier = NewPolicyVerifier(repo) err = verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName) assert.ErrorIs(t, err, ErrVerificationFailed) }) t.Run("with annotation but no fix entry", func(t *testing.T) { repo, _ := createTestRepository(t, createTestStateWithPolicy) refName := "refs/heads/main" commitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 1, gpgKeyBytes) firstEntry := rsl.NewReferenceEntry(refName, commitIDs[0]) firstEntryID := common.CreateTestRSLReferenceEntryCommit(t, repo, firstEntry, gpgKeyBytes) firstEntry.ID = firstEntryID commitIDs = common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 1, gpgKeyBytes) entry := rsl.NewReferenceEntry(refName, commitIDs[0]) entryID := common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes) entry.ID = entryID verifier := NewPolicyVerifier(repo) err := verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName) assert.Nil(t, err) commitIDs = common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 5, gpgUnauthorizedKeyBytes) entry = rsl.NewReferenceEntry(refName, commitIDs[len(commitIDs)-1]) entryID = common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgUnauthorizedKeyBytes) entry.ID = entryID // It's in an invalid state right now, error out verifier = NewPolicyVerifier(repo) err = verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName) assert.ErrorIs(t, err, ErrVerificationFailed) // Create a skip annotation for the invalid entry annotation := rsl.NewAnnotationEntry([]gitinterface.Hash{entryID}, true, "invalid entry") annotationID := common.CreateTestRSLAnnotationEntryCommit(t, repo, annotation, gpgKeyBytes) annotation.ID = annotationID // No fix entry, error out verifier = NewPolicyVerifier(repo) err = verifier.VerifyRelativeForRef(testCtx, firstEntry, entry, refName) assert.ErrorIs(t, err, ErrVerificationFailed) }) } func TestVerifyEntry(t *testing.T) { refName := "refs/heads/main" t.Run("successful verification", func(t *testing.T) { repo, state := createTestRepository(t, createTestStateWithPolicy) commitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 1, gpgKeyBytes) entry := rsl.NewReferenceEntry(refName, commitIDs[0]) entryID := common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes) 
entry.ID = entryID err := verifyEntry(testCtx, repo, state, nil, entry) assert.Nil(t, err) }) t.Run("successful verification using persons", func(t *testing.T) { t.Setenv(tufv02.AllowV02MetadataKey, "1") t.Setenv(dev.DevModeKey, "1") repo, state := createTestRepository(t, createTestStateWithPolicyUsingPersons) commitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 1, gpgKeyBytes) entry := rsl.NewReferenceEntry(refName, commitIDs[0]) entryID := common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes) entry.ID = entryID err := verifyEntry(testCtx, repo, state, nil, entry) assert.Nil(t, err) }) t.Run("successful verification with higher threshold using v0.1 reference authorization", func(t *testing.T) { repo, state := createTestRepository(t, createTestStateWithThresholdPolicy) currentAttestations, err := attestations.LoadCurrentAttestations(repo) if err != nil { t.Fatal(err) } commitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 1, gpgKeyBytes) commitTreeID, err := repo.GetCommitTreeID(commitIDs[0]) if err != nil { t.Fatal(err) } // Create authorization for this change // We're explicitly using the old type here to ensure policy // verification still works authorization, err := authorizationsv01.NewReferenceAuthorization(refName, gitinterface.ZeroHash.String(), commitTreeID.String()) if err != nil { t.Fatal(err) } signer := setupSSHKeysForSigning(t, targets1KeyBytes, targets1PubKeyBytes) env, err := dsse.CreateEnvelope(authorization) if err != nil { t.Fatal(err) } env, err = dsse.SignEnvelope(testCtx, env, signer) if err != nil { t.Fatal(err) } if err := currentAttestations.SetReferenceAuthorization(repo, env, refName, gitinterface.ZeroHash.String(), commitTreeID.String()); err != nil { t.Fatal(err) } if err := currentAttestations.Commit(repo, "Add authorization", false); err != nil { t.Fatal(err) } currentAttestations, err = attestations.LoadCurrentAttestations(repo) if err != nil { t.Fatal(err) } entry := rsl.NewReferenceEntry(refName, commitIDs[0]) entryID := common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes) entry.ID = entryID err = verifyEntry(testCtx, repo, state, currentAttestations, entry) assert.Nil(t, err) }) t.Run("successful verification with higher threshold using latest reference authorization", func(t *testing.T) { repo, state := createTestRepository(t, createTestStateWithThresholdPolicy) currentAttestations, err := attestations.LoadCurrentAttestations(repo) if err != nil { t.Fatal(err) } commitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 1, gpgKeyBytes) commitTreeID, err := repo.GetCommitTreeID(commitIDs[0]) if err != nil { t.Fatal(err) } // Create authorization for this change // This uses the latest reference authorization version authorization, err := attestations.NewReferenceAuthorizationForCommit(refName, gitinterface.ZeroHash.String(), commitTreeID.String()) if err != nil { t.Fatal(err) } signer := setupSSHKeysForSigning(t, targets1KeyBytes, targets1PubKeyBytes) env, err := dsse.CreateEnvelope(authorization) if err != nil { t.Fatal(err) } env, err = dsse.SignEnvelope(testCtx, env, signer) if err != nil { t.Fatal(err) } if err := currentAttestations.SetReferenceAuthorization(repo, env, refName, gitinterface.ZeroHash.String(), commitTreeID.String()); err != nil { t.Fatal(err) } if err := currentAttestations.Commit(repo, "Add authorization", false); err != nil { t.Fatal(err) } currentAttestations, err = attestations.LoadCurrentAttestations(repo) if err != nil { t.Fatal(err) } entry := 
rsl.NewReferenceEntry(refName, commitIDs[0]) entryID := common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes) entry.ID = entryID err = verifyEntry(testCtx, repo, state, currentAttestations, entry) assert.Nil(t, err) }) t.Run("successful verification with higher threshold but using GitHub approval", func(t *testing.T) { t.Setenv(dev.DevModeKey, "1") t.Setenv(tufv02.AllowV02MetadataKey, "1") repo, state := createTestRepository(t, createTestStateWithThresholdPolicyAndGitHubAppTrust) currentAttestations, err := attestations.LoadCurrentAttestations(repo) if err != nil { t.Fatal(err) } // This is using the jane.doe signer commitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 1, gpgKeyBytes) commitTreeID, err := repo.GetCommitTreeID(commitIDs[0]) if err != nil { t.Fatal(err) } // Create authorization for this change using john.doe trusted as approver githubAppApproval, err := attestations.NewGitHubPullRequestApprovalAttestation(refName, gitinterface.ZeroHash.String(), commitTreeID.String(), []string{"john.doe"}, nil) if err != nil { t.Fatal(err) } // This signer for the GitHub app is trusted in the root setup by the // policy state creator helper signer := setupSSHKeysForSigning(t, targets1KeyBytes, targets1PubKeyBytes) env, err := dsse.CreateEnvelope(githubAppApproval) if err != nil { t.Fatal(err) } env, err = dsse.SignEnvelope(testCtx, env, signer) if err != nil { t.Fatal(err) } if err := currentAttestations.SetGitHubPullRequestApprovalAttestation(repo, env, "https://github.com", 1, state.githubAppRoleName, refName, gitinterface.ZeroHash.String(), commitTreeID.String()); err != nil { t.Fatal(err) } if err := currentAttestations.Commit(repo, "Add GitHub pull request approval", false); err != nil { t.Fatal(err) } currentAttestations, err = attestations.LoadCurrentAttestations(repo) if err != nil { t.Fatal(err) } entry := rsl.NewReferenceEntry(refName, commitIDs[0]) entryID := common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes) entry.ID = entryID err = verifyEntry(testCtx, repo, state, currentAttestations, entry) assert.Nil(t, err) }) t.Run("unsuccessful verification with higher threshold but using GitHub approval due to invalid app key", func(t *testing.T) { t.Setenv(dev.DevModeKey, "1") t.Setenv(tufv02.AllowV02MetadataKey, "1") repo, state := createTestRepository(t, createTestStateWithThresholdPolicyAndGitHubAppTrust) currentAttestations, err := attestations.LoadCurrentAttestations(repo) if err != nil { t.Fatal(err) } // This is using the jane.doe signer commitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 1, gpgKeyBytes) commitTreeID, err := repo.GetCommitTreeID(commitIDs[0]) if err != nil { t.Fatal(err) } // Create authorization for this change using john.doe trusted as approver githubAppApproval, err := attestations.NewGitHubPullRequestApprovalAttestation(refName, gitinterface.ZeroHash.String(), commitTreeID.String(), []string{"john.doe"}, nil) if err != nil { t.Fatal(err) } // This signer for the GitHub app is NOT trusted in the root setup by // the policy state creator helper signer := setupSSHKeysForSigning(t, targets2KeyBytes, targets2PubKeyBytes) env, err := dsse.CreateEnvelope(githubAppApproval) if err != nil { t.Fatal(err) } env, err = dsse.SignEnvelope(testCtx, env, signer) if err != nil { t.Fatal(err) } if err := currentAttestations.SetGitHubPullRequestApprovalAttestation(repo, env, "https://github.com", 1, state.githubAppRoleName, refName, gitinterface.ZeroHash.String(), commitTreeID.String()); err != nil { t.Fatal(err) 
} if err := currentAttestations.Commit(repo, "Add GitHub pull request approval", false); err != nil { t.Fatal(err) } currentAttestations, err = attestations.LoadCurrentAttestations(repo) if err != nil { t.Fatal(err) } entry := rsl.NewReferenceEntry(refName, commitIDs[0]) entryID := common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes) entry.ID = entryID err = verifyEntry(testCtx, repo, state, currentAttestations, entry) assert.ErrorIs(t, err, ErrVerificationFailed) }) t.Run("successful verification with higher threshold but using GitHub approval and reference authorization v0.2", func(t *testing.T) { repo, state := createTestRepository(t, createTestStateWithThresholdPolicyAndGitHubAppTrustForMixedAttestations) currentAttestations, err := attestations.LoadCurrentAttestations(repo) if err != nil { t.Fatal(err) } // This is the jane.doe principal commitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 1, gpgKeyBytes) commitTreeID, err := repo.GetCommitTreeID(commitIDs[0]) if err != nil { t.Fatal(err) } // Approved by jill.doe githubAppApproval, err := attestations.NewGitHubPullRequestApprovalAttestation(refName, gitinterface.ZeroHash.String(), commitTreeID.String(), []string{"jill.doe"}, nil) if err != nil { t.Fatal(err) } // This signer for the GitHub app is trusted in the root setup by the // policy state creator helper signer := setupSSHKeysForSigning(t, targets1KeyBytes, targets1PubKeyBytes) env, err := dsse.CreateEnvelope(githubAppApproval) if err != nil { t.Fatal(err) } env, err = dsse.SignEnvelope(testCtx, env, signer) if err != nil { t.Fatal(err) } if err := currentAttestations.SetGitHubPullRequestApprovalAttestation(repo, env, "https://github.com", 1, state.githubAppRoleName, refName, gitinterface.ZeroHash.String(), commitTreeID.String()); err != nil { t.Fatal(err) } if err := currentAttestations.Commit(repo, "Add GitHub pull request approval", false); err != nil { t.Fatal(err) } // Add reference authorization for john.doe signer = setupSSHKeysForSigning(t, targets2KeyBytes, targets2PubKeyBytes) authorization, err := attestations.NewReferenceAuthorizationForCommit(refName, gitinterface.ZeroHash.String(), commitTreeID.String()) if err != nil { t.Fatal(err) } env, err = dsse.CreateEnvelope(authorization) if err != nil { t.Fatal(err) } env, err = dsse.SignEnvelope(testCtx, env, signer) if err != nil { t.Fatal(err) } if err := currentAttestations.SetReferenceAuthorization(repo, env, refName, gitinterface.ZeroHash.String(), commitTreeID.String()); err != nil { t.Fatal(err) } if err := currentAttestations.Commit(repo, "Add authorization", false); err != nil { t.Fatal(err) } currentAttestations, err = attestations.LoadCurrentAttestations(repo) if err != nil { t.Fatal(err) } entry := rsl.NewReferenceEntry(refName, commitIDs[0]) entryID := common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes) entry.ID = entryID err = verifyEntry(testCtx, repo, state, currentAttestations, entry) assert.Nil(t, err) }) t.Run("unsuccessful verification with higher threshold but using GitHub approval from untrusted key and reference authorization v0.2", func(t *testing.T) { repo, state := createTestRepository(t, createTestStateWithThresholdPolicyAndGitHubAppTrustForMixedAttestations) currentAttestations, err := attestations.LoadCurrentAttestations(repo) if err != nil { t.Fatal(err) } // This is the jane.doe principal commitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 1, gpgKeyBytes) commitTreeID, err := repo.GetCommitTreeID(commitIDs[0]) if err != nil { 
t.Fatal(err) } // Approved by jill.doe githubAppApproval, err := attestations.NewGitHubPullRequestApprovalAttestation(refName, gitinterface.ZeroHash.String(), commitTreeID.String(), []string{"jill.doe"}, nil) if err != nil { t.Fatal(err) } // This signer for the GitHub app is NOT trusted in the root setup by // the policy state creator helper signer := setupSSHKeysForSigning(t, rootKeyBytes, rootPubKeyBytes) env, err := dsse.CreateEnvelope(githubAppApproval) if err != nil { t.Fatal(err) } env, err = dsse.SignEnvelope(testCtx, env, signer) if err != nil { t.Fatal(err) } if err := currentAttestations.SetGitHubPullRequestApprovalAttestation(repo, env, "https://github.com", 1, state.githubAppRoleName, refName, gitinterface.ZeroHash.String(), commitTreeID.String()); err != nil { t.Fatal(err) } if err := currentAttestations.Commit(repo, "Add GitHub pull request approval", false); err != nil { t.Fatal(err) } // Add reference authorization for john.doe signer = setupSSHKeysForSigning(t, targets2KeyBytes, targets2PubKeyBytes) authorization, err := attestations.NewReferenceAuthorizationForCommit(refName, gitinterface.ZeroHash.String(), commitTreeID.String()) if err != nil { t.Fatal(err) } env, err = dsse.CreateEnvelope(authorization) if err != nil { t.Fatal(err) } env, err = dsse.SignEnvelope(testCtx, env, signer) if err != nil { t.Fatal(err) } if err := currentAttestations.SetReferenceAuthorization(repo, env, refName, gitinterface.ZeroHash.String(), commitTreeID.String()); err != nil { t.Fatal(err) } if err := currentAttestations.Commit(repo, "Add authorization", false); err != nil { t.Fatal(err) } currentAttestations, err = attestations.LoadCurrentAttestations(repo) if err != nil { t.Fatal(err) } entry := rsl.NewReferenceEntry(refName, commitIDs[0]) entryID := common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes) entry.ID = entryID err = verifyEntry(testCtx, repo, state, currentAttestations, entry) assert.ErrorIs(t, err, ErrVerificationFailed) }) t.Run("unsuccessful verification with higher threshold but using GitHub approval", func(t *testing.T) { repo, state := createTestRepository(t, createTestStateWithThresholdPolicyAndGitHubAppTrust) currentAttestations, err := attestations.LoadCurrentAttestations(repo) if err != nil { t.Fatal(err) } commitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 1, gpgKeyBytes) commitTreeID, err := repo.GetCommitTreeID(commitIDs[0]) if err != nil { t.Fatal(err) } // Create approval for jill.doe -> NOT TRUSTED in this policy githubAppApproval, err := attestations.NewGitHubPullRequestApprovalAttestation(refName, gitinterface.ZeroHash.String(), commitTreeID.String(), []string{"jill.doe"}, nil) if err != nil { t.Fatal(err) } // This signer for the GitHub app is trusted in the root setup by the // policy state creator helper signer := setupSSHKeysForSigning(t, targets1KeyBytes, targets1PubKeyBytes) env, err := dsse.CreateEnvelope(githubAppApproval) if err != nil { t.Fatal(err) } env, err = dsse.SignEnvelope(testCtx, env, signer) if err != nil { t.Fatal(err) } if err := currentAttestations.SetGitHubPullRequestApprovalAttestation(repo, env, "https://github.com", 1, state.githubAppRoleName, refName, gitinterface.ZeroHash.String(), commitTreeID.String()); err != nil { t.Fatal(err) } if err := currentAttestations.Commit(repo, "Add GitHub pull request approval", false); err != nil { t.Fatal(err) } currentAttestations, err = attestations.LoadCurrentAttestations(repo) if err != nil { t.Fatal(err) } entry := rsl.NewReferenceEntry(refName, commitIDs[0]) 
entryID := common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes) entry.ID = entryID err = verifyEntry(testCtx, repo, state, currentAttestations, entry) assert.ErrorIs(t, err, ErrVerificationFailed) }) t.Run("unsuccessful verification with higher threshold when a person signs reference authorization and uses GitHub approval", func(t *testing.T) { repo, state := createTestRepository(t, createTestStateWithThresholdPolicyAndGitHubAppTrustForMixedAttestations) currentAttestations, err := attestations.LoadCurrentAttestations(repo) if err != nil { t.Fatal(err) } commitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 1, gpgKeyBytes) commitTreeID, err := repo.GetCommitTreeID(commitIDs[0]) if err != nil { t.Fatal(err) } // Create approval for john.doe githubAppApproval, err := attestations.NewGitHubPullRequestApprovalAttestation(refName, gitinterface.ZeroHash.String(), commitTreeID.String(), []string{"john.doe"}, nil) if err != nil { t.Fatal(err) } // This signer for the GitHub app is trusted in the root setup by the // policy state creator helper signer := setupSSHKeysForSigning(t, targets1KeyBytes, targets1PubKeyBytes) env, err := dsse.CreateEnvelope(githubAppApproval) if err != nil { t.Fatal(err) } env, err = dsse.SignEnvelope(testCtx, env, signer) if err != nil { t.Fatal(err) } if err := currentAttestations.SetGitHubPullRequestApprovalAttestation(repo, env, "https://github.com", 1, state.githubAppRoleName, refName, gitinterface.ZeroHash.String(), commitTreeID.String()); err != nil { t.Fatal(err) } if err := currentAttestations.Commit(repo, "Add GitHub pull request approval", false); err != nil { t.Fatal(err) } currentAttestations, err = attestations.LoadCurrentAttestations(repo) if err != nil { t.Fatal(err) } // Add reference authorization for john.doe signer = setupSSHKeysForSigning(t, targets2KeyBytes, targets2PubKeyBytes) authorization, err := attestations.NewReferenceAuthorizationForCommit(refName, gitinterface.ZeroHash.String(), commitTreeID.String()) if err != nil { t.Fatal(err) } env, err = dsse.CreateEnvelope(authorization) if err != nil { t.Fatal(err) } env, err = dsse.SignEnvelope(testCtx, env, signer) if err != nil { t.Fatal(err) } if err := currentAttestations.SetReferenceAuthorization(repo, env, refName, gitinterface.ZeroHash.String(), commitTreeID.String()); err != nil { t.Fatal(err) } if err := currentAttestations.Commit(repo, "Add authorization", false); err != nil { t.Fatal(err) } currentAttestations, err = attestations.LoadCurrentAttestations(repo) if err != nil { t.Fatal(err) } entry := rsl.NewReferenceEntry(refName, commitIDs[0]) entryID := common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes) entry.ID = entryID // We have an RSL signature from jane.doe, a GitHub approval from // john.doe and a reference authorization from john.doe // Insufficient to meet threshold 3 err = verifyEntry(testCtx, repo, state, currentAttestations, entry) assert.ErrorIs(t, err, ErrVerificationFailed) }) t.Run("successful verification with global threshold constraint", func(t *testing.T) { repo, state := createTestRepository(t, createTestStateWithGlobalConstraintThreshold) currentAttestations, err := attestations.LoadCurrentAttestations(repo) if err != nil { t.Fatal(err) } commitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 1, gpgKeyBytes) commitTreeID, err := repo.GetCommitTreeID(commitIDs[0]) if err != nil { t.Fatal(err) } // Create authorization for this change // This uses the latest reference authorization version authorization, err 
:= attestations.NewReferenceAuthorizationForCommit(refName, gitinterface.ZeroHash.String(), commitTreeID.String()) if err != nil { t.Fatal(err) } signer := setupSSHKeysForSigning(t, rootKeyBytes, rootPubKeyBytes) // this is trusted in the global constraint state creator env, err := dsse.CreateEnvelope(authorization) if err != nil { t.Fatal(err) } env, err = dsse.SignEnvelope(testCtx, env, signer) if err != nil { t.Fatal(err) } if err := currentAttestations.SetReferenceAuthorization(repo, env, refName, gitinterface.ZeroHash.String(), commitTreeID.String()); err != nil { t.Fatal(err) } if err := currentAttestations.Commit(repo, "Add authorization", false); err != nil { t.Fatal(err) } currentAttestations, err = attestations.LoadCurrentAttestations(repo) if err != nil { t.Fatal(err) } entry := rsl.NewReferenceEntry(refName, commitIDs[0]) entryID := common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes) entry.ID = entryID err = verifyEntry(testCtx, repo, state, currentAttestations, entry) assert.Nil(t, err) }) t.Run("unsuccessful verification with global threshold constraint", func(t *testing.T) { repo, state := createTestRepository(t, createTestStateWithGlobalConstraintThreshold) currentAttestations, err := attestations.LoadCurrentAttestations(repo) if err != nil { t.Fatal(err) } commitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 1, gpgKeyBytes) commitTreeID, err := repo.GetCommitTreeID(commitIDs[0]) if err != nil { t.Fatal(err) } // Create authorization for this change // This uses the latest reference authorization version authorization, err := attestations.NewReferenceAuthorizationForCommit(refName, gitinterface.ZeroHash.String(), commitTreeID.String()) if err != nil { t.Fatal(err) } signer := setupSSHKeysForSigning(t, targets1KeyBytes, targets1PubKeyBytes) // this is NOT trusted in the global constraint state creator env, err := dsse.CreateEnvelope(authorization) if err != nil { t.Fatal(err) } env, err = dsse.SignEnvelope(testCtx, env, signer) if err != nil { t.Fatal(err) } if err := currentAttestations.SetReferenceAuthorization(repo, env, refName, gitinterface.ZeroHash.String(), commitTreeID.String()); err != nil { t.Fatal(err) } if err := currentAttestations.Commit(repo, "Add authorization", false); err != nil { t.Fatal(err) } currentAttestations, err = attestations.LoadCurrentAttestations(repo) if err != nil { t.Fatal(err) } entry := rsl.NewReferenceEntry(refName, commitIDs[0]) entryID := common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes) entry.ID = entryID err = verifyEntry(testCtx, repo, state, currentAttestations, entry) assert.ErrorIs(t, err, ErrVerificationFailed) }) t.Run("verify block force pushes rule for protected ref", func(t *testing.T) { repo, state := createTestRepository(t, createTestStateWithGlobalConstraintBlockForcePushes) commitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 2, gpgKeyBytes) entry := rsl.NewReferenceEntry(refName, commitIDs[1]) entryID := common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes) entry.ID = entryID currentAttestations, err := attestations.LoadCurrentAttestations(repo) if err != nil { t.Fatal(err) } // Only one entry, this is fine err = verifyEntry(testCtx, repo, state, currentAttestations, entry) assert.Nil(t, err) // Add more entries commitIDs = common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 1, gpgKeyBytes) entry = rsl.NewReferenceEntry(refName, commitIDs[0]) entryID = common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes) entry.ID = 
entryID // Still fine err = verifyEntry(testCtx, repo, state, currentAttestations, entry) assert.Nil(t, err) // Rewrite history altogether // Delete ref if err := repo.SetReference(refName, gitinterface.ZeroHash); err != nil { t.Fatal(err) } // Switch up the key for good measure commitIDs = common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 2, rootKeyBytes) entry = rsl.NewReferenceEntry(refName, commitIDs[1]) entryID = common.CreateTestRSLReferenceEntryCommit(t, repo, entry, rootKeyBytes) entry.ID = entryID // Not fine err = verifyEntry(testCtx, repo, state, currentAttestations, entry) assert.ErrorIs(t, err, ErrVerificationFailed) }) t.Run("verify block force pushes rule for unprotected ref", func(t *testing.T) { refName := "refs/heads/feature" repo, state := createTestRepository(t, createTestStateWithGlobalConstraintBlockForcePushes) commitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 2, gpgKeyBytes) entry := rsl.NewReferenceEntry(refName, commitIDs[1]) entryID := common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes) entry.ID = entryID currentAttestations, err := attestations.LoadCurrentAttestations(repo) if err != nil { t.Fatal(err) } // Only one entry, this is fine err = verifyEntry(testCtx, repo, state, currentAttestations, entry) assert.Nil(t, err) // Add more entries commitIDs = common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 1, gpgKeyBytes) entry = rsl.NewReferenceEntry(refName, commitIDs[0]) entryID = common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes) entry.ID = entryID // Still fine err = verifyEntry(testCtx, repo, state, currentAttestations, entry) assert.Nil(t, err) // Rewrite history altogether // Delete ref if err := repo.SetReference(refName, gitinterface.ZeroHash); err != nil { t.Fatal(err) } // Switch up the key for good measure commitIDs = common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 2, rootKeyBytes) entry = rsl.NewReferenceEntry(refName, commitIDs[1]) entryID = common.CreateTestRSLReferenceEntryCommit(t, repo, entry, rootKeyBytes) entry.ID = entryID // Still fine; this ref is not protected err = verifyEntry(testCtx, repo, state, currentAttestations, entry) assert.Nil(t, err) }) } func TestVerifyTagEntry(t *testing.T) { t.Run("no tag specific policy", func(t *testing.T) { repo, policy := createTestRepository(t, createTestStateWithPolicy) refName := "refs/heads/main" commitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 3, gpgKeyBytes) entry := rsl.NewReferenceEntry(refName, commitIDs[len(commitIDs)-1]) entryID := common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes) entry.ID = entryID tagName := "v1" tagID := common.CreateTestSignedTag(t, repo, tagName, commitIDs[len(commitIDs)-1], gpgKeyBytes) entry = rsl.NewReferenceEntry(gitinterface.TagReferenceName(tagName), tagID) entryID = common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes) entry.ID = entryID err := verifyTagEntry(testCtx, repo, policy, nil, entry) assert.Nil(t, err) }) t.Run("with tag specific policy", func(t *testing.T) { repo, policy := createTestRepository(t, createTestStateWithTagPolicy) refName := "refs/heads/main" commitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 3, gpgKeyBytes) entry := rsl.NewReferenceEntry(refName, commitIDs[len(commitIDs)-1]) entryID := common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes) entry.ID = entryID tagName := "v1" tagID := common.CreateTestSignedTag(t, repo, tagName, commitIDs[len(commitIDs)-1], gpgKeyBytes) entry = 
rsl.NewReferenceEntry(gitinterface.TagReferenceName(tagName), tagID) entryID = common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes) entry.ID = entryID err := verifyTagEntry(testCtx, repo, policy, nil, entry) assert.Nil(t, err) }) t.Run("with threshold tag specific policy", func(t *testing.T) { repo, policy := createTestRepository(t, createTestStateWithThresholdTagPolicy) refName := "refs/heads/main" commitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 3, gpgKeyBytes) entry := rsl.NewReferenceEntry(refName, commitIDs[len(commitIDs)-1]) entryID := common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes) entry.ID = entryID tagName := "v1" tagRefName := "refs/tags/v1" // Create authorization for this change // This uses the latest reference authorization version // As this is for a tag, the target is the commit the tag points to, // taken from the RSL entry we just created for it authorization, err := attestations.NewReferenceAuthorizationForTag(tagRefName, gitinterface.ZeroHash.String(), entry.TargetID.String()) if err != nil { t.Fatal(err) } signer := setupSSHKeysForSigning(t, targets1KeyBytes, targets1PubKeyBytes) env, err := dsse.CreateEnvelope(authorization) if err != nil { t.Fatal(err) } env, err = dsse.SignEnvelope(testCtx, env, signer) if err != nil { t.Fatal(err) } currentAttestations, err := attestations.LoadCurrentAttestations(repo) if err != nil { t.Fatal(err) } if err := currentAttestations.SetReferenceAuthorization(repo, env, tagRefName, gitinterface.ZeroHash.String(), entry.TargetID.String()); err != nil { t.Fatal(err) } if err := currentAttestations.Commit(repo, "Add authorization", false); err != nil { t.Fatal(err) } currentAttestations, err = attestations.LoadCurrentAttestations(repo) if err != nil { t.Fatal(err) } tagID := common.CreateTestSignedTag(t, repo, tagName, commitIDs[len(commitIDs)-1], gpgKeyBytes) entry = rsl.NewReferenceEntry(gitinterface.TagReferenceName(tagName), tagID) entryID = common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes) entry.ID = entryID err = verifyTagEntry(testCtx, repo, policy, currentAttestations, entry) assert.Nil(t, err) }) t.Run("with tag specific policy, unauthorized", func(t *testing.T) { repo, policy := createTestRepository(t, createTestStateWithTagPolicyForUnauthorizedTest) refName := "refs/heads/main" commitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 3, gpgKeyBytes) entry := rsl.NewReferenceEntry(refName, commitIDs[len(commitIDs)-1]) entryID := common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes) entry.ID = entryID tagName := "v1" tagID := common.CreateTestSignedTag(t, repo, tagName, commitIDs[len(commitIDs)-1], gpgKeyBytes) entry = rsl.NewReferenceEntry(gitinterface.TagReferenceName(tagName), tagID) entryID = common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes) entry.ID = entryID err := verifyTagEntry(testCtx, repo, policy, nil, entry) assert.ErrorIs(t, err, ErrVerificationFailed) }) t.Run("with threshold tag specific policy, unauthorized", func(t *testing.T) { repo, policy := createTestRepository(t, createTestStateWithThresholdTagPolicy) refName := "refs/heads/main" commitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 3, gpgKeyBytes) entry := rsl.NewReferenceEntry(refName, commitIDs[len(commitIDs)-1]) entryID := common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes) entry.ID = entryID tagName := "v1" tagRefName := "refs/tags/v1" // Create authorization for this change // This uses the latest 
reference authorization version // As this is for a tag, the target is the commit the tag points to, // taken from the RSL entry we just created for it authorization, err := attestations.NewReferenceAuthorizationForTag(tagRefName, gitinterface.ZeroHash.String(), entry.TargetID.String()) if err != nil { t.Fatal(err) } // The policy expects targets1Key but we're signing with targets2Key signer := setupSSHKeysForSigning(t, targets2KeyBytes, targets2PubKeyBytes) env, err := dsse.CreateEnvelope(authorization) if err != nil { t.Fatal(err) } env, err = dsse.SignEnvelope(testCtx, env, signer) if err != nil { t.Fatal(err) } currentAttestations, err := attestations.LoadCurrentAttestations(repo) if err != nil { t.Fatal(err) } if err := currentAttestations.SetReferenceAuthorization(repo, env, tagRefName, gitinterface.ZeroHash.String(), entry.TargetID.String()); err != nil { t.Fatal(err) } if err := currentAttestations.Commit(repo, "Add authorization", false); err != nil { t.Fatal(err) } currentAttestations, err = attestations.LoadCurrentAttestations(repo) if err != nil { t.Fatal(err) } tagID := common.CreateTestSignedTag(t, repo, tagName, commitIDs[len(commitIDs)-1], gpgKeyBytes) entry = rsl.NewReferenceEntry(gitinterface.TagReferenceName(tagName), tagID) entryID = common.CreateTestRSLReferenceEntryCommit(t, repo, entry, gpgKeyBytes) entry.ID = entryID err = verifyTagEntry(testCtx, repo, policy, currentAttestations, entry) assert.ErrorIs(t, err, ErrVerificationFailed) }) } func TestGetCommits(t *testing.T) { repo, _ := createTestRepository(t, createTestStateWithPolicy) refName := "refs/heads/main" // FIXME: this setup with RSL entries can be formalized using another // helper like createTestStateWithPolicy. The RSL could then also // incorporate policy changes and so on. 
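// Illustrative sketch (not part of the upstream test; it relies on the
// firstEntry and secondEntry reference entries created below): getCommits is
// expected to return only the commits recorded by secondEntry that were not
// already recorded by firstEntry, i.e. the new commits introduced between two
// consecutive RSL entries for the same ref:
//
//	newCommits, err := getCommits(repo, secondEntry)
//	if err != nil {
//		t.Fatal(err)
//	}
//	// newCommits covers commitIDs[1] through commitIDs[4], but not
//	// commitIDs[0], which firstEntry already recorded.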
commitIDs := common.AddNTestCommitsToSpecifiedRef(t, repo, refName, 5, gpgKeyBytes) firstEntry := rsl.NewReferenceEntry(refName, commitIDs[0]) firstEntryID := common.CreateTestRSLReferenceEntryCommit(t, repo, firstEntry, gpgKeyBytes) firstEntry.ID = firstEntryID secondEntry := rsl.NewReferenceEntry(refName, commitIDs[4]) secondEntryID := common.CreateTestRSLReferenceEntryCommit(t, repo, secondEntry, gpgKeyBytes) secondEntry.ID = secondEntryID expectedCommitIDs := []gitinterface.Hash{commitIDs[1], commitIDs[2], commitIDs[3], commitIDs[4]} sort.Slice(expectedCommitIDs, func(i, j int) bool { return expectedCommitIDs[i].String() < expectedCommitIDs[j].String() }) commitIDs, err := getCommits(repo, secondEntry) assert.Nil(t, err) assert.Equal(t, expectedCommitIDs, commitIDs) } func TestStateVerifyNewState(t *testing.T) { t.Parallel() t.Run("valid policy transition", func(t *testing.T) { t.Parallel() currentPolicy := createTestStateWithOnlyRoot(t) newPolicy := createTestStateWithOnlyRoot(t) err := currentPolicy.VerifyNewState(testCtx, newPolicy) assert.Nil(t, err) }) t.Run("invalid policy transition", func(t *testing.T) { t.Parallel() currentPolicy := createTestStateWithOnlyRoot(t) // Create invalid state signer := setupSSHKeysForSigning(t, targets1KeyBytes, targets1PubKeyBytes) key := tufv01.NewKeyFromSSLibKey(signer.MetadataKey()) rootMetadata, err := InitializeRootMetadata(key) if err != nil { t.Fatal(err) } rootEnv, err := dsse.CreateEnvelope(rootMetadata) if err != nil { t.Fatal(err) } rootEnv, err = dsse.SignEnvelope(testCtx, rootEnv, signer) if err != nil { t.Fatal(err) } newPolicy := &State{ RootPublicKeys: []tuf.Principal{key}, RootEnvelope: rootEnv, DelegationEnvelopes: map[string]*sslibdsse.Envelope{}, } err = currentPolicy.VerifyNewState(testCtx, newPolicy) assert.ErrorIs(t, err, ErrVerifierConditionsUnmet) }) } gittuf-0.9.0/internal/rsl/000077500000000000000000000000001475150141000154345ustar00rootroot00000000000000gittuf-0.9.0/internal/rsl/cache.go000066400000000000000000000020311475150141000170220ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package rsl import "github.com/gittuf/gittuf/internal/gitinterface" type rslCache struct { entryCache map[string]Entry parentCache map[string]string } func (r *rslCache) getEntry(id gitinterface.Hash) (Entry, bool) { entry, has := r.entryCache[id.String()] return entry, has } func (r *rslCache) setEntry(id gitinterface.Hash, entry Entry) { r.entryCache[id.String()] = entry } func (r *rslCache) getParent(id gitinterface.Hash) (gitinterface.Hash, bool, error) { parentID, has := r.parentCache[id.String()] if !has { return nil, false, nil } parentIDHash, err := gitinterface.NewHash(parentID) if err != nil { return nil, false, err } return parentIDHash, true, nil } func (r *rslCache) setParent(id, parentID gitinterface.Hash) { r.parentCache[id.String()] = parentID.String() } var cache *rslCache func newRSLCache() { cache = &rslCache{ entryCache: map[string]Entry{}, parentCache: map[string]string{}, } } func init() { newRSLCache() } gittuf-0.9.0/internal/rsl/cache_test.go000066400000000000000000000031501475150141000200640ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package rsl import ( "testing" "github.com/gittuf/gittuf/internal/gitinterface" "github.com/stretchr/testify/assert" ) func TestRSLCache(t *testing.T) { // Add test entries // Using fake hashes (these are commits in the gittuf repo itself) entry1 := 
NewReferenceEntry("refs/heads/main", gitinterface.ZeroHash) entry1.Number = 1 hash, err := gitinterface.NewHash("4dcd174e182cedf597b8a84f24ea5a53dae7e1e7") if err != nil { t.Fatal(err) } entry1.ID = hash entry2 := NewReferenceEntry("refs/heads/feature", gitinterface.ZeroHash) entry2.Number = 2 hash, err = gitinterface.NewHash("5bf80ffecacfde7e6b8281e65223b139a76160e1") if err != nil { t.Fatal(err) } entry2.ID = hash // Nothing yet in the parent cache assert.Empty(t, cache.parentCache) // Test set and get for parent-child cache cache.setParent(entry2.ID, entry1.ID) assert.Equal(t, entry1.ID.String(), cache.parentCache[entry2.ID.String()]) assert.Equal(t, 1, len(cache.parentCache)) parentID, has, err := cache.getParent(entry2.ID) assert.Nil(t, err) assert.Equal(t, entry1.ID.String(), parentID.String()) assert.True(t, has) _, has, err = cache.getParent(entry1.ID) // not in cache assert.Nil(t, err) assert.False(t, has) // Nothing yet in the entry cache assert.Empty(t, cache.entryCache) // Test set and get for entry cache cache.setEntry(entry1.ID, entry1) assert.Equal(t, entry1, cache.entryCache[entry1.ID.String()]) assert.Equal(t, 1, len(cache.entryCache)) entry, has := cache.getEntry(entry1.ID) assert.Equal(t, entry1, entry) assert.True(t, has) } gittuf-0.9.0/internal/rsl/options.go000066400000000000000000000062341475150141000174630ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package rsl import "github.com/gittuf/gittuf/internal/gitinterface" type GetLatestReferenceUpdaterEntryOptions struct { Reference string BeforeEntryID gitinterface.Hash BeforeEntryNumber uint64 UntilEntryID gitinterface.Hash UntilEntryNumber uint64 Unskipped bool NonGittuf bool IsReferenceEntry bool } type GetLatestReferenceUpdaterEntryOption func(*GetLatestReferenceUpdaterEntryOptions) // ForReference indicates that the reference entry returned must be for a // specific Git reference. func ForReference(reference string) GetLatestReferenceUpdaterEntryOption { return func(o *GetLatestReferenceUpdaterEntryOptions) { o.Reference = reference } } // BeforeEntryID searches for the matching reference entry before the specified // entry ID. It cannot be used in combination with BeforeEntryNumber. // BeforeEntryID is exclusive: the returned entry cannot be the reference entry // that matches the specified ID. func BeforeEntryID(entryID gitinterface.Hash) GetLatestReferenceUpdaterEntryOption { return func(o *GetLatestReferenceUpdaterEntryOptions) { o.BeforeEntryID = entryID } } // BeforeEntryNumber searches for the matching reference entry before the // specified entry number. It cannot be used in combination with BeforeEntryID. // BeforeEntryNumber is exclusive: the returned entry cannot be the reference // entry that matches the specified number. func BeforeEntryNumber(number uint64) GetLatestReferenceUpdaterEntryOption { return func(o *GetLatestReferenceUpdaterEntryOptions) { o.BeforeEntryNumber = number } } // UntilEntryID terminates the search for the desired reference entry when an // entry with the specified ID is encountered. It cannot be used in combination // with UntilEntryNumber. UntilEntryID is inclusive: the returned entry can be // the entry that matches the specified ID. 
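// A usage sketch (illustrative, not from the original source; anchorID and
// policyEntryID are placeholder RSL entry IDs supplied by the caller): the
// Before* and Until* options are typically combined with ForReference when
// walking the RSL backwards from a known entry:
//
//	entry, annotations, err := GetLatestReferenceUpdaterEntry(
//		repo,
//		ForReference("refs/heads/main"),
//		BeforeEntryID(anchorID),
//		UntilEntryID(policyEntryID),
//		IsUnskipped(),
//	)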
func UntilEntryID(entryID gitinterface.Hash) GetLatestReferenceUpdaterEntryOption { return func(o *GetLatestReferenceUpdaterEntryOptions) { o.UntilEntryID = entryID } } // UntilEntryNumber terminates the search for the desired reference entry when // an entry with the specified number is encountered. It cannot be used in // combination with UntilEntryID. UntilEntryNumber is inclusive: the returned // entry can be the entry that matches the specified number. func UntilEntryNumber(number uint64) GetLatestReferenceUpdaterEntryOption { return func(o *GetLatestReferenceUpdaterEntryOptions) { o.UntilEntryNumber = number } } // IsUnskipped ensures that the returned reference entry has not been skipped by // a subsequent annotation entry. func IsUnskipped() GetLatestReferenceUpdaterEntryOption { return func(o *GetLatestReferenceUpdaterEntryOptions) { o.Unskipped = true } } // ForNonGittufReference ensures that the returned reference entry is not for a // gittuf-specific reference. func ForNonGittufReference() GetLatestReferenceUpdaterEntryOption { return func(o *GetLatestReferenceUpdaterEntryOptions) { o.NonGittuf = true } } func IsReferenceEntry() GetLatestReferenceUpdaterEntryOption { return func(o *GetLatestReferenceUpdaterEntryOptions) { o.IsReferenceEntry = true } } gittuf-0.9.0/internal/rsl/rsl.go000066400000000000000000001206541475150141000165730ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package rsl import ( "encoding/pem" "errors" "fmt" "log/slog" "strconv" "strings" "github.com/gittuf/gittuf/internal/gitinterface" "github.com/gittuf/gittuf/internal/tuf" ) const ( Ref = "refs/gittuf/reference-state-log" NumberKey = "number" ReferenceEntryHeader = "RSL Reference Entry" RefKey = "ref" TargetIDKey = "targetID" AnnotationEntryHeader = "RSL Annotation Entry" AnnotationMessageBlockType = "MESSAGE" BeginMessage = "-----BEGIN MESSAGE-----" EndMessage = "-----END MESSAGE-----" EntryIDKey = "entryID" SkipKey = "skip" PropagationEntryHeader = "RSL Propagation Entry" UpstreamRepositoryKey = "upstreamRepository" UpstreamEntryIDKey = "upstreamEntryID" remoteTrackerRef = "refs/remotes/%s/gittuf/reference-state-log" gittufNamespacePrefix = "refs/gittuf/" gittufPolicyStagingRef = "refs/gittuf/policy-staging" ) var ( ErrRSLEntryNotFound = errors.New("unable to find RSL entry") ErrRSLBranchDetected = errors.New("potential RSL branch detected, entry has more than one parent") ErrInvalidRSLEntry = errors.New("RSL entry has invalid format or is of unexpected type") ErrRSLEntryDoesNotMatchRef = errors.New("RSL entry does not match requested ref") ErrNoRecordOfCommit = errors.New("commit has not been encountered before") ErrInvalidGetLatestReferenceUpdaterEntryOptions = errors.New("invalid options presented for getting latest reference updater entry (are both before or until conditions set or is the before number less than the until number?)") ErrCannotUseEntryNumberFilter = errors.New("current RSL entries are not numbered, cannot use number range options") ErrInvalidUntilEntryNumberCondition = errors.New("cannot meet until entry number condition") ) // RemoteTrackerRef returns the remote tracking ref for the specified remote // name. For example, for 'origin', the remote tracker ref is // 'refs/remotes/origin/gittuf/reference-state-log'. func RemoteTrackerRef(remote string) string { return fmt.Sprintf(remoteTrackerRef, remote) } // Entry is the abstract representation of an object in the RSL. 
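// A minimal sketch (illustrative, not from the original source; repo is
// assumed to be a *gitinterface.Repository) of how callers typically handle
// the concrete entry types behind this interface:
//
//	entry, err := GetLatestEntry(repo)
//	if err != nil {
//		return err
//	}
//	switch e := entry.(type) {
//	case *ReferenceEntry:
//		fmt.Println("reference entry for", e.RefName)
//	case *AnnotationEntry:
//		fmt.Println("annotation entry, skip =", e.Skip)
//	case *PropagationEntry:
//		fmt.Println("propagation entry from", e.UpstreamRepository)
//	}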
type Entry interface { GetID() gitinterface.Hash Commit(*gitinterface.Repository, bool) error GetNumber() uint64 createCommitMessage(bool) (string, error) } // ReferenceUpdaterEntry represents RSL entry types that can record an update to // a Git reference. Some examples are the reference entry and the propagation // entry. type ReferenceUpdaterEntry interface { Entry GetRefName() string GetTargetID() gitinterface.Hash } // ReferenceEntry represents a record of a reference state in the RSL. It // implements the Entry interface. type ReferenceEntry struct { // ID contains the Git hash for the commit corresponding to the entry. ID gitinterface.Hash // RefName contains the Git reference the entry is for. RefName string // TargetID contains the Git hash for the object expected at RefName. TargetID gitinterface.Hash // Number contains a strictly increasing number that hints at entry ordering. Number uint64 } // NewReferenceEntry returns a ReferenceEntry object for a normal RSL entry. func NewReferenceEntry(refName string, targetID gitinterface.Hash) *ReferenceEntry { return &ReferenceEntry{RefName: refName, TargetID: targetID} } func (e *ReferenceEntry) GetID() gitinterface.Hash { return e.ID } func (e *ReferenceEntry) GetRefName() string { return e.RefName } func (e *ReferenceEntry) GetTargetID() gitinterface.Hash { return e.TargetID } // Commit creates a commit object in the RSL for the ReferenceEntry. The // function looks up the latest committed entry in the RSL and increments the // number in the new entry. If a parent entry does not exist or the parent // entry's number is 0 (unset), the current entry's number is set to 1. The // numbering starts from 1 as 0 is used to signal the lack of numbering. func (e *ReferenceEntry) Commit(repo *gitinterface.Repository, sign bool) error { if err := e.setEntryNumber(repo); err != nil { return err } message, _ := e.createCommitMessage(true) // we have an error return for annotations, always nil here emptyTreeID, err := repo.EmptyTree() if err != nil { return err } _, err = repo.Commit(emptyTreeID, Ref, message, sign) return err } // CommitUsingSpecificKey creates a commit object in the RSL for the // ReferenceEntry. The commit is signed using the provided PEM encoded SSH or // GPG private key. This is only intended for use in gittuf's developer mode or // in tests. The function looks up the latest committed entry in the RSL and // increments the number in the new entry. If a parent entry does not exist or // the parent entry's number is 0 (unset), the current entry's number is set to // 1. The numbering starts from 1 as 0 is used to signal the lack of numbering. func (e *ReferenceEntry) CommitUsingSpecificKey(repo *gitinterface.Repository, signingKeyBytes []byte) error { if err := e.setEntryNumber(repo); err != nil { return err } message, _ := e.createCommitMessage(true) // we have an error return for annotations, always nil here emptyTreeID, err := repo.EmptyTree() if err != nil { return err } _, err = repo.CommitUsingSpecificKey(emptyTreeID, Ref, message, signingKeyBytes) return err } func (e *ReferenceEntry) GetNumber() uint64 { return e.Number } // Skipped returns true if any of the annotations mark the entry as // to-be-skipped. 
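// A usage sketch (illustrative, not from the original source; repo is assumed
// to be a *gitinterface.Repository): the annotations returned by
// GetLatestReferenceUpdaterEntry are already filtered down to the ones that
// refer to the returned entry, so they can be passed to SkippedBy directly:
//
//	updater, annotations, err := GetLatestReferenceUpdaterEntry(repo, ForReference("refs/heads/main"))
//	if err != nil {
//		return err
//	}
//	if entry, ok := updater.(*ReferenceEntry); ok && entry.SkippedBy(annotations) {
//		// this entry has been revoked by a skip annotation
//	}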
func (e *ReferenceEntry) SkippedBy(annotations []*AnnotationEntry) bool { for _, annotation := range annotations { if annotation.RefersTo(e.ID) && annotation.Skip { return true } } return false } func (e *ReferenceEntry) setEntryNumber(repo *gitinterface.Repository) error { latestEntry, err := GetLatestEntry(repo) if err == nil { e.Number = latestEntry.GetNumber() + 1 } else { if errors.Is(err, ErrRSLEntryNotFound) { // First entry e.Number = 1 } else { return err } } return nil } func (e *ReferenceEntry) createCommitMessage(includeNumber bool) (string, error) { lines := []string{ ReferenceEntryHeader, "", fmt.Sprintf("%s: %s", RefKey, e.RefName), fmt.Sprintf("%s: %s", TargetIDKey, e.TargetID.String()), } if includeNumber && e.Number > 0 { lines = append(lines, fmt.Sprintf("%s: %d", NumberKey, e.Number)) } return strings.Join(lines, "\n"), nil } // commitWithoutNumber is used to test the RSL's support for entry numbers in // repositories that switch from not having numbered entries to having numbered // entries. func (e *ReferenceEntry) commitWithoutNumber(repo *gitinterface.Repository) error { message, _ := e.createCommitMessage(true) // we have an error return for annotations, always nil here emptyTreeID, err := repo.EmptyTree() if err != nil { return err } _, err = repo.Commit(emptyTreeID, Ref, message, false) return err } // AnnotationEntry is a type of RSL record that references prior items in the // RSL. It can be used to add extra information for the referenced items. // Annotations can also be used to "skip", i.e. revoke, the referenced items. It // implements the Entry interface. type AnnotationEntry struct { // ID contains the Git hash for the commit corresponding to the annotation. ID gitinterface.Hash // RSLEntryIDs contains one or more Git hashes for the RSL entries the annotation applies to. RSLEntryIDs []gitinterface.Hash // Skip indicates if the RSLEntryIDs must be skipped during gittuf workflows. Skip bool // Message contains any messages or notes added by a user for the annotation. Message string // Number contains a strictly increasing number that hints at entry ordering. Number uint64 } // NewAnnotationEntry returns an Annotation object that applies to one or more // prior RSL entries. func NewAnnotationEntry(rslEntryIDs []gitinterface.Hash, skip bool, message string) *AnnotationEntry { return &AnnotationEntry{RSLEntryIDs: rslEntryIDs, Skip: skip, Message: message} } func (a *AnnotationEntry) GetID() gitinterface.Hash { return a.ID } // Commit creates a commit object in the RSL for the Annotation. The function // looks up the latest committed entry in the RSL and increments the number in // the new entry. If a parent entry does not exist or the parent entry's number // is 0 (unset), the current entry's number is set to 1. The numbering starts // from 1 as 0 is used to signal the lack of numbering. func (a *AnnotationEntry) Commit(repo *gitinterface.Repository, sign bool) error { // Check if referred entries exist in the RSL namespace. for _, id := range a.RSLEntryIDs { if _, err := GetEntry(repo, id); err != nil { return err } } if err := a.setEntryNumber(repo); err != nil { return err } message, err := a.createCommitMessage(true) if err != nil { return err } emptyTreeID, err := repo.EmptyTree() if err != nil { return err } _, err = repo.Commit(emptyTreeID, Ref, message, sign) return err } // CommitUsingSpecificKey creates a commit object in the RSL for the // AnnotationEntry. The commit is signed using the provided PEM encoded SSH or // GPG private key. 
This is only intended for use in gittuf's developer mode or // in tests. The function looks up the latest committed entry in the RSL and // increments the number in the new entry. If a parent entry does not exist or // the parent entry's number is 0 (unset), the current entry's number is set to // 1. The numbering starts from 1 as 0 is used to signal the lack of numbering. func (a *AnnotationEntry) CommitUsingSpecificKey(repo *gitinterface.Repository, signingKeyBytes []byte) error { // Check if referred entries exist in the RSL namespace. for _, id := range a.RSLEntryIDs { if _, err := GetEntry(repo, id); err != nil { return err } } if err := a.setEntryNumber(repo); err != nil { return err } message, err := a.createCommitMessage(true) if err != nil { return err } emptyTreeID, err := repo.EmptyTree() if err != nil { return err } _, err = repo.CommitUsingSpecificKey(emptyTreeID, Ref, message, signingKeyBytes) return err } func (a *AnnotationEntry) GetNumber() uint64 { return a.Number } // RefersTo returns true if the specified entryID is referred to by the // annotation. func (a *AnnotationEntry) RefersTo(entryID gitinterface.Hash) bool { for _, id := range a.RSLEntryIDs { if id.Equal(entryID) { return true } } return false } func (a *AnnotationEntry) setEntryNumber(repo *gitinterface.Repository) error { latestEntry, err := GetLatestEntry(repo) if err == nil { a.Number = latestEntry.GetNumber() + 1 } else { if errors.Is(err, ErrRSLEntryNotFound) { // First entry -> can an annotation actually be first? TODO a.Number = 1 } else { return err } } return err } func (a *AnnotationEntry) createCommitMessage(includeNumber bool) (string, error) { lines := []string{ AnnotationEntryHeader, "", } for _, entry := range a.RSLEntryIDs { lines = append(lines, fmt.Sprintf("%s: %s", EntryIDKey, entry.String())) } if a.Skip { lines = append(lines, fmt.Sprintf("%s: true", SkipKey)) } else { lines = append(lines, fmt.Sprintf("%s: false", SkipKey)) } if includeNumber && a.Number > 0 { lines = append(lines, fmt.Sprintf("%s: %d", NumberKey, a.Number)) } if len(a.Message) != 0 { var message strings.Builder messageBlock := pem.Block{ Type: AnnotationMessageBlockType, Bytes: []byte(a.Message), } if err := pem.Encode(&message, &messageBlock); err != nil { return "", err } lines = append(lines, strings.TrimSpace(message.String())) } return strings.Join(lines, "\n"), nil } // commitWithoutNumber is used to test the RSL's support for entry numbers in // repositories that switch from not having numbered entries to having numbered // entries. func (a *AnnotationEntry) commitWithoutNumber(repo *gitinterface.Repository) error { // Check if referred entries exist in the RSL namespace. for _, id := range a.RSLEntryIDs { if _, err := GetEntry(repo, id); err != nil { return err } } message, err := a.createCommitMessage(true) if err != nil { return err } emptyTreeID, err := repo.EmptyTree() if err != nil { return err } _, err = repo.Commit(emptyTreeID, Ref, message, false) return err } // PropagationEntry represents a record of execution of gittuf's repository // propagation workflow. It indicates which reference was updated with an // upstream repository's contents, as well as details about the upstream // repository such as its location and the specific entry whose contents were // propagated. type PropagationEntry struct { // ID contains the Git hash for the commit corresponding to the entry. ID gitinterface.Hash // RefName contains the Git reference the entry is for. 
RefName string // TargetID contains the Git hash for the object expected at RefName. TargetID gitinterface.Hash // UpstreamRepository records the location of the upstream repository. UpstreamRepository string // UpstreamEntryID records the upstream repository's RSL entry ID whose // contents were propagated. UpstreamEntryID gitinterface.Hash // Number contains a strictly increasing number that hints at entry ordering. Number uint64 } func NewPropagationEntry(refName string, targetID gitinterface.Hash, upstreamRepository string, upstreamEntryID gitinterface.Hash) *PropagationEntry { return &PropagationEntry{ RefName: refName, TargetID: targetID, UpstreamRepository: upstreamRepository, UpstreamEntryID: upstreamEntryID, } } func (e *PropagationEntry) GetID() gitinterface.Hash { return e.ID } func (e *PropagationEntry) GetRefName() string { return e.RefName } func (e *PropagationEntry) GetTargetID() gitinterface.Hash { return e.TargetID } // Commit creates a commit object in the RSL for the PropagationEntry. The // function looks up the latest committed entry in the RSL and increments the // number in the new entry. If a parent entry does not exist or the parent // entry's number is 0 (unset), the current entry's number is set to 1. The // numbering starts from 1 as 0 is used to signal the lack of numbering. func (e *PropagationEntry) Commit(repo *gitinterface.Repository, sign bool) error { if err := e.setEntryNumber(repo); err != nil { return err } message, _ := e.createCommitMessage(true) // we have an error return for annotations, always nil here emptyTreeID, err := repo.EmptyTree() if err != nil { return err } _, err = repo.Commit(emptyTreeID, Ref, message, sign) return err } // CommitUsingSpecificKey creates a commit object in the RSL for the // PropagationEntry. The commit is signed using the provided PEM encoded SSH or // GPG private key. This is only intended for use in gittuf's developer mode or // in tests. The function looks up the latest committed entry in the RSL and // increments the number in the new entry. If a parent entry does not exist or // the parent entry's number is 0 (unset), the current entry's number is set to // 1. The numbering starts from 1 as 0 is used to signal the lack of numbering. 
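// A sketch of the intended dev-mode/test usage (illustrative, not from the
// original source; targetID, upstreamEntryID, and signingKeyBytes are
// placeholders supplied by the caller):
//
//	entry := NewPropagationEntry(
//		"refs/heads/main",
//		targetID,        // tip of the local ref after propagation
//		"https://example.com/upstream/repo",
//		upstreamEntryID, // ID of the upstream RSL entry whose contents were propagated
//	)
//	if err := entry.CommitUsingSpecificKey(repo, signingKeyBytes); err != nil {
//		return err
//	}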
func (e *PropagationEntry) CommitUsingSpecificKey(repo *gitinterface.Repository, signingKeyBytes []byte) error { if err := e.setEntryNumber(repo); err != nil { return err } message, _ := e.createCommitMessage(true) // we have an error return for annotations, always nil here emptyTreeID, err := repo.EmptyTree() if err != nil { return err } _, err = repo.CommitUsingSpecificKey(emptyTreeID, Ref, message, signingKeyBytes) return err } func (e PropagationEntry) GetNumber() uint64 { return e.Number } func (e *PropagationEntry) setEntryNumber(repo *gitinterface.Repository) error { latestEntry, err := GetLatestEntry(repo) if err == nil { e.Number = latestEntry.GetNumber() + 1 } else { if errors.Is(err, ErrRSLEntryNotFound) { // First entry e.Number = 1 } else { return err } } return nil } func (e *PropagationEntry) createCommitMessage(includeNumber bool) (string, error) { lines := []string{ PropagationEntryHeader, "", fmt.Sprintf("%s: %s", RefKey, e.RefName), fmt.Sprintf("%s: %s", TargetIDKey, e.TargetID.String()), fmt.Sprintf("%s: %s", UpstreamRepositoryKey, e.UpstreamRepository), fmt.Sprintf("%s: %s", UpstreamEntryIDKey, e.UpstreamEntryID.String()), } if includeNumber && e.Number > 0 { lines = append(lines, fmt.Sprintf("%s: %d", NumberKey, e.Number)) } return strings.Join(lines, "\n"), nil } // GetEntry returns the entry corresponding to entryID. func GetEntry(repo *gitinterface.Repository, entryID gitinterface.Hash) (Entry, error) { entry, has := cache.getEntry(entryID) if has { return entry, nil } commitMessage, err := repo.GetCommitMessage(entryID) if err != nil { return nil, errors.Join(ErrRSLEntryNotFound, err) } entry, err = parseRSLEntryText(entryID, commitMessage) if err != nil { return nil, err } cache.setEntry(entryID, entry) return entry, nil } // GetParentForEntry returns the entry's parent RSL entry. func GetParentForEntry(repo *gitinterface.Repository, entry Entry) (Entry, error) { parentID, has, err := cache.getParent(entry.GetID()) if err == nil && has { // We don't need to check the parent's Number here because it was // checked when this was set in the cache return GetEntry(repo, parentID) } parentIDs, err := repo.GetCommitParentIDs(entry.GetID()) if err != nil { return nil, err } if parentIDs == nil { return nil, ErrRSLEntryNotFound } if len(parentIDs) > 1 { return nil, ErrRSLBranchDetected } parentID = parentIDs[0] parentEntry, err := GetEntry(repo, parentID) if err != nil { return nil, err } switch entry.GetNumber() { case 0, 1: // parent entry has to be 0 if parentEntry.GetNumber() != 0 { return nil, ErrInvalidRSLEntry } default: // parent entry has to be 1 less than entry if parentEntry.GetNumber() != entry.GetNumber()-1 { return nil, ErrInvalidRSLEntry } } cache.setParent(entry.GetID(), parentID) return parentEntry, nil } // GetNonGittufParentReferenceUpdaterEntryForEntry returns the first RSL // reference updater entry starting from the specified entry's parent that is // not for the gittuf namespace. 
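// A usage sketch (illustrative, not from the original source; entry is
// assumed to be an Entry previously loaded with GetEntry or GetLatestEntry):
//
//	parent, annotations, err := GetNonGittufParentReferenceUpdaterEntryForEntry(repo, entry)
//	if err != nil {
//		if errors.Is(err, ErrRSLEntryNotFound) {
//			// no earlier non-gittuf reference update exists
//		}
//		return err
//	}
//	_ = parent.GetRefName()  // e.g. "refs/heads/main"
//	_ = parent.GetTargetID() // object recorded for that ref
//	_ = annotations          // annotation entries that refer to parent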
func GetNonGittufParentReferenceUpdaterEntryForEntry(repo *gitinterface.Repository, entry Entry) (ReferenceUpdaterEntry, []*AnnotationEntry, error) { it, err := GetLatestEntry(repo) if err != nil { return nil, nil, err } parentEntry, err := GetParentForEntry(repo, entry) if err != nil { return nil, nil, err } allAnnotations := []*AnnotationEntry{} for { if annotation, isAnnotation := it.(*AnnotationEntry); isAnnotation { allAnnotations = append(allAnnotations, annotation) } it, err = GetParentForEntry(repo, it) if err != nil { return nil, nil, err } if it.GetID().Equal(parentEntry.GetID()) { break } } var targetEntry ReferenceUpdaterEntry for { switch iterator := it.(type) { case ReferenceUpdaterEntry: if !strings.HasPrefix(iterator.GetRefName(), gittufNamespacePrefix) { targetEntry = iterator } case *AnnotationEntry: allAnnotations = append(allAnnotations, iterator) } if targetEntry != nil { // we've found the target entry, stop walking the RSL break } it, err = GetParentForEntry(repo, it) if err != nil { return nil, nil, err } } annotations := filterAnnotationsForRelevantAnnotations(allAnnotations, targetEntry.GetID()) return targetEntry, annotations, nil } // GetLatestEntry returns the latest entry available locally in the RSL. func GetLatestEntry(repo *gitinterface.Repository) (Entry, error) { commitID, err := repo.GetReference(Ref) if err != nil { if errors.Is(err, gitinterface.ErrReferenceNotFound) { return nil, ErrRSLEntryNotFound } return nil, err } return GetEntry(repo, commitID) } // GetLatestReferenceUpdaterEntry returns the latest reference updater entry in // the local RSL that matches the specified conditions. func GetLatestReferenceUpdaterEntry(repo *gitinterface.Repository, opts ...GetLatestReferenceUpdaterEntryOption) (ReferenceUpdaterEntry, []*AnnotationEntry, error) { options := GetLatestReferenceUpdaterEntryOptions{ BeforeEntryID: gitinterface.ZeroHash, UntilEntryID: gitinterface.ZeroHash, } for _, fn := range opts { fn(&options) } if !options.BeforeEntryID.IsZero() && options.BeforeEntryNumber != 0 { // Only one of the Before options can be set slog.Debug("Found both before entry ID and before entry number conditions, aborting...") return nil, nil, ErrInvalidGetLatestReferenceUpdaterEntryOptions } if !options.UntilEntryID.IsZero() && options.UntilEntryNumber != 0 { // Only one of the Until options can be set slog.Debug("Found both until entry ID and until entry number conditions, aborting...") return nil, nil, ErrInvalidGetLatestReferenceUpdaterEntryOptions } if options.BeforeEntryNumber != 0 && options.UntilEntryNumber != 0 && options.BeforeEntryNumber < options.UntilEntryNumber { slog.Debug(fmt.Sprintf("Cannot search for entry before entry number %d and until entry number %d, aborting...", options.BeforeEntryNumber, options.UntilEntryNumber)) return nil, nil, ErrInvalidGetLatestReferenceUpdaterEntryOptions } allAnnotations := []*AnnotationEntry{} iteratorT, err := GetLatestEntry(repo) if err != nil { return nil, nil, err } // Sanity check before / until number conditions if iteratorT.GetNumber() == 0 { // The repository doesn't use numbers yet if options.BeforeEntryNumber != 0 || options.UntilEntryNumber != 0 { return nil, nil, ErrCannotUseEntryNumberFilter } } else if options.UntilEntryNumber != 0 && iteratorT.GetNumber() < options.UntilEntryNumber { slog.Debug(fmt.Sprintf("Latest entry's number %d is less than the until number condition %d, aborting...", iteratorT.GetNumber(), options.UntilEntryNumber)) return nil, nil, ErrInvalidUntilEntryNumberCondition } // Do 
initial walk if either before condition is set if !options.BeforeEntryID.IsZero() || options.BeforeEntryNumber != 0 { slog.Debug("Scanning RSL for search start point using before condition...") for !iteratorT.GetID().Equal(options.BeforeEntryID) && (iteratorT.GetNumber() == 0 || iteratorT.GetNumber() != options.BeforeEntryNumber) { if annotation, isAnnotation := iteratorT.(*AnnotationEntry); isAnnotation { allAnnotations = append(allAnnotations, annotation) } iteratorT, err = GetParentForEntry(repo, iteratorT) if err != nil { return nil, nil, err } if iteratorT.GetNumber() < options.UntilEntryNumber { return nil, nil, ErrInvalidGetLatestReferenceUpdaterEntryOptions } } slog.Debug(fmt.Sprintf("Found entry '%s' matching before condition...", iteratorT.GetID().String())) // we've found the before anchor entry, track it if it's an // annotation if annotation, isAnnotation := iteratorT.(*AnnotationEntry); isAnnotation { allAnnotations = append(allAnnotations, annotation) } // Set it to parent as this is the first entry considered below // While this entry may match equal until condition, that's fine // as the until condition is inclusive iteratorT, err = GetParentForEntry(repo, iteratorT) if err != nil { return nil, nil, err } } var targetEntry ReferenceUpdaterEntry for { switch iterator := iteratorT.(type) { case ReferenceUpdaterEntry: matchesConditions := true if options.Reference != "" && iterator.GetRefName() != options.Reference { matchesConditions = false } if matchesConditions && options.IsReferenceEntry { if _, isReferenceEntry := iterator.(*ReferenceEntry); !isReferenceEntry { matchesConditions = false } } // Only reference entry can be skipped referenceEntry, isReferenceEntry := iterator.(*ReferenceEntry) if isReferenceEntry { if matchesConditions && options.Unskipped && referenceEntry.SkippedBy(allAnnotations) { // SkippedBy ensures only the applicable // annotations that refer to the entry // are used matchesConditions = false } } if matchesConditions && options.NonGittuf && strings.HasPrefix(iterator.GetRefName(), gittufNamespacePrefix) { matchesConditions = false } if matchesConditions { targetEntry = iterator } case *AnnotationEntry: allAnnotations = append(allAnnotations, iterator) } if targetEntry != nil { // We've found the target entry, stop walking the RSL break } iteratorT, err = GetParentForEntry(repo, iteratorT) if err != nil { return nil, nil, err } if options.UntilEntryNumber != 0 && iteratorT.GetNumber() < options.UntilEntryNumber { return nil, nil, ErrRSLEntryNotFound } if !options.UntilEntryID.IsZero() && iteratorT.GetID().Equal(options.UntilEntryID) { return nil, nil, ErrRSLEntryNotFound } } annotations := filterAnnotationsForRelevantAnnotations(allAnnotations, targetEntry.GetID()) return targetEntry, annotations, nil } // GetFirstEntry returns the very first entry in the RSL. It is expected to be a // reference updater entry as the first entry in the RSL cannot be an // annotation. func GetFirstEntry(repo *gitinterface.Repository) (ReferenceUpdaterEntry, []*AnnotationEntry, error) { return GetFirstReferenceUpdaterEntryForRef(repo, "") } // GetFirstReferenceEntryForRef returns the very first entry in the RSL for the // specified ref. It is expected to be a reference entry as the first entry in // the RSL for a reference cannot be an annotation. 
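// A usage sketch (illustrative, not from the original source):
//
//	firstEntry, annotations, err := GetFirstReferenceUpdaterEntryForRef(repo, "refs/heads/main")
//	if err != nil {
//		return err
//	}
//	// firstEntry is the earliest RSL entry that updated refs/heads/main;
//	// annotations are the annotation entries, if any, that refer to it.
//	_ = firstEntry
//	_ = annotations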
// GetFirstEntry returns the very first entry in the RSL. It is expected to be
// a reference updater entry as the first entry in the RSL cannot be an
// annotation.
func GetFirstEntry(repo *gitinterface.Repository) (ReferenceUpdaterEntry, []*AnnotationEntry, error) {
    return GetFirstReferenceUpdaterEntryForRef(repo, "")
}

// GetFirstReferenceUpdaterEntryForRef returns the very first entry in the RSL
// for the specified ref. It is expected to be a reference updater entry as the
// first entry in the RSL for a reference cannot be an annotation.
func GetFirstReferenceUpdaterEntryForRef(repo *gitinterface.Repository, targetRef string) (ReferenceUpdaterEntry, []*AnnotationEntry, error) {
    iteratorT, err := GetLatestEntry(repo)
    if err != nil {
        return nil, nil, err
    }

    allAnnotations := []*AnnotationEntry{}

    var firstEntry ReferenceUpdaterEntry
    for {
        switch entry := iteratorT.(type) {
        case ReferenceUpdaterEntry:
            if targetRef == "" || entry.GetRefName() == targetRef {
                firstEntry = entry
            }
        case *AnnotationEntry:
            allAnnotations = append(allAnnotations, entry)
        }

        parentT, err := GetParentForEntry(repo, iteratorT)
        if err != nil {
            if errors.Is(err, ErrRSLEntryNotFound) {
                break
            }

            return nil, nil, err
        }

        iteratorT = parentT
    }

    if firstEntry == nil {
        return nil, nil, ErrRSLEntryNotFound
    }

    annotations := filterAnnotationsForRelevantAnnotations(allAnnotations, firstEntry.GetID())

    return firstEntry, annotations, nil
}

// SkipAllInvalidReferenceEntriesForRef identifies invalid RSL reference
// entries for the specified ref. Each invalid entry points to a target that is
// not reachable from the current target of the same reference, indicating that
// the reference's history has been rewritten, for example via a rebase. After
// the invalid entries are identified, an annotation entry is created that
// marks all of them as skipped.
func SkipAllInvalidReferenceEntriesForRef(repo *gitinterface.Repository, targetRef string, signCommit bool) error {
    slog.Debug("Checking if RSL entries point to commits not in the target ref...")
    latestEntry, _, err := GetLatestReferenceUpdaterEntry(repo, ForReference(targetRef))
    if err != nil {
        return err
    }

    iteratorEntry, _, err := GetLatestReferenceUpdaterEntry(repo, ForReference(targetRef), BeforeEntryID(latestEntry.GetID()))
    if err != nil {
        if errors.Is(err, ErrRSLEntryNotFound) {
            // We don't have a parent to check if invalid
            // So we assume the current one is valid
            // TODO: should we cross reference state of the branch?
            return nil
        }

        return err
    }

    iterator := Entry(iteratorEntry)

    entriesToSkip := []gitinterface.Hash{}

    for {
        if entry, ok := iterator.(*ReferenceEntry); ok {
            isAncestor, err := repo.KnowsCommit(latestEntry.GetTargetID(), entry.TargetID)
            if err != nil {
                return err
            }

            if !isAncestor {
                slog.Debug(fmt.Sprintf("For target ref %s, found RSL entry '%s' pointing to a commit, '%s', that does not exist in the target ref.", targetRef, entry.ID, entry.TargetID))
                entriesToSkip = append(entriesToSkip, entry.ID)
            } else {
                slog.Debug(fmt.Sprintf("For target ref %s, found RSL entry '%s' pointing to a commit, '%s', that exists in the target ref. No more commits to skip.", targetRef, entry.ID, entry.TargetID))
                break
            }
        }

        iterator, err = GetParentForEntry(repo, iterator)
        if err != nil {
            if errors.Is(err, ErrRSLEntryNotFound) {
                break
            }

            return err
        }
    }

    if len(entriesToSkip) == 0 {
        return nil
    }

    return NewAnnotationEntry(entriesToSkip, true, "Automated skip of reference entries pointing to non-existent entries").Commit(repo, signCommit)
}
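
// Illustrative sketch (not part of the original file): a caller typically
// records an RSL entry for a rewritten tip first and then runs the skip
// workflow, which annotates the older entries for that reference whose targets
// are no longer reachable from the new tip. The ref name here is a
// hypothetical input:
//
//    if err := SkipAllInvalidReferenceEntriesForRef(repo, "refs/heads/main", true); err != nil {
//        // handle the error
//    }
//
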
// GetFirstReferenceUpdaterEntryForCommit returns the first reference entry in
// the RSL that either records the commit itself or a descendant of the commit.
// This establishes the first time a commit was seen in the repository,
// irrespective of the ref it was associated with, and from it we can infer
// things like the active developers who could have signed the commit.
func GetFirstReferenceUpdaterEntryForCommit(repo *gitinterface.Repository, commitID gitinterface.Hash) (ReferenceUpdaterEntry, []*AnnotationEntry, error) {
    // We check entries in pairs. In the initial case, we have the latest entry
    // and its parent. At all times, the parent in the pair is being tested.
    // If the latest entry is a descendant of the target commit, we start
    // checking the parent. For the first pair where the parent entry is not
    // descended from the target commit, we return the other entry in the pair.
    firstEntry, firstAnnotations, err := GetLatestReferenceUpdaterEntry(repo, ForNonGittufReference())
    if err != nil {
        if errors.Is(err, ErrRSLEntryNotFound) {
            return nil, nil, ErrNoRecordOfCommit
        }

        return nil, nil, err
    }

    knowsCommit, err := repo.KnowsCommit(firstEntry.GetTargetID(), commitID)
    if err != nil {
        return nil, nil, err
    }
    if !knowsCommit {
        return nil, nil, ErrNoRecordOfCommit
    }

    for {
        iteratorEntry, iteratorAnnotations, err := GetNonGittufParentReferenceUpdaterEntryForEntry(repo, firstEntry)
        if err != nil {
            if errors.Is(err, ErrRSLEntryNotFound) {
                return firstEntry, firstAnnotations, nil
            }

            return nil, nil, err
        }

        knowsCommit, err := repo.KnowsCommit(iteratorEntry.GetTargetID(), commitID)
        if err != nil {
            return nil, nil, err
        }
        if !knowsCommit {
            return firstEntry, firstAnnotations, nil
        }

        firstEntry = iteratorEntry
        firstAnnotations = iteratorAnnotations
    }
}

// GetReferenceUpdaterEntriesInRange returns a list of reference entries in the
// specified range and a map of annotations that refer to each reference entry
// in the range. The annotations map is keyed by the ID of the reference entry,
// with the value being a list of annotations that apply to that reference
// entry.
func GetReferenceUpdaterEntriesInRange(repo *gitinterface.Repository, firstID, lastID gitinterface.Hash) ([]ReferenceUpdaterEntry, map[string][]*AnnotationEntry, error) {
    return GetReferenceUpdaterEntriesInRangeForRef(repo, firstID, lastID, "")
}
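
// Illustrative sketch (not part of the original file): given two entry IDs
// obtained elsewhere (for example via GetLatestReferenceUpdaterEntry), the
// range helpers return the entries in order of occurrence plus the annotations
// keyed by the entry they refer to. firstID and lastID are hypothetical inputs:
//
//    entries, annotationMap, err := GetReferenceUpdaterEntriesInRange(repo, firstID, lastID)
//    if err != nil {
//        // handle the error
//    }
//    for _, entry := range entries {
//        _ = annotationMap[entry.GetID().String()] // annotations for this entry, if any
//    }
//
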
// GetReferenceUpdaterEntriesInRangeForRef returns a list of reference entries
// for the ref in the specified range and a map of annotations that refer to
// each reference entry in the range. The annotations map is keyed by the ID of
// the reference entry, with the value being a list of annotations that apply
// to that reference entry.
func GetReferenceUpdaterEntriesInRangeForRef(repo *gitinterface.Repository, firstID, lastID gitinterface.Hash, refName string) ([]ReferenceUpdaterEntry, map[string][]*AnnotationEntry, error) {
    // We have to iterate from latest to get the annotations that refer to the
    // last requested entry
    iterator, err := GetLatestEntry(repo)
    if err != nil {
        return nil, nil, err
    }

    allAnnotations := []*AnnotationEntry{}
    for !iterator.GetID().Equal(lastID) {
        // Until we find the entry corresponding to lastID, we just store
        // annotations
        if annotation, isAnnotation := iterator.(*AnnotationEntry); isAnnotation {
            allAnnotations = append(allAnnotations, annotation)
        }

        parent, err := GetParentForEntry(repo, iterator)
        if err != nil {
            return nil, nil, err
        }

        iterator = parent
    }

    entryStack := []ReferenceUpdaterEntry{}
    inRange := map[string]bool{}
    for !iterator.GetID().Equal(firstID) {
        // Here, all items are relevant until the one corresponding to first is
        // found
        switch it := iterator.(type) {
        case ReferenceUpdaterEntry:
            if len(refName) == 0 || it.GetRefName() == refName || isRelevantGittufRef(it.GetRefName()) {
                // It's a relevant entry if:
                // a) there's no refName set, or
                // b) the entry's refName matches the set refName, or
                // c) the entry is for a gittuf namespace
                entryStack = append(entryStack, it)
                inRange[it.GetID().String()] = true
            }
        case *AnnotationEntry:
            allAnnotations = append(allAnnotations, it)
        }

        parent, err := GetParentForEntry(repo, iterator)
        if err != nil {
            return nil, nil, err
        }

        iterator = parent
    }

    // Handle the item corresponding to first explicitly
    // If it's an annotation, ignore it as it refers to something before the
    // range we care about
    if entry, isEntry := iterator.(ReferenceUpdaterEntry); isEntry {
        if len(refName) == 0 || entry.GetRefName() == refName || isRelevantGittufRef(entry.GetRefName()) {
            // It's a relevant entry if:
            // a) there's no refName set, or
            // b) the entry's refName matches the set refName, or
            // c) the entry is for a gittuf namespace
            entryStack = append(entryStack, entry)
            inRange[entry.GetID().String()] = true
        }
    }

    // For each annotation, add the annotation to each relevant entry it refers to
    // Process annotations in reverse order so that annotations are listed in
    // order of occurrence in the map
    annotationMap := map[string][]*AnnotationEntry{}
    for i := len(allAnnotations) - 1; i >= 0; i-- {
        annotation := allAnnotations[i]
        for _, entryID := range annotation.RSLEntryIDs {
            if _, relevant := inRange[entryID.String()]; relevant {
                // Annotation is relevant because the entry it refers to was in
                // the specified range
                if _, exists := annotationMap[entryID.String()]; !exists {
                    annotationMap[entryID.String()] = []*AnnotationEntry{}
                }

                annotationMap[entryID.String()] = append(annotationMap[entryID.String()], annotation)
            }
        }
    }

    // Reverse entryStack so that it's in order of occurrence rather than in
    // order of walking back the RSL
    allEntries := make([]ReferenceUpdaterEntry, 0, len(entryStack))
    for i := len(entryStack) - 1; i >= 0; i-- {
        allEntries = append(allEntries, entryStack[i])
    }

    return allEntries, annotationMap, nil
}

// PropagateChangesFromUpstreamRepository executes gittuf's propagation workflow
// to copy the contents of an upstream repository's reference into a subtree at
// the specified reference and path in the downstream repository.
func PropagateChangesFromUpstreamRepository(downstreamRepo, upstreamRepo *gitinterface.Repository, details []tuf.PropagationDirective, sign bool) error {
    // FIXME: We assume here that downstreamRepo and upstreamRepo have their
    // gittuf refs already synced.
    for _, detail := range details {
        latestUpstreamEntry, _, err := GetLatestReferenceUpdaterEntry(upstreamRepo, ForReference(detail.GetUpstreamReference()), IsUnskipped())
        if err != nil {
            if !errors.Is(err, ErrRSLEntryNotFound) {
                return err
            }

            continue
        }

        // We want to check if propagation is necessary
        // What if it's already been propagated?
        // TODO: handle divergence from latest RSL entry for ref downstream?

        currentRefTip, err := downstreamRepo.GetReference(detail.GetDownstreamReference())
        if err != nil {
            return err // TODO: should we handle this differently?
        }

        currentTreeID, err := downstreamRepo.GetCommitTreeID(currentRefTip)
        if err != nil {
            return err // TODO: should we handle this differently?
        }

        currentPathTreeID, err := downstreamRepo.GetPathIDInTree(detail.GetDownstreamPath(), currentTreeID)
        if err != nil {
            if !errors.Is(err, gitinterface.ErrTreeDoesNotHavePath) {
                return err
            }
        }

        upstreamTreeID, err := upstreamRepo.GetCommitTreeID(latestUpstreamEntry.GetTargetID())
        if err != nil {
            return err
        }

        if !currentPathTreeID.IsZero() && currentPathTreeID.Equal(upstreamTreeID) {
            // Nothing to do
            continue
        }

        commitID, err := downstreamRepo.CreateSubtreeFromUpstreamRepository(upstreamRepo, latestUpstreamEntry.GetTargetID(), detail.GetDownstreamReference(), detail.GetDownstreamPath())
        if err != nil {
            return err
        }

        if err := NewPropagationEntry(detail.GetDownstreamReference(), commitID, detail.GetUpstreamRepository(), latestUpstreamEntry.GetID()).Commit(downstreamRepo, sign); err != nil {
            return err
        }

        // TODO: error management should revert propagation entries?
        // atomicity?
    }

    return nil
}
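
// Illustrative sketch (not part of the original file): propagation is driven
// by directives naming the upstream reference and repository and the
// downstream reference and path. The concrete values below are hypothetical;
// the field names mirror the tufv01.PropagationDirective used in this
// package's propagation test:
//
//    directive := &tufv01.PropagationDirective{
//        UpstreamReference:   "refs/heads/main",
//        UpstreamRepository:  "https://git.example.com/upstream",
//        DownstreamReference: "refs/heads/main",
//        DownstreamPath:      "upstream",
//    }
//    err := PropagateChangesFromUpstreamRepository(downstreamRepo, upstreamRepo, []tuf.PropagationDirective{directive}, false)
//
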
func parseRSLEntryText(id gitinterface.Hash, text string) (Entry, error) {
    switch {
    case strings.HasPrefix(text, ReferenceEntryHeader):
        return parseReferenceEntryText(id, text)
    case strings.HasPrefix(text, AnnotationEntryHeader):
        return parseAnnotationEntryText(id, text)
    case strings.HasPrefix(text, PropagationEntryHeader):
        return parsePropagationEntryText(id, text)
    default:
        return nil, ErrInvalidRSLEntry
    }
}

func parseReferenceEntryText(id gitinterface.Hash, text string) (*ReferenceEntry, error) {
    lines := strings.Split(text, "\n")
    if len(lines) < 4 {
        return nil, ErrInvalidRSLEntry
    }
    lines = lines[2:]

    entry := &ReferenceEntry{ID: id}
    for _, l := range lines {
        l = strings.TrimSpace(l)

        ls := strings.Split(l, ":")
        if len(ls) < 2 {
            return nil, ErrInvalidRSLEntry
        }

        switch strings.TrimSpace(ls[0]) {
        case RefKey:
            entry.RefName = strings.TrimSpace(ls[1])
        case TargetIDKey:
            targetHash, err := gitinterface.NewHash(strings.TrimSpace(ls[1]))
            if err != nil {
                return nil, err
            }
            entry.TargetID = targetHash
        case NumberKey:
            number, err := strconv.ParseUint(strings.TrimSpace(ls[1]), 10, 64)
            if err != nil {
                return nil, err
            }
            entry.Number = number
        }
    }

    return entry, nil
}

func parseAnnotationEntryText(id gitinterface.Hash, text string) (*AnnotationEntry, error) {
    annotation := &AnnotationEntry{
        ID:          id,
        RSLEntryIDs: []gitinterface.Hash{},
    }

    messageBlock, _ := pem.Decode([]byte(text)) // rest doesn't seem to work when the PEM block is at the end of text, see: https://go.dev/play/p/oZysAfemA-v
    if messageBlock != nil {
        annotation.Message = string(messageBlock.Bytes)
    }

    lines := strings.Split(text, "\n")
    if len(lines) < 4 {
        return nil, ErrInvalidRSLEntry
    }
    lines = lines[2:]

    for _, l := range lines {
        l = strings.TrimSpace(l)
        if l == BeginMessage {
            break
        }

        ls := strings.Split(l, ":")
        if len(ls) < 2 {
            return nil, ErrInvalidRSLEntry
        }

        switch strings.TrimSpace(ls[0]) {
        case EntryIDKey:
            hash, err := gitinterface.NewHash(strings.TrimSpace(ls[1]))
            if err != nil {
                return nil, err
            }
            annotation.RSLEntryIDs = append(annotation.RSLEntryIDs, hash)
        case SkipKey:
            if strings.TrimSpace(ls[1]) == "true" {
                annotation.Skip = true
            } else {
                annotation.Skip = false
            }
        case NumberKey:
            number, err := strconv.ParseUint(strings.TrimSpace(ls[1]), 10, 64)
            if err != nil {
                return nil, err
            }
            annotation.Number = number
        }
    }

    return annotation, nil
}

func parsePropagationEntryText(id gitinterface.Hash, text string) (*PropagationEntry, error) {
    lines := strings.Split(text, "\n")
    if len(lines) < 6 {
        return nil, ErrInvalidRSLEntry
    }
    lines = lines[2:]

    entry := &PropagationEntry{ID: id}
    for _, l := range lines {
        l = strings.TrimSpace(l)

        ls := strings.Split(l, ":")
        if len(ls) < 2 {
            return nil, ErrInvalidRSLEntry
        }

        switch strings.TrimSpace(ls[0]) {
        case RefKey:
            entry.RefName = strings.TrimSpace(ls[1])
        case TargetIDKey:
            targetHash, err := gitinterface.NewHash(strings.TrimSpace(ls[1]))
            if err != nil {
                return nil, err
            }
            entry.TargetID = targetHash
        case UpstreamRepositoryKey:
            // The location may also have `:`, so we need to handle all items in ls
            entry.UpstreamRepository = strings.TrimSpace(strings.Join(ls[1:], ":"))
        case UpstreamEntryIDKey:
            upstreamEntryIDHash, err := gitinterface.NewHash(strings.TrimSpace(ls[1]))
            if err != nil {
                return nil, err
            }
            entry.UpstreamEntryID = upstreamEntryIDHash
        case NumberKey:
            number, err := strconv.ParseUint(strings.TrimSpace(ls[1]), 10, 64)
            if err != nil {
                return nil, err
            }
            entry.Number = number
        }
    }

    return entry, nil
}
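
// Note on the format handled by the parsers above (descriptive commentary
// derived from this file and the expected messages in rsl_test.go): an entry's
// commit message begins with its header line (ReferenceEntryHeader,
// AnnotationEntryHeader, or PropagationEntryHeader), followed by a blank line
// and then "key: value" lines. Reference entries carry RefKey, TargetIDKey,
// and NumberKey; annotation entries carry EntryIDKey, SkipKey, and NumberKey,
// with an optional message stored in a trailing PEM block that starts at
// BeginMessage; propagation entries additionally carry UpstreamRepositoryKey
// and UpstreamEntryIDKey.
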
func filterAnnotationsForRelevantAnnotations(allAnnotations []*AnnotationEntry, entryID gitinterface.Hash) []*AnnotationEntry {
    annotations := []*AnnotationEntry{}
    for _, annotation := range allAnnotations {
        annotation := annotation
        if annotation.RefersTo(entryID) {
            annotations = append(annotations, annotation)
        }
    }

    if len(annotations) == 0 {
        return nil
    }

    return annotations
}

func isRelevantGittufRef(refName string) bool {
    if !strings.HasPrefix(refName, gittufNamespacePrefix) {
        return false
    }

    if refName == gittufPolicyStagingRef {
        return false
    }

    return true
}
gittuf-0.9.0/internal/rsl/rsl_test.go000066400000000000000000002202371475150141000176300ustar00rootroot00000000000000// Copyright The gittuf Authors
// SPDX-License-Identifier: Apache-2.0

package rsl

import (
    "encoding/base64"
    "fmt"
    "math"
    "slices"
    "testing"

    "github.com/gittuf/gittuf/internal/gitinterface"
    "github.com/gittuf/gittuf/internal/tuf"
    tufv01 "github.com/gittuf/gittuf/internal/tuf/v01"
    "github.com/go-git/go-git/v5/plumbing"
    "github.com/stretchr/testify/assert"
    "github.com/stretchr/testify/require"
)

const annotationMessage = "test annotation"

func TestNewReferenceEntry(t *testing.T) {
    tempDir := t.TempDir()
    repo := gitinterface.CreateTestGitRepository(t, tempDir, false)

    if err := NewReferenceEntry("refs/heads/main", gitinterface.ZeroHash).Commit(repo, false); err != nil {
        t.Fatal(err)
    }

    currentTip, err := repo.GetReference(Ref)
    if err != nil {
        t.Fatal(err)
    }

    commitMessage, err := repo.GetCommitMessage(currentTip)
    if err != nil {
        t.Fatal(err)
    }

    parentIDs, err := repo.GetCommitParentIDs(currentTip)
    if err != nil {
        t.Fatal(err)
    }

    expectedMessage := fmt.Sprintf("%s\n\n%s: %s\n%s: %s\n%s: %d", ReferenceEntryHeader, RefKey, "refs/heads/main", TargetIDKey, gitinterface.ZeroHash.String(), NumberKey, 1)
    assert.Equal(t, expectedMessage, commitMessage)
    assert.Nil(t, parentIDs)

    if err := NewReferenceEntry("refs/heads/main", gitinterface.ZeroHash).Commit(repo, false); err != nil {
        t.Fatal(err)
    }

    newTip, err := repo.GetReference(Ref)
    if err != nil {
        t.Fatal(err)
    }

    commitMessage, err = repo.GetCommitMessage(newTip)
    if err != nil {
        t.Fatal(err)
    }

    parentIDs, err = repo.GetCommitParentIDs(newTip)
    if err != nil {
        t.Fatal(err)
    }

    expectedMessage = fmt.Sprintf("%s\n\n%s: %s\n%s: %s\n%s: %d", ReferenceEntryHeader, RefKey, "refs/heads/main", TargetIDKey, gitinterface.ZeroHash.String(), NumberKey, 2)
    assert.Equal(t, expectedMessage, commitMessage)
    assert.Contains(t, parentIDs, currentTip)
}

func TestReferenceUpdaterEntry(t *testing.T) {
    refName := "refs/heads/main"
    gitID := gitinterface.ZeroHash
    upstreamRepository := "http://git.example.com/repository"

    t.Run("reference entry", func(t *testing.T) {
        entry := Entry(NewReferenceEntry(refName, gitID))

        updaterEntry, isReferenceUpdaterEntry := entry.(ReferenceUpdaterEntry)
        assert.True(t, isReferenceUpdaterEntry)
        assert.Equal(t, refName, updaterEntry.GetRefName())
        assert.Equal(t, gitID, updaterEntry.GetTargetID())
    })

    t.Run("propagation entry", func(t *testing.T) {
        entry := Entry(NewPropagationEntry(refName, gitID, upstreamRepository, gitID))

        updaterEntry, isReferenceUpdaterEntry := entry.(ReferenceUpdaterEntry)
        assert.True(t, isReferenceUpdaterEntry)
        assert.Equal(t, refName, updaterEntry.GetRefName())
        assert.Equal(t, gitID, updaterEntry.GetTargetID())
    })
}

func TestGetLatestEntry(t *testing.T) {
    tempDir := t.TempDir()
    repo := gitinterface.CreateTestGitRepository(t, tempDir, false)

    if err := NewReferenceEntry("refs/heads/main", gitinterface.ZeroHash).Commit(repo, false); err != nil {
        t.Fatal(err)
    }

    entry, err := GetLatestEntry(repo)
    assert.Nil(t, err)
    e := entry.(*ReferenceEntry)
    assert.Equal(t,
"refs/heads/main", e.RefName) assert.Equal(t, gitinterface.ZeroHash, e.TargetID) if err := NewReferenceEntry("refs/heads/feature", gitinterface.ZeroHash).Commit(repo, false); err != nil { t.Fatal(err) } entry, err = GetLatestEntry(repo) assert.Nil(t, err) e = entry.(*ReferenceEntry) assert.Equal(t, "refs/heads/feature", e.RefName) assert.Equal(t, gitinterface.ZeroHash, e.TargetID) latestTip, err := repo.GetReference(Ref) if err != nil { t.Fatal(err) } if err := NewAnnotationEntry([]gitinterface.Hash{latestTip}, true, "This was a mistaken push!").Commit(repo, false); err != nil { t.Error(err) } entry, err = GetLatestEntry(repo) assert.Nil(t, err) a := entry.(*AnnotationEntry) assert.True(t, a.Skip) assert.Equal(t, []gitinterface.Hash{latestTip}, a.RSLEntryIDs) assert.Equal(t, "This was a mistaken push!", a.Message) } func TestGetLatestReferenceUpdaterEntry(t *testing.T) { t.Run("with ref name", func(t *testing.T) { tempDir := t.TempDir() repo := gitinterface.CreateTestGitRepository(t, tempDir, false) refName := "refs/heads/main" otherRefName := "refs/heads/feature" if err := NewReferenceEntry(refName, gitinterface.ZeroHash).Commit(repo, false); err != nil { t.Fatal(err) } rslRef, err := repo.GetReference(Ref) if err != nil { t.Fatal(err) } entry, annotations, err := GetLatestReferenceUpdaterEntry(repo, ForReference(refName)) assert.Nil(t, err) assert.Nil(t, annotations) assert.Equal(t, rslRef, entry.GetID()) if err := NewReferenceEntry(otherRefName, gitinterface.ZeroHash).Commit(repo, false); err != nil { t.Fatal(err) } entry, annotations, err = GetLatestReferenceUpdaterEntry(repo, ForReference(refName)) assert.Nil(t, err) assert.Nil(t, annotations) assert.Equal(t, rslRef, entry.GetID()) // Add annotation for the target entry if err := NewAnnotationEntry([]gitinterface.Hash{entry.GetID()}, false, annotationMessage).Commit(repo, false); err != nil { t.Fatal(err) } entry, annotations, err = GetLatestReferenceUpdaterEntry(repo, ForReference(refName)) assert.Nil(t, err) assert.Equal(t, rslRef, entry.GetID()) assertAnnotationsReferToEntry(t, entry, annotations) }) t.Run("with invalid conditions", func(t *testing.T) { tempDir := t.TempDir() repo := gitinterface.CreateTestGitRepository(t, tempDir, false) emptyTreeID, err := repo.EmptyTree() if err != nil { t.Fatal(err) } // Both Before _, _, err = GetLatestReferenceUpdaterEntry(repo, BeforeEntryID(emptyTreeID), BeforeEntryNumber(3)) assert.ErrorIs(t, err, ErrInvalidGetLatestReferenceUpdaterEntryOptions) // Both Until _, _, err = GetLatestReferenceUpdaterEntry(repo, UntilEntryID(emptyTreeID), UntilEntryNumber(3)) assert.ErrorIs(t, err, ErrInvalidGetLatestReferenceUpdaterEntryOptions) // Before number is less than until number _, _, err = GetLatestReferenceUpdaterEntry(repo, BeforeEntryNumber(2), UntilEntryNumber(29)) assert.ErrorIs(t, err, ErrInvalidGetLatestReferenceUpdaterEntryOptions) // Until number is greater than latest entry in the RSL if err := NewReferenceEntry("refs/heads/main", gitinterface.ZeroHash).Commit(repo, false); err != nil { t.Fatal(err) } _, _, err = GetLatestReferenceUpdaterEntry(repo, UntilEntryNumber(10)) assert.ErrorIs(t, err, ErrInvalidUntilEntryNumberCondition) // a reference older than until entry number is being asked for (configuration mismatch) if err := NewReferenceEntry("feature", gitinterface.ZeroHash).Commit(repo, false); err != nil { t.Fatal(err) } entry, annotations, err := GetLatestReferenceUpdaterEntry(repo, ForReference("main"), BeforeEntryID(emptyTreeID), UntilEntryNumber(2)) assert.Nil(t, annotations) 
assert.Nil(t, entry) assert.ErrorIs(t, err, ErrInvalidGetLatestReferenceUpdaterEntryOptions) }) t.Run("with ref name and until entry number", func(t *testing.T) { tempDir := t.TempDir() repo := gitinterface.CreateTestGitRepository(t, tempDir, false) refName := "refs/heads/main" otherRefName := "refs/heads/feature" if err := NewReferenceEntry(refName, gitinterface.ZeroHash).Commit(repo, false); err != nil { t.Fatal(err) } // RSL: main rslRef, err := repo.GetReference(Ref) if err != nil { t.Fatal(err) } entry, annotations, err := GetLatestReferenceUpdaterEntry(repo, ForReference(refName), UntilEntryNumber(1)) // until is inclusive assert.Nil(t, err) assert.Nil(t, annotations) assert.Equal(t, rslRef, entry.GetID()) if err := NewReferenceEntry(otherRefName, gitinterface.ZeroHash).Commit(repo, false); err != nil { t.Fatal(err) } // RSL: main <- feature entry, annotations, err = GetLatestReferenceUpdaterEntry(repo, ForReference(refName), UntilEntryNumber(1)) assert.Nil(t, err) assert.Nil(t, annotations) assert.Equal(t, rslRef, entry.GetID()) // Add annotation for the target entry if err := NewAnnotationEntry([]gitinterface.Hash{entry.GetID()}, false, annotationMessage).Commit(repo, false); err != nil { t.Fatal(err) } // RSL: main <- feature <- annotation-on-main entry, annotations, err = GetLatestReferenceUpdaterEntry(repo, ForReference(refName), UntilEntryNumber(1)) assert.Nil(t, err) assert.Equal(t, rslRef, entry.GetID()) assertAnnotationsReferToEntry(t, entry, annotations) // Set higher until limit _, _, err = GetLatestReferenceUpdaterEntry(repo, ForReference(refName), UntilEntryNumber(2)) assert.ErrorIs(t, err, ErrRSLEntryNotFound) }) t.Run("with ref name and before entry ID", func(t *testing.T) { tempDir := t.TempDir() repo := gitinterface.CreateTestGitRepository(t, tempDir, false) // RSL structure for the test // main <- feature <- main <- feature <- main testRefs := []string{"main", "feature", "main", "feature", "main"} entryIDs := []gitinterface.Hash{} for _, ref := range testRefs { if err := NewReferenceEntry(ref, gitinterface.ZeroHash).Commit(repo, false); err != nil { t.Fatal(err) } latest, err := GetLatestEntry(repo) if err != nil { t.Fatal(err) } entryIDs = append(entryIDs, latest.GetID()) } entry, annotations, err := GetLatestReferenceUpdaterEntry(repo, ForReference("main"), BeforeEntryID(entryIDs[4])) assert.Nil(t, err) assert.Nil(t, annotations) assert.Equal(t, entryIDs[2], entry.GetID()) entry, annotations, err = GetLatestReferenceUpdaterEntry(repo, ForReference("main"), BeforeEntryID(entryIDs[3])) assert.Nil(t, err) assert.Nil(t, annotations) assert.Equal(t, entryIDs[2], entry.GetID()) entry, annotations, err = GetLatestReferenceUpdaterEntry(repo, ForReference("feature"), BeforeEntryID(entryIDs[4])) assert.Nil(t, err) assert.Nil(t, annotations) assert.Equal(t, entryIDs[3], entry.GetID()) entry, annotations, err = GetLatestReferenceUpdaterEntry(repo, ForReference("feature"), BeforeEntryID(entryIDs[3])) assert.Nil(t, err) assert.Nil(t, annotations) assert.Equal(t, entryIDs[1], entry.GetID()) _, _, err = GetLatestReferenceUpdaterEntry(repo, ForReference("feature"), BeforeEntryID(entryIDs[1])) assert.ErrorIs(t, err, ErrRSLEntryNotFound) }) t.Run("with ref name, before entry ID, and annotations", func(t *testing.T) { tempDir := t.TempDir() repo := gitinterface.CreateTestGitRepository(t, tempDir, false) // RSL structure for the test // main <- A <- feature <- A <- main <- A <- feature <- A <- main <- A testRefs := []string{"main", "feature", "main", "feature", "main"} entryIDs := 
[]gitinterface.Hash{} for _, ref := range testRefs { if err := NewReferenceEntry(ref, gitinterface.ZeroHash).Commit(repo, false); err != nil { t.Fatal(err) } latest, err := GetLatestEntry(repo) if err != nil { t.Fatal(err) } entryIDs = append(entryIDs, latest.GetID()) if err := NewAnnotationEntry([]gitinterface.Hash{latest.GetID()}, false, annotationMessage).Commit(repo, false); err != nil { t.Fatal(err) } latest, err = GetLatestEntry(repo) if err != nil { t.Fatal(err) } entryIDs = append(entryIDs, latest.GetID()) } entry, annotations, err := GetLatestReferenceUpdaterEntry(repo, ForReference("main"), BeforeEntryID(entryIDs[4])) assert.Nil(t, err) assert.Equal(t, entryIDs[0], entry.GetID()) assertAnnotationsReferToEntry(t, entry, annotations) // Add an annotation at the end for some entry and see it gets pulled in // even when the anchor is for its ancestor assert.Len(t, annotations, 1) // before adding an annotation, we have just 1 if err := NewAnnotationEntry([]gitinterface.Hash{entryIDs[0]}, false, annotationMessage).Commit(repo, false); err != nil { t.Fatal(err) } entry, annotations, err = GetLatestReferenceUpdaterEntry(repo, ForReference("main"), BeforeEntryID(entryIDs[4])) assert.Nil(t, err) assert.Equal(t, entryIDs[0], entry.GetID()) assertAnnotationsReferToEntry(t, entry, annotations) assert.Len(t, annotations, 2) // now we have 2 entry, annotations, err = GetLatestReferenceUpdaterEntry(repo, ForReference("main"), BeforeEntryID(entryIDs[3])) assert.Nil(t, err) assert.Equal(t, entryIDs[0], entry.GetID()) assertAnnotationsReferToEntry(t, entry, annotations) entry, annotations, err = GetLatestReferenceUpdaterEntry(repo, ForReference("feature"), BeforeEntryID(entryIDs[6])) assert.Nil(t, err) assert.Equal(t, entryIDs[2], entry.GetID()) assertAnnotationsReferToEntry(t, entry, annotations) entry, annotations, err = GetLatestReferenceUpdaterEntry(repo, ForReference("feature"), BeforeEntryID(entryIDs[7])) assert.Nil(t, err) assert.Equal(t, entryIDs[6], entry.GetID()) assertAnnotationsReferToEntry(t, entry, annotations) _, _, err = GetLatestReferenceUpdaterEntry(repo, ForReference("feature"), BeforeEntryID(entryIDs[1])) assert.ErrorIs(t, err, ErrRSLEntryNotFound) }) t.Run("with ref name, before entry ID and until entry number", func(t *testing.T) { tempDir := t.TempDir() repo := gitinterface.CreateTestGitRepository(t, tempDir, false) // RSL structure for the test // 1 <- 2 <- 3 <- 4 <- 5 // main <- feature <- main <- feature <- main testRefs := []string{"main", "feature", "main", "feature", "main"} entryIDs := []gitinterface.Hash{} for _, ref := range testRefs { if err := NewReferenceEntry(ref, gitinterface.ZeroHash).Commit(repo, false); err != nil { t.Fatal(err) } latest, err := GetLatestEntry(repo) if err != nil { t.Fatal(err) } entryIDs = append(entryIDs, latest.GetID()) } entry, annotations, err := GetLatestReferenceUpdaterEntry(repo, ForReference("main"), BeforeEntryID(entryIDs[4]), UntilEntryNumber(1)) assert.Nil(t, err) assert.Nil(t, annotations) assert.Equal(t, entryIDs[2], entry.GetID()) entry, annotations, err = GetLatestReferenceUpdaterEntry(repo, ForReference("main"), BeforeEntryID(entryIDs[3]), UntilEntryNumber(1)) assert.Nil(t, err) assert.Nil(t, annotations) assert.Equal(t, entryIDs[2], entry.GetID()) entry, annotations, err = GetLatestReferenceUpdaterEntry(repo, ForReference("feature"), BeforeEntryID(entryIDs[4]), UntilEntryNumber(1)) assert.Nil(t, err) assert.Nil(t, annotations) assert.Equal(t, entryIDs[3], entry.GetID()) entry, annotations, err = 
GetLatestReferenceUpdaterEntry(repo, ForReference("feature"), BeforeEntryID(entryIDs[3]), UntilEntryNumber(1)) assert.Nil(t, err) assert.Nil(t, annotations) assert.Equal(t, entryIDs[1], entry.GetID()) _, _, err = GetLatestReferenceUpdaterEntry(repo, ForReference("feature"), BeforeEntryID(entryIDs[1]), UntilEntryNumber(1)) assert.ErrorIs(t, err, ErrRSLEntryNotFound) // Set higher limits to constrain search _, _, err = GetLatestReferenceUpdaterEntry(repo, ForReference("main"), BeforeEntryID(entryIDs[4]), UntilEntryNumber(5)) assert.ErrorIs(t, err, ErrRSLEntryNotFound) entry, annotations, err = GetLatestReferenceUpdaterEntry(repo, ForReference("main"), BeforeEntryID(entryIDs[4]), UntilEntryNumber(3)) // until is inclusive assert.Nil(t, err) assert.Nil(t, annotations) assert.Equal(t, entryIDs[2], entry.GetID()) _, _, err = GetLatestReferenceUpdaterEntry(repo, ForReference("feature"), BeforeEntryID(entryIDs[3]), UntilEntryNumber(3)) assert.ErrorIs(t, err, ErrRSLEntryNotFound) }) t.Run("with ref name but before entry ID is not found in initial walk", func(t *testing.T) { tempDir := t.TempDir() repo := gitinterface.CreateTestGitRepository(t, tempDir, false) // RSL structure for the test // 1 <- 2 // main <- feature testRefs := []string{"main", "feature"} for _, ref := range testRefs { if err := NewReferenceEntry(ref, gitinterface.ZeroHash).Commit(repo, false); err != nil { t.Fatal(err) } } emptyTreeID, err := repo.EmptyTree() if err != nil { t.Fatal(err) } entry, annotations, err := GetLatestReferenceUpdaterEntry(repo, ForReference("main"), BeforeEntryID(emptyTreeID)) assert.Nil(t, annotations) assert.Nil(t, entry) assert.ErrorIs(t, err, ErrRSLEntryNotFound) }) t.Run("with ref name, before entry ID, until entry number and with annotations", func(t *testing.T) { tempDir := t.TempDir() repo := gitinterface.CreateTestGitRepository(t, tempDir, false) // RSL structure for the test // main <- A <- feature <- A <- main <- A <- feature <- A <- main <- A testRefs := []string{"main", "feature", "main", "feature", "main"} entryIDs := []gitinterface.Hash{} for _, ref := range testRefs { if err := NewReferenceEntry(ref, gitinterface.ZeroHash).Commit(repo, false); err != nil { t.Fatal(err) } latest, err := GetLatestEntry(repo) if err != nil { t.Fatal(err) } entryIDs = append(entryIDs, latest.GetID()) if err := NewAnnotationEntry([]gitinterface.Hash{latest.GetID()}, false, annotationMessage).Commit(repo, false); err != nil { t.Fatal(err) } latest, err = GetLatestEntry(repo) if err != nil { t.Fatal(err) } entryIDs = append(entryIDs, latest.GetID()) } entry, annotations, err := GetLatestReferenceUpdaterEntry(repo, ForReference("main"), BeforeEntryID(entryIDs[4]), UntilEntryNumber(1)) assert.Nil(t, err) assert.Equal(t, entryIDs[0], entry.GetID()) assertAnnotationsReferToEntry(t, entry, annotations) // Add an annotation at the end for some entry and see it gets pulled in // even when the anchor is for its ancestor assert.Len(t, annotations, 1) // before adding an annotation, we have just 1 if err := NewAnnotationEntry([]gitinterface.Hash{entryIDs[0]}, false, annotationMessage).Commit(repo, false); err != nil { t.Fatal(err) } entry, annotations, err = GetLatestReferenceUpdaterEntry(repo, ForReference("main"), BeforeEntryID(entryIDs[4]), UntilEntryNumber(1)) assert.Nil(t, err) assert.Equal(t, entryIDs[0], entry.GetID()) assertAnnotationsReferToEntry(t, entry, annotations) assert.Len(t, annotations, 2) // now we have 2 entry, annotations, err = GetLatestReferenceUpdaterEntry(repo, ForReference("main"), 
BeforeEntryID(entryIDs[3]), UntilEntryNumber(1)) assert.Nil(t, err) assert.Equal(t, entryIDs[0], entry.GetID()) assertAnnotationsReferToEntry(t, entry, annotations) entry, annotations, err = GetLatestReferenceUpdaterEntry(repo, ForReference("feature"), BeforeEntryID(entryIDs[6]), UntilEntryNumber(1)) assert.Nil(t, err) assert.Equal(t, entryIDs[2], entry.GetID()) assertAnnotationsReferToEntry(t, entry, annotations) entry, annotations, err = GetLatestReferenceUpdaterEntry(repo, ForReference("feature"), BeforeEntryID(entryIDs[7]), UntilEntryNumber(1)) assert.Nil(t, err) assert.Equal(t, entryIDs[6], entry.GetID()) assertAnnotationsReferToEntry(t, entry, annotations) _, _, err = GetLatestReferenceUpdaterEntry(repo, ForReference("feature"), BeforeEntryID(entryIDs[1]), UntilEntryNumber(1)) assert.ErrorIs(t, err, ErrRSLEntryNotFound) // Set higher until limits _, _, err = GetLatestReferenceUpdaterEntry(repo, ForReference("main"), BeforeEntryID(entryIDs[3]), UntilEntryNumber(2)) assert.ErrorIs(t, err, ErrRSLEntryNotFound) entry, annotations, err = GetLatestReferenceUpdaterEntry(repo, ForReference("feature"), BeforeEntryID(entryIDs[6]), UntilEntryNumber(3)) // until is inclusive assert.Nil(t, err) assert.Equal(t, entryIDs[2], entry.GetID()) assertAnnotationsReferToEntry(t, entry, annotations) }) t.Run("with ref name and unskipped", func(t *testing.T) { refName := "refs/heads/main" tempDir := t.TempDir() repo := gitinterface.CreateTestGitRepository(t, tempDir, false) entryIDs := []gitinterface.Hash{} // Add an entry if err := NewReferenceEntry(refName, gitinterface.ZeroHash).Commit(repo, false); err != nil { t.Fatal(err) } // Latest unskipped entry is the one we just added e, err := GetLatestEntry(repo) if err != nil { t.Fatal(err) } entryIDs = append(entryIDs, e.GetID()) entry, annotations, err := GetLatestReferenceUpdaterEntry(repo, ForReference(refName), IsUnskipped()) assert.Nil(t, err) assert.Empty(t, annotations) assert.Equal(t, entryIDs[len(entryIDs)-1], entry.GetID()) // Add another entry if err := NewReferenceEntry(refName, gitinterface.ZeroHash).Commit(repo, false); err != nil { t.Fatal(err) } // Record latest entry's ID e, err = GetLatestEntry(repo) if err != nil { t.Fatal(err) } entryIDs = append(entryIDs, e.GetID()) // Latest unskipped entry is the newest one entry, annotations, err = GetLatestReferenceUpdaterEntry(repo, ForReference(refName), IsUnskipped()) assert.Nil(t, err) assert.Empty(t, annotations) assert.Equal(t, entryIDs[len(entryIDs)-1], entry.GetID()) // Skip the second one if err := NewAnnotationEntry([]gitinterface.Hash{entryIDs[1]}, true, "revoke").Commit(repo, false); err != nil { t.Fatal(err) } // Now the latest unskipped entry should be the first one entry, annotations, err = GetLatestReferenceUpdaterEntry(repo, ForReference(refName), IsUnskipped()) assert.Nil(t, err) assert.Empty(t, annotations) assert.Equal(t, entryIDs[0], entry.GetID()) // Skip the first one too to trigger error if err := NewAnnotationEntry([]gitinterface.Hash{entryIDs[0]}, true, "revoke").Commit(repo, false); err != nil { t.Fatal(err) } entry, annotations, err = GetLatestReferenceUpdaterEntry(repo, ForReference(refName), IsUnskipped()) assert.Nil(t, entry) assert.Empty(t, annotations) assert.ErrorIs(t, err, ErrRSLEntryNotFound) }) t.Run("with ref name, unskipped, and before entry ID", func(t *testing.T) { refName := "refs/heads/main" tempDir := t.TempDir() repo := gitinterface.CreateTestGitRepository(t, tempDir, false) entryIDs := []gitinterface.Hash{} // Add an entry if err := 
NewReferenceEntry(refName, gitinterface.ZeroHash).Commit(repo, false); err != nil { t.Fatal(err) } // Latest unskipped entry is the one we just added e, err := GetLatestEntry(repo) if err != nil { t.Fatal(err) } entryIDs = append(entryIDs, e.GetID()) // Add another entry if err := NewReferenceEntry(refName, gitinterface.ZeroHash).Commit(repo, false); err != nil { t.Fatal(err) } // Record latest entry's ID e, err = GetLatestEntry(repo) if err != nil { t.Fatal(err) } entryIDs = append(entryIDs, e.GetID()) // Latest unskipped before the current entry is the first entry entry, annotations, err := GetLatestReferenceUpdaterEntry(repo, ForReference(refName), BeforeEntryID(entryIDs[1]), IsUnskipped()) assert.Nil(t, err) assert.Empty(t, annotations) assert.Equal(t, entryIDs[0], entry.GetID()) // Skip the second one if err := NewAnnotationEntry([]gitinterface.Hash{entryIDs[1]}, true, "revoke").Commit(repo, false); err != nil { t.Fatal(err) } // Now even the latest unskipped entry with zero hash should return the first one entry, annotations, err = GetLatestReferenceUpdaterEntry(repo, ForReference(refName), BeforeEntryID(gitinterface.ZeroHash), IsUnskipped()) assert.Nil(t, err) assert.Empty(t, annotations) assert.Equal(t, entryIDs[0], entry.GetID()) // Skip the first one too to trigger error if err := NewAnnotationEntry([]gitinterface.Hash{entryIDs[0]}, true, "revoke").Commit(repo, false); err != nil { t.Fatal(err) } entry, annotations, err = GetLatestReferenceUpdaterEntry(repo, ForReference(refName), BeforeEntryID(gitinterface.ZeroHash), IsUnskipped()) assert.Nil(t, entry) assert.Empty(t, annotations) assert.ErrorIs(t, err, ErrRSLEntryNotFound) }) t.Run("with non gittuf option, mix of gittuf and non gittuf entries", func(t *testing.T) { tempDir := t.TempDir() repo := gitinterface.CreateTestGitRepository(t, tempDir, false) // Add the first gittuf entry if err := NewReferenceEntry("refs/gittuf/policy", gitinterface.ZeroHash).Commit(repo, false); err != nil { t.Fatal(err) } // Add non gittuf entries if err := NewReferenceEntry("refs/heads/main", gitinterface.ZeroHash).Commit(repo, false); err != nil { t.Fatal(err) } // At this point, latest entry should be returned expectedLatestEntry, err := GetLatestEntry(repo) if err != nil { t.Fatal(err) } latestEntry, annotations, err := GetLatestReferenceUpdaterEntry(repo, ForNonGittufReference()) assert.Nil(t, err) assert.Nil(t, annotations) assert.Equal(t, expectedLatestEntry, latestEntry) // Add another gittuf entry if err := NewReferenceEntry("refs/gittuf/not-policy", gitinterface.ZeroHash).Commit(repo, false); err != nil { t.Fatal(err) } // At this point, the expected entry is the same as before latestEntry, annotations, err = GetLatestReferenceUpdaterEntry(repo, ForNonGittufReference()) assert.Nil(t, err) assert.Nil(t, annotations) assert.Equal(t, expectedLatestEntry, latestEntry) // Add an annotation for latest entry, check that it's returned if err := NewAnnotationEntry([]gitinterface.Hash{expectedLatestEntry.GetID()}, false, annotationMessage).Commit(repo, false); err != nil { t.Fatal(err) } latestEntry, annotations, err = GetLatestReferenceUpdaterEntry(repo, ForNonGittufReference()) assert.Nil(t, err) assert.Equal(t, expectedLatestEntry, latestEntry) assertAnnotationsReferToEntry(t, latestEntry, annotations) }) t.Run("with non gittuf option, only gittuf entries", func(t *testing.T) { tempDir := t.TempDir() repo := gitinterface.CreateTestGitRepository(t, tempDir, false) // Add the first gittuf entry if err := NewReferenceEntry("refs/gittuf/policy", 
gitinterface.ZeroHash).Commit(repo, false); err != nil { t.Fatal(err) } _, _, err := GetLatestReferenceUpdaterEntry(repo, ForNonGittufReference()) assert.ErrorIs(t, err, ErrRSLEntryNotFound) // Add another gittuf entry if err := NewReferenceEntry("refs/gittuf/not-policy", gitinterface.ZeroHash).Commit(repo, false); err != nil { t.Fatal(err) } _, _, err = GetLatestReferenceUpdaterEntry(repo, ForNonGittufReference()) assert.ErrorIs(t, err, ErrRSLEntryNotFound) }) t.Run("transitioning from no numbers to numbers", func(t *testing.T) { tempDir := t.TempDir() repo := gitinterface.CreateTestGitRepository(t, tempDir, false) // Add non-numbered entries, including an annotation if err := NewReferenceEntry("refs/heads/main", gitinterface.ZeroHash).commitWithoutNumber(repo); err != nil { t.Fatal(err) } entry, err := GetLatestEntry(repo) if err != nil { t.Fatal(err) } if err := NewAnnotationEntry([]gitinterface.Hash{entry.GetID()}, false, "annotation").commitWithoutNumber(repo); err != nil { t.Fatal(err) } _, _, err = GetLatestReferenceUpdaterEntry(repo, ForReference("refs/heads/main"), BeforeEntryNumber(1)) assert.ErrorIs(t, err, ErrCannotUseEntryNumberFilter) _, _, err = GetLatestReferenceUpdaterEntry(repo, ForReference("refs/heads/main"), UntilEntryNumber(1)) assert.ErrorIs(t, err, ErrCannotUseEntryNumberFilter) // Add numbered entries if err := NewReferenceEntry("refs/heads/main", gitinterface.ZeroHash).Commit(repo, false); err != nil { t.Fatal(err) } expectedEntry, err := GetLatestEntry(repo) if err != nil { t.Fatal(err) } entry, annotations, err := GetLatestReferenceUpdaterEntry(repo, ForReference("refs/heads/main"), UntilEntryNumber(1)) assert.Nil(t, err) assert.Equal(t, expectedEntry.GetID(), entry.GetID()) assert.Nil(t, annotations) }) } func TestGetEntry(t *testing.T) { tempDir := t.TempDir() repo := gitinterface.CreateTestGitRepository(t, tempDir, false) if err := NewReferenceEntry("main", gitinterface.ZeroHash).Commit(repo, false); err != nil { t.Fatal(err) } initialEntryID, err := repo.GetReference(Ref) if err != nil { t.Fatal(err) } if err := NewAnnotationEntry([]gitinterface.Hash{initialEntryID}, true, "This was a mistaken push!").Commit(repo, false); err != nil { t.Fatal(err) } annotationID, err := repo.GetReference(Ref) if err != nil { t.Fatal(err) } if err := NewReferenceEntry("main", gitinterface.ZeroHash).Commit(repo, false); err != nil { t.Error(err) } entry, err := GetEntry(repo, initialEntryID) assert.Nil(t, err) e := entry.(*ReferenceEntry) assert.Equal(t, "main", e.RefName) assert.Equal(t, gitinterface.ZeroHash, e.TargetID) entry, err = GetEntry(repo, annotationID) assert.Nil(t, err) a := entry.(*AnnotationEntry) assert.True(t, a.Skip) assert.Equal(t, []gitinterface.Hash{initialEntryID}, a.RSLEntryIDs) assert.Equal(t, "This was a mistaken push!", a.Message) } func TestGetParentForEntry(t *testing.T) { t.Run("regular test", func(t *testing.T) { tempDir := t.TempDir() repo := gitinterface.CreateTestGitRepository(t, tempDir, false) // Assert no parent for first entry if err := NewReferenceEntry("main", gitinterface.ZeroHash).Commit(repo, false); err != nil { t.Fatal(err) } entry, err := GetLatestEntry(repo) if err != nil { t.Fatal(err) } entryID := entry.GetID() _, err = GetParentForEntry(repo, entry) assert.ErrorIs(t, err, ErrRSLEntryNotFound) // Find parent for an entry if err := NewReferenceEntry("main", gitinterface.ZeroHash).Commit(repo, false); err != nil { t.Fatal(err) } entry, err = GetLatestEntry(repo) if err != nil { t.Fatal(err) } parentEntry, err := 
GetParentForEntry(repo, entry) assert.Nil(t, err) assert.Equal(t, entryID, parentEntry.GetID()) entryID = entry.GetID() // Find parent for an annotation if err := NewAnnotationEntry([]gitinterface.Hash{entryID}, false, annotationMessage).Commit(repo, false); err != nil { t.Fatal(err) } entry, err = GetLatestEntry(repo) if err != nil { t.Fatal(err) } parentEntry, err = GetParentForEntry(repo, entry) assert.Nil(t, err) assert.Equal(t, entryID, parentEntry.GetID()) }) t.Run("transition from no number to with number", func(t *testing.T) { tempDir := t.TempDir() repo := gitinterface.CreateTestGitRepository(t, tempDir, false) if err := NewReferenceEntry("refs/heads/main", gitinterface.ZeroHash).commitWithoutNumber(repo); err != nil { t.Fatal(err) } nonNumberedEntry, err := GetLatestEntry(repo) if err != nil { t.Fatal(err) } if err := NewReferenceEntry("refs/heads/main", gitinterface.ZeroHash).Commit(repo, false); err != nil { t.Fatal(err) } numberedEntry, err := GetLatestEntry(repo) if err != nil { t.Fatal(err) } assert.Equal(t, uint64(1), numberedEntry.GetNumber()) parentEntry, err := GetParentForEntry(repo, numberedEntry) assert.Nil(t, err) assert.Equal(t, uint64(0), parentEntry.GetNumber()) assert.Equal(t, nonNumberedEntry.GetID(), parentEntry.GetID()) }) } func TestGetNonGittufParentReferenceUpdaterEntryForEntry(t *testing.T) { t.Run("mix of gittuf and non gittuf entries", func(t *testing.T) { tempDir := t.TempDir() repo := gitinterface.CreateTestGitRepository(t, tempDir, false) // Add the first gittuf entry if err := NewReferenceEntry("refs/gittuf/policy", gitinterface.ZeroHash).Commit(repo, false); err != nil { t.Fatal(err) } // Add non gittuf entry if err := NewReferenceEntry("refs/heads/main", gitinterface.ZeroHash).Commit(repo, false); err != nil { t.Fatal(err) } expectedEntry, err := GetLatestEntry(repo) if err != nil { t.Fatal(err) } // Add non gittuf entry if err := NewReferenceEntry("refs/heads/main", gitinterface.ZeroHash).Commit(repo, false); err != nil { t.Fatal(err) } latestEntry, err := GetLatestEntry(repo) if err != nil { t.Fatal(err) } parentEntry, annotations, err := GetNonGittufParentReferenceUpdaterEntryForEntry(repo, latestEntry) assert.Nil(t, err) assert.Nil(t, annotations) assert.Equal(t, expectedEntry, parentEntry) // Add another gittuf entry and then a non gittuf entry expectedEntry = latestEntry if err := NewReferenceEntry("refs/gittuf/not-policy", gitinterface.ZeroHash).Commit(repo, false); err != nil { t.Fatal(err) } if err := NewReferenceEntry("refs/gittuf/main", gitinterface.ZeroHash).Commit(repo, false); err != nil { t.Fatal(err) } latestEntry, err = GetLatestEntry(repo) if err != nil { t.Fatal(err) } // The expected entry should be from before this latest gittuf addition parentEntry, annotations, err = GetNonGittufParentReferenceUpdaterEntryForEntry(repo, latestEntry) assert.Nil(t, err) assert.Nil(t, annotations) assert.Equal(t, expectedEntry, parentEntry) // Add annotation pertaining to the expected entry if err := NewAnnotationEntry([]gitinterface.Hash{expectedEntry.GetID()}, false, annotationMessage).Commit(repo, false); err != nil { t.Fatal(err) } parentEntry, annotations, err = GetNonGittufParentReferenceUpdaterEntryForEntry(repo, latestEntry) assert.Nil(t, err) assert.Equal(t, expectedEntry, parentEntry) assertAnnotationsReferToEntry(t, parentEntry, annotations) }) t.Run("only gittuf entries", func(t *testing.T) { tempDir := t.TempDir() repo := gitinterface.CreateTestGitRepository(t, tempDir, false) // Add the first gittuf entry if err := 
NewReferenceEntry("refs/gittuf/policy", gitinterface.ZeroHash).Commit(repo, false); err != nil { t.Fatal(err) } latestEntry, err := GetLatestEntry(repo) if err != nil { t.Fatal(err) } _, _, err = GetNonGittufParentReferenceUpdaterEntryForEntry(repo, latestEntry) assert.ErrorIs(t, err, ErrRSLEntryNotFound) // Add another gittuf entry if err := NewReferenceEntry("refs/gittuf/not-policy", gitinterface.ZeroHash).Commit(repo, false); err != nil { t.Fatal(err) } latestEntry, err = GetLatestEntry(repo) if err != nil { t.Fatal(err) } _, _, err = GetNonGittufParentReferenceUpdaterEntryForEntry(repo, latestEntry) assert.ErrorIs(t, err, ErrRSLEntryNotFound) }) } func TestGetFirstEntry(t *testing.T) { t.Parallel() tempDir := t.TempDir() repo := gitinterface.CreateTestGitRepository(t, tempDir, false) if err := NewReferenceEntry("first", gitinterface.ZeroHash).Commit(repo, false); err != nil { t.Fatal(err) } firstEntryT, err := GetLatestEntry(repo) if err != nil { t.Fatal(err) } firstEntry := firstEntryT.(*ReferenceEntry) for i := 0; i < 5; i++ { if err := NewReferenceEntry("main", gitinterface.ZeroHash).Commit(repo, false); err != nil { t.Fatal(err) } } testEntry, annotations, err := GetFirstEntry(repo) assert.Nil(t, err) assert.Nil(t, annotations) assert.Equal(t, firstEntry, testEntry) for i := 0; i < 5; i++ { if err := NewAnnotationEntry([]gitinterface.Hash{firstEntry.ID}, false, annotationMessage).Commit(repo, false); err != nil { t.Fatal(err) } } testEntry, annotations, err = GetFirstEntry(repo) assert.Nil(t, err) assert.Equal(t, firstEntry, testEntry) assert.Equal(t, 5, len(annotations)) assertAnnotationsReferToEntry(t, firstEntry, annotations) } func TestGetFirstReferenceUpdaterEntryForRef(t *testing.T) { tempDir := t.TempDir() repo := gitinterface.CreateTestGitRepository(t, tempDir, false) if err := NewReferenceEntry("first", gitinterface.ZeroHash).Commit(repo, false); err != nil { t.Fatal(err) } firstEntryT, err := GetLatestEntry(repo) if err != nil { t.Fatal(err) } firstEntry := firstEntryT.(*ReferenceEntry) for i := 0; i < 5; i++ { if err := NewReferenceEntry("main", gitinterface.ZeroHash).Commit(repo, false); err != nil { t.Fatal(err) } } testEntry, annotations, err := GetFirstReferenceUpdaterEntryForRef(repo, "first") assert.Nil(t, err) assert.Nil(t, annotations) assert.Equal(t, firstEntry, testEntry) for i := 0; i < 5; i++ { if err := NewAnnotationEntry([]gitinterface.Hash{firstEntry.ID}, false, annotationMessage).Commit(repo, false); err != nil { t.Fatal(err) } } testEntry, annotations, err = GetFirstReferenceUpdaterEntryForRef(repo, "first") assert.Nil(t, err) assert.Equal(t, firstEntry, testEntry) assert.Equal(t, 5, len(annotations)) assertAnnotationsReferToEntry(t, firstEntry, annotations) } func TestSkipAllInvalidReferenceEntriesForRef(t *testing.T) { t.Run("skip latest entry", func(t *testing.T) { tmpDir := t.TempDir() repo := gitinterface.CreateTestGitRepository(t, tmpDir, false) treeBuilder := gitinterface.NewTreeBuilder(repo) emptyTreeHash, err := treeBuilder.WriteTreeFromEntries(nil) require.Nil(t, err) initialCommitHash, err := repo.Commit(emptyTreeHash, "refs/heads/main", "Initial commit\n", false) require.Nil(t, err) if err := NewReferenceEntry("refs/heads/main", initialCommitHash).Commit(repo, false); err != nil { t.Fatal(err) } toBeSkippedEntry, err := GetLatestEntry(repo) require.Nil(t, err) // Create a different commit and override the ref if err := repo.SetReference("refs/heads/main", gitinterface.ZeroHash); err != nil { t.Fatal(err) } newCommitHash, err := 
repo.Commit(emptyTreeHash, "refs/heads/main", "Real initial commit\n", false) require.Nil(t, err) if err := NewReferenceEntry("refs/heads/main", newCommitHash).Commit(repo, false); err != nil { t.Fatal(err) } if err := SkipAllInvalidReferenceEntriesForRef(repo, "refs/heads/main", false); err != nil { t.Fatal(err) } latestEntry, err := GetLatestEntry(repo) require.Nil(t, err) annotationEntry, isAnnotation := latestEntry.(*AnnotationEntry) if !isAnnotation { t.Fatal("invalid entry type") } assert.Equal(t, []gitinterface.Hash{toBeSkippedEntry.GetID()}, annotationEntry.RSLEntryIDs) }) t.Run("skip multiple entries", func(t *testing.T) { tmpDir := t.TempDir() repo := gitinterface.CreateTestGitRepository(t, tmpDir, false) treeBuilder := gitinterface.NewTreeBuilder(repo) emptyTreeHash, err := treeBuilder.WriteTreeFromEntries(nil) require.Nil(t, err) skippedEntries := []gitinterface.Hash{} initialCommitHash, err := repo.Commit(emptyTreeHash, "refs/heads/main", "Initial commit\n", false) require.Nil(t, err) if err := NewReferenceEntry("refs/heads/main", initialCommitHash).Commit(repo, false); err != nil { t.Fatal(err) } toBeSkippedEntry, err := GetLatestEntry(repo) require.Nil(t, err) skippedEntries = append(skippedEntries, toBeSkippedEntry.GetID()) // Add another commit and entry that'll be skipped later secondCommitHash, err := repo.Commit(emptyTreeHash, "refs/heads/main", "Second commit\n", false) require.Nil(t, err) if err := NewReferenceEntry("refs/heads/main", secondCommitHash).Commit(repo, false); err != nil { t.Fatal(err) } toBeSkippedEntry, err = GetLatestEntry(repo) require.Nil(t, err) skippedEntries = append(skippedEntries, toBeSkippedEntry.GetID()) // Create a different commit and override the ref if err := repo.SetReference("refs/heads/main", gitinterface.ZeroHash); err != nil { t.Fatal(err) } newCommitHash, err := repo.Commit(emptyTreeHash, "refs/heads/main", "Real initial commit\n", false) require.Nil(t, err) if err := NewReferenceEntry("refs/heads/main", newCommitHash).Commit(repo, false); err != nil { t.Fatal(err) } if err := SkipAllInvalidReferenceEntriesForRef(repo, "refs/heads/main", false); err != nil { t.Fatal(err) } latestEntry, err := GetLatestEntry(repo) require.Nil(t, err) annotationEntry, isAnnotation := latestEntry.(*AnnotationEntry) if !isAnnotation { t.Fatal("invalid entry type") } // we have to reverse the order of one of the lists slices.Reverse[[]gitinterface.Hash](skippedEntries) assert.Equal(t, skippedEntries, annotationEntry.RSLEntryIDs) }) t.Run("just one entry, nothing should change", func(t *testing.T) { tmpDir := t.TempDir() repo := gitinterface.CreateTestGitRepository(t, tmpDir, false) treeBuilder := gitinterface.NewTreeBuilder(repo) emptyTreeHash, err := treeBuilder.WriteTreeFromEntries(nil) require.Nil(t, err) initialCommitHash, err := repo.Commit(emptyTreeHash, "refs/heads/main", "Initial commit\n", false) require.Nil(t, err) if err := NewReferenceEntry("refs/heads/main", initialCommitHash).Commit(repo, false); err != nil { t.Fatal(err) } originalLatestEntry, err := GetLatestEntry(repo) require.Nil(t, err) if err := SkipAllInvalidReferenceEntriesForRef(repo, "refs/heads/main", false); err != nil { t.Fatal(err) } newLatestEntry, err := GetLatestEntry(repo) require.Nil(t, err) // Confirm no annotation was created if _, isReferenceEntry := newLatestEntry.(*ReferenceEntry); !isReferenceEntry { t.Fatal(fmt.Errorf("invalid entry type")) } assert.Equal(t, originalLatestEntry, newLatestEntry) }) t.Run("multiple entries, nothing should change", func(t *testing.T) { 
tmpDir := t.TempDir() repo := gitinterface.CreateTestGitRepository(t, tmpDir, false) treeBuilder := gitinterface.NewTreeBuilder(repo) emptyTreeHash, err := treeBuilder.WriteTreeFromEntries(nil) require.Nil(t, err) initialCommitHash, err := repo.Commit(emptyTreeHash, "refs/heads/main", "Initial commit\n", false) require.Nil(t, err) if err := NewReferenceEntry("refs/heads/main", initialCommitHash).Commit(repo, false); err != nil { t.Fatal(err) } anotherCommitHash, err := repo.Commit(emptyTreeHash, "refs/heads/main", "Second commit\n", false) require.Nil(t, err) if err := NewReferenceEntry("refs/heads/main", anotherCommitHash).Commit(repo, false); err != nil { t.Fatal(err) } originalLatestEntry, err := GetLatestEntry(repo) require.Nil(t, err) if err := SkipAllInvalidReferenceEntriesForRef(repo, "refs/heads/main", false); err != nil { t.Fatal(err) } newLatestEntry, err := GetLatestEntry(repo) require.Nil(t, err) // Confirm no annotation was created if _, isReferenceEntry := newLatestEntry.(*ReferenceEntry); !isReferenceEntry { t.Fatal(fmt.Errorf("invalid entry type")) } assert.Equal(t, originalLatestEntry, newLatestEntry) }) } func TestGetFirstReferenceUpdaterEntryForCommit(t *testing.T) { tempDir := t.TempDir() repo := gitinterface.CreateTestGitRepository(t, tempDir, false) treeBuilder := gitinterface.NewTreeBuilder(repo) emptyTreeHash, err := treeBuilder.WriteTreeFromEntries(nil) if err != nil { t.Fatal(err) } mainRef := "refs/heads/main" initialTargetIDs := []gitinterface.Hash{} for i := 0; i < 3; i++ { commitID, err := repo.Commit(emptyTreeHash, mainRef, "Test commit", false) if err != nil { t.Fatal(err) } initialTargetIDs = append(initialTargetIDs, commitID) } // Right now, the RSL has no entries. for _, commitID := range initialTargetIDs { _, _, err = GetFirstReferenceUpdaterEntryForCommit(repo, commitID) assert.ErrorIs(t, err, ErrNoRecordOfCommit) } if err := NewReferenceEntry(mainRef, initialTargetIDs[len(initialTargetIDs)-1]).Commit(repo, false); err != nil { t.Fatal(err) } // At this point, searching for any commit's entry should return the // solitary RSL entry. latestEntryT, err := GetLatestEntry(repo) if err != nil { t.Fatal(err) } for _, commitID := range initialTargetIDs { entry, annotations, err := GetFirstReferenceUpdaterEntryForCommit(repo, commitID) assert.Nil(t, err) assert.Nil(t, annotations) assert.Equal(t, latestEntryT, entry) } // Now, let's branch off from this ref and add more commits. featureRef := "refs/heads/feature" // First, "checkout" the feature branch. if err := repo.SetReference(featureRef, initialTargetIDs[len(initialTargetIDs)-1]); err != nil { t.Fatal(err) } // Next, add some new commits to this branch. featureTargetIDs := []gitinterface.Hash{} for i := 0; i < 3; i++ { commitID, err := repo.Commit(emptyTreeHash, featureRef, "Feature commit", false) if err != nil { t.Fatal(err) } featureTargetIDs = append(featureTargetIDs, commitID) } // The RSL hasn't seen these new commits, however. for _, commitID := range featureTargetIDs { _, _, err = GetFirstReferenceUpdaterEntryForCommit(repo, commitID) assert.ErrorIs(t, err, ErrNoRecordOfCommit) } if err := NewReferenceEntry(featureRef, featureTargetIDs[len(featureTargetIDs)-1]).Commit(repo, false); err != nil { t.Fatal(err) } // At this point, searching for any of the original commits' entry should // return the first RSL entry. 
for _, commitID := range initialTargetIDs { entry, annotations, err := GetFirstReferenceUpdaterEntryForCommit(repo, commitID) assert.Nil(t, err) assert.Nil(t, annotations) assert.Equal(t, latestEntryT, entry) } // Searching for the feature commits should return the second entry. latestEntryT, err = GetLatestEntry(repo) if err != nil { t.Fatal(err) } for _, commitID := range featureTargetIDs { entry, annotations, err := GetFirstReferenceUpdaterEntryForCommit(repo, commitID) assert.Nil(t, err) assert.Nil(t, annotations) assert.Equal(t, latestEntryT, entry) } // Now, fast forward main branch to the latest feature branch commit. if err := repo.SetReference(mainRef, featureTargetIDs[len(featureTargetIDs)-1]); err != nil { t.Fatal(err) } if err := NewReferenceEntry(mainRef, featureTargetIDs[len(featureTargetIDs)-1]).Commit(repo, false); err != nil { t.Fatal(err) } // Testing for any of the feature commits should return the feature branch // entry, not the main branch entry. for _, commitID := range featureTargetIDs { entry, annotations, err := GetFirstReferenceUpdaterEntryForCommit(repo, commitID) assert.Nil(t, err) assert.Nil(t, annotations) assert.Equal(t, latestEntryT, entry) } // Add annotation for feature entry if err := NewAnnotationEntry([]gitinterface.Hash{latestEntryT.GetID()}, false, annotationMessage).Commit(repo, false); err != nil { t.Fatal(err) } latestEntry := latestEntryT.(*ReferenceEntry) for _, commitID := range featureTargetIDs { entry, annotations, err := GetFirstReferenceUpdaterEntryForCommit(repo, commitID) assert.Nil(t, err) assert.Equal(t, latestEntryT, entry) assertAnnotationsReferToEntry(t, latestEntry, annotations) } } func TestGetReferenceUpdaterEntriesInRange(t *testing.T) { refName := "refs/heads/main" anotherRefName := "refs/heads/feature" // We add a mix of reference entries and annotations, establishing expected // return values as we go along tempDir := t.TempDir() repo := gitinterface.CreateTestGitRepository(t, tempDir, false) expectedEntries := []ReferenceUpdaterEntry{} expectedAnnotationMap := map[string][]*AnnotationEntry{} // Add some entries to main for i := 0; i < 3; i++ { if err := NewReferenceEntry(refName, gitinterface.ZeroHash).Commit(repo, false); err != nil { t.Fatal(err) } // We run GetLatestEntry so that the entry has its ID set as well entry, err := GetLatestEntry(repo) if err != nil { t.Fatal(err) } expectedEntries = append(expectedEntries, entry.(ReferenceUpdaterEntry)) } // Add some annotations for i := 0; i < 3; i++ { if err := NewAnnotationEntry([]gitinterface.Hash{expectedEntries[i].GetID()}, false, annotationMessage).Commit(repo, false); err != nil { t.Fatal(err) } annotation, err := GetLatestEntry(repo) if err != nil { t.Fatal(err) } expectedAnnotationMap[expectedEntries[i].GetID().String()] = []*AnnotationEntry{annotation.(*AnnotationEntry)} } // Each entry has one annotation entries, annotationMap, err := GetReferenceUpdaterEntriesInRange(repo, expectedEntries[0].GetID(), expectedEntries[len(expectedEntries)-1].GetID()) assert.Nil(t, err) assert.Equal(t, expectedEntries, entries) assert.Equal(t, expectedAnnotationMap, annotationMap) // Add an entry and annotation for feature branch if err := NewReferenceEntry(anotherRefName, gitinterface.ZeroHash).Commit(repo, false); err != nil { t.Fatal(err) } latestEntry, err := GetLatestEntry(repo) if err != nil { t.Fatal(err) } expectedEntries = append(expectedEntries, latestEntry.(*ReferenceEntry)) if err := NewAnnotationEntry([]gitinterface.Hash{latestEntry.GetID()}, false, 
annotationMessage).Commit(repo, false); err != nil { t.Fatal(err) } latestEntry, err = GetLatestEntry(repo) if err != nil { t.Fatal(err) } expectedAnnotationMap[expectedEntries[len(expectedEntries)-1].GetID().String()] = []*AnnotationEntry{latestEntry.(*AnnotationEntry)} // Expected values include the feature branch entry and annotation entries, annotationMap, err = GetReferenceUpdaterEntriesInRange(repo, expectedEntries[0].GetID(), expectedEntries[len(expectedEntries)-1].GetID()) assert.Nil(t, err) assert.Equal(t, expectedEntries, entries) assert.Equal(t, expectedAnnotationMap, annotationMap) // Add an annotation that refers to two valid entries if err := NewAnnotationEntry([]gitinterface.Hash{expectedEntries[0].GetID(), expectedEntries[1].GetID()}, false, annotationMessage).Commit(repo, false); err != nil { t.Fatal(err) } latestEntry, err = GetLatestEntry(repo) if err != nil { t.Fatal(err) } // This annotation is relevant to both entries annotation := latestEntry.(*AnnotationEntry) expectedAnnotationMap[expectedEntries[0].GetID().String()] = append(expectedAnnotationMap[expectedEntries[0].GetID().String()], annotation) expectedAnnotationMap[expectedEntries[1].GetID().String()] = append(expectedAnnotationMap[expectedEntries[1].GetID().String()], annotation) entries, annotationMap, err = GetReferenceUpdaterEntriesInRange(repo, expectedEntries[0].GetID(), expectedEntries[len(expectedEntries)-1].GetID()) assert.Nil(t, err) assert.Equal(t, expectedEntries, entries) assert.Equal(t, expectedAnnotationMap, annotationMap) // Add a gittuf namespace entry and ensure it's returned as relevant if err := NewReferenceEntry("refs/gittuf/relevant", gitinterface.ZeroHash).Commit(repo, false); err != nil { t.Fatal(err) } latestEntry, err = GetLatestEntry(repo) if err != nil { t.Fatal(err) } expectedEntries = append(expectedEntries, latestEntry.(*ReferenceEntry)) entries, annotationMap, err = GetReferenceUpdaterEntriesInRange(repo, expectedEntries[0].GetID(), expectedEntries[len(expectedEntries)-1].GetID()) assert.Nil(t, err) assert.Equal(t, expectedEntries, entries) assert.Equal(t, expectedAnnotationMap, annotationMap) } func TestGetReferenceUpdaterEntriesInRangeForRef(t *testing.T) { refName := "refs/heads/main" anotherRefName := "refs/heads/feature" // We add a mix of reference entries and annotations, establishing expected // return values as we go along tempDir := t.TempDir() repo := gitinterface.CreateTestGitRepository(t, tempDir, false) expectedEntries := []ReferenceUpdaterEntry{} expectedAnnotationMap := map[string][]*AnnotationEntry{} // Add some entries to main for i := 0; i < 3; i++ { if err := NewReferenceEntry(refName, gitinterface.ZeroHash).Commit(repo, false); err != nil { t.Fatal(err) } // We run GetLatestEntry so that the entry has its ID set as well entry, err := GetLatestEntry(repo) if err != nil { t.Fatal(err) } expectedEntries = append(expectedEntries, entry.(*ReferenceEntry)) } // Add some annotations for i := 0; i < 3; i++ { if err := NewAnnotationEntry([]gitinterface.Hash{expectedEntries[i].GetID()}, false, annotationMessage).Commit(repo, false); err != nil { t.Fatal(err) } annotation, err := GetLatestEntry(repo) if err != nil { t.Fatal(err) } expectedAnnotationMap[expectedEntries[i].GetID().String()] = []*AnnotationEntry{annotation.(*AnnotationEntry)} } // Each entry has one annotation entries, annotationMap, err := GetReferenceUpdaterEntriesInRangeForRef(repo, expectedEntries[0].GetID(), expectedEntries[len(expectedEntries)-1].GetID(), refName) assert.Nil(t, err) assert.Equal(t, 
expectedEntries, entries) assert.Equal(t, expectedAnnotationMap, annotationMap) // Add an entry and annotation for feature branch if err := NewReferenceEntry(anotherRefName, gitinterface.ZeroHash).Commit(repo, false); err != nil { t.Fatal(err) } latestEntry, err := GetLatestEntry(repo) if err != nil { t.Fatal(err) } if err := NewAnnotationEntry([]gitinterface.Hash{latestEntry.GetID()}, false, annotationMessage).Commit(repo, false); err != nil { t.Fatal(err) } // Expected values do not change entries, annotationMap, err = GetReferenceUpdaterEntriesInRangeForRef(repo, expectedEntries[0].GetID(), expectedEntries[len(expectedEntries)-1].GetID(), refName) assert.Nil(t, err) assert.Equal(t, expectedEntries, entries) assert.Equal(t, expectedAnnotationMap, annotationMap) // Add an annotation that refers to two valid entries if err := NewAnnotationEntry([]gitinterface.Hash{expectedEntries[0].GetID(), expectedEntries[1].GetID()}, false, annotationMessage).Commit(repo, false); err != nil { t.Fatal(err) } latestEntry, err = GetLatestEntry(repo) if err != nil { t.Fatal(err) } // This annotation is relevant to both entries annotation := latestEntry.(*AnnotationEntry) expectedAnnotationMap[expectedEntries[0].GetID().String()] = append(expectedAnnotationMap[expectedEntries[0].GetID().String()], annotation) expectedAnnotationMap[expectedEntries[1].GetID().String()] = append(expectedAnnotationMap[expectedEntries[1].GetID().String()], annotation) entries, annotationMap, err = GetReferenceUpdaterEntriesInRangeForRef(repo, expectedEntries[0].GetID(), expectedEntries[len(expectedEntries)-1].GetID(), refName) assert.Nil(t, err) assert.Equal(t, expectedEntries, entries) assert.Equal(t, expectedAnnotationMap, annotationMap) // Add a gittuf namespace entry and ensure it's returned as relevant if err := NewReferenceEntry("refs/gittuf/relevant", gitinterface.ZeroHash).Commit(repo, false); err != nil { t.Fatal(err) } latestEntry, err = GetLatestEntry(repo) if err != nil { t.Fatal(err) } expectedEntries = append(expectedEntries, latestEntry.(*ReferenceEntry)) entries, annotationMap, err = GetReferenceUpdaterEntriesInRangeForRef(repo, expectedEntries[0].GetID(), expectedEntries[len(expectedEntries)-1].GetID(), refName) assert.Nil(t, err) assert.Equal(t, expectedEntries, entries) assert.Equal(t, expectedAnnotationMap, annotationMap) } func TestPropagateChangesFromUpstreamRepository(t *testing.T) { // Create upstreamRepo upstreamRepoLocation := t.TempDir() upstreamRepo := gitinterface.CreateTestGitRepository(t, upstreamRepoLocation, true) downstreamRepoLocation := t.TempDir() downstreamRepo := gitinterface.CreateTestGitRepository(t, downstreamRepoLocation, true) propagationDetails := &tufv01.PropagationDirective{ UpstreamReference: "refs/heads/main", UpstreamRepository: upstreamRepoLocation, DownstreamReference: "refs/heads/main", DownstreamPath: "upstream", } err := PropagateChangesFromUpstreamRepository(downstreamRepo, upstreamRepo, []tuf.PropagationDirective{propagationDetails}, false) assert.Nil(t, err) // propagation has nothing to do because no RSL exists in upstream // Add things to upstreamRepo blobAID, err := upstreamRepo.WriteBlob([]byte("a")) if err != nil { t.Fatal(err) } blobBID, err := upstreamRepo.WriteBlob([]byte("b")) if err != nil { t.Fatal(err) } upstreamTreeBuilder := gitinterface.NewTreeBuilder(upstreamRepo) upstreamRootTreeID, err := upstreamTreeBuilder.WriteTreeFromEntries([]gitinterface.TreeEntry{ gitinterface.NewEntryBlob("a", blobAID), gitinterface.NewEntryBlob("b", blobBID), }) if err != nil { 
t.Fatal(err) } upstreamCommitID, err := upstreamRepo.Commit(upstreamRootTreeID, "refs/heads/main", "Initial commit\n", false) if err != nil { t.Fatal(err) } if err := NewReferenceEntry("refs/heads/main", upstreamCommitID).Commit(upstreamRepo, false); err != nil { t.Fatal(err) } upstreamEntry, err := GetLatestEntry(upstreamRepo) if err != nil { t.Fatal(err) } err = PropagateChangesFromUpstreamRepository(downstreamRepo, upstreamRepo, []tuf.PropagationDirective{propagationDetails}, false) // TODO: should propagation result in a new local ref? assert.ErrorIs(t, err, gitinterface.ErrReferenceNotFound) // Add things to downstreamRepo blobAID, err = downstreamRepo.WriteBlob([]byte("a")) if err != nil { t.Fatal(err) } blobBID, err = downstreamRepo.WriteBlob([]byte("b")) if err != nil { t.Fatal(err) } downstreamTreeBuilder := gitinterface.NewTreeBuilder(downstreamRepo) downstreamRootTreeID, err := downstreamTreeBuilder.WriteTreeFromEntries([]gitinterface.TreeEntry{ gitinterface.NewEntryBlob("a", blobAID), gitinterface.NewEntryBlob("foo/b", blobBID), }) if err != nil { t.Fatal(err) } downstreamCommitID, err := downstreamRepo.Commit(downstreamRootTreeID, "refs/heads/main", "Initial commit\n", false) if err != nil { t.Fatal(err) } if err := NewReferenceEntry("refs/heads/main", downstreamCommitID).Commit(downstreamRepo, false); err != nil { t.Fatal(err) } err = PropagateChangesFromUpstreamRepository(downstreamRepo, upstreamRepo, []tuf.PropagationDirective{propagationDetails}, false) assert.Nil(t, err) latestEntry, err := GetLatestEntry(downstreamRepo) if err != nil { t.Fatal(err) } propagationEntry, isPropagationEntry := latestEntry.(*PropagationEntry) if !isPropagationEntry { t.Fatal("unexpected entry type in downstream repo") } assert.Equal(t, upstreamRepoLocation, propagationEntry.UpstreamRepository) assert.Equal(t, upstreamEntry.GetID(), propagationEntry.UpstreamEntryID) downstreamRootTreeID, err = downstreamRepo.GetCommitTreeID(propagationEntry.TargetID) if err != nil { t.Fatal(err) } pathTreeID, err := downstreamRepo.GetPathIDInTree("upstream", downstreamRootTreeID) if err != nil { t.Fatal(err) } // Check the subtree ID in downstream repo matches upstream root tree ID assert.Equal(t, upstreamRootTreeID, pathTreeID) // Check the downstream tree still contains other items expectedRootTreeID, err := downstreamTreeBuilder.WriteTreeFromEntries([]gitinterface.TreeEntry{ gitinterface.NewEntryBlob("a", blobAID), gitinterface.NewEntryBlob("foo/b", blobBID), gitinterface.NewEntryBlob("upstream/a", blobAID), gitinterface.NewEntryBlob("upstream/b", blobBID), }) if err != nil { t.Fatal(err) } assert.Equal(t, expectedRootTreeID, downstreamRootTreeID) // Nothing to propagate, check that a new entry has not been added in the downstreamRepo err = PropagateChangesFromUpstreamRepository(downstreamRepo, upstreamRepo, []tuf.PropagationDirective{propagationDetails}, false) assert.Nil(t, err) latestEntry, err = GetLatestEntry(downstreamRepo) if err != nil { t.Fatal(err) } assert.Equal(t, propagationEntry.GetID(), latestEntry.GetID()) } func TestAnnotationEntryRefersTo(t *testing.T) { // We use these as stand-ins for actual RSL IDs that have the same data type tempDir := t.TempDir() repo := gitinterface.CreateTestGitRepository(t, tempDir, false) treeBuilder := gitinterface.NewTreeBuilder(repo) emptyTreeID, err := treeBuilder.WriteTreeFromEntries(nil) if err != nil { t.Fatal(err) } emptyBlobID, err := repo.WriteBlob(nil) if err != nil { t.Fatal(err) } tests := map[string]struct { annotation *AnnotationEntry entryID 
gitinterface.Hash expectedResult bool }{ "annotation refers to single entry, returns true": { annotation: NewAnnotationEntry([]gitinterface.Hash{emptyBlobID}, false, annotationMessage), entryID: emptyBlobID, expectedResult: true, }, "annotation refers to multiple entries, returns true": { annotation: NewAnnotationEntry([]gitinterface.Hash{emptyTreeID, emptyBlobID}, false, annotationMessage), entryID: emptyBlobID, expectedResult: true, }, "annotation refers to single entry, returns false": { annotation: NewAnnotationEntry([]gitinterface.Hash{emptyBlobID}, false, annotationMessage), entryID: gitinterface.ZeroHash, expectedResult: false, }, "annotation refers to multiple entries, returns false": { annotation: NewAnnotationEntry([]gitinterface.Hash{emptyTreeID, emptyBlobID}, false, annotationMessage), entryID: gitinterface.ZeroHash, expectedResult: false, }, } for name, test := range tests { result := test.annotation.RefersTo(test.entryID) assert.Equal(t, test.expectedResult, result, fmt.Sprintf("unexpected result in test '%s'", name)) } } func TestReferenceEntryCreateCommitMessage(t *testing.T) { nonZeroHash, err := gitinterface.NewHash("abcdef12345678900987654321fedcbaabcdef12") if err != nil { t.Fatal(err) } tests := map[string]struct { entry *ReferenceEntry expectedMessage string }{ "entry, fully resolved ref": { entry: &ReferenceEntry{ RefName: "refs/heads/main", TargetID: gitinterface.ZeroHash, }, expectedMessage: fmt.Sprintf("%s\n\n%s: %s\n%s: %s", ReferenceEntryHeader, RefKey, "refs/heads/main", TargetIDKey, plumbing.ZeroHash.String()), }, "entry, non-zero commit": { entry: &ReferenceEntry{ RefName: "refs/heads/main", TargetID: nonZeroHash, }, expectedMessage: fmt.Sprintf("%s\n\n%s: %s\n%s: %s", ReferenceEntryHeader, RefKey, "refs/heads/main", TargetIDKey, "abcdef12345678900987654321fedcbaabcdef12"), }, "entry, fully resolved ref, small number": { entry: &ReferenceEntry{ RefName: "refs/heads/main", TargetID: gitinterface.ZeroHash, Number: 1, }, expectedMessage: fmt.Sprintf("%s\n\n%s: %s\n%s: %s\n%s: %d", ReferenceEntryHeader, RefKey, "refs/heads/main", TargetIDKey, plumbing.ZeroHash.String(), NumberKey, 1), }, "entry, fully resolved ref, large number": { entry: &ReferenceEntry{ RefName: "refs/heads/main", TargetID: gitinterface.ZeroHash, Number: math.MaxUint64, }, expectedMessage: fmt.Sprintf("%s\n\n%s: %s\n%s: %s\n%s: %d", ReferenceEntryHeader, RefKey, "refs/heads/main", TargetIDKey, plumbing.ZeroHash.String(), NumberKey, uint64(math.MaxUint64)), }, } for name, test := range tests { t.Run(name, func(t *testing.T) { message, _ := test.entry.createCommitMessage(true) if !assert.Equal(t, test.expectedMessage, message) { t.Errorf("expected\n%s\n\ngot\n%s", test.expectedMessage, message) } }) } } func TestAnnotationEntryCreateCommitMessage(t *testing.T) { tests := map[string]struct { entry *AnnotationEntry expectedMessage string }{ "annotation, no message": { entry: &AnnotationEntry{ RSLEntryIDs: []gitinterface.Hash{gitinterface.ZeroHash}, Skip: true, Message: "", }, expectedMessage: fmt.Sprintf("%s\n\n%s: %s\n%s: %s", AnnotationEntryHeader, EntryIDKey, gitinterface.ZeroHash.String(), SkipKey, "true"), }, "annotation, with message": { entry: &AnnotationEntry{ RSLEntryIDs: []gitinterface.Hash{gitinterface.ZeroHash}, Skip: true, Message: "message", }, expectedMessage: fmt.Sprintf("%s\n\n%s: %s\n%s: %s\n%s\n%s\n%s", AnnotationEntryHeader, EntryIDKey, gitinterface.ZeroHash.String(), SkipKey, "true", BeginMessage, base64.StdEncoding.EncodeToString([]byte("message")), EndMessage), }, 
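// Note: the annotation message is carried base64-encoded between the
// BeginMessage and EndMessage markers, so the multi-line case below is
// expected to encode the whole message as a single blob.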
"annotation, with multi-line message": { entry: &AnnotationEntry{ RSLEntryIDs: []gitinterface.Hash{gitinterface.ZeroHash}, Skip: true, Message: "message1\nmessage2", }, expectedMessage: fmt.Sprintf("%s\n\n%s: %s\n%s: %s\n%s\n%s\n%s", AnnotationEntryHeader, EntryIDKey, gitinterface.ZeroHash.String(), SkipKey, "true", BeginMessage, base64.StdEncoding.EncodeToString([]byte("message1\nmessage2")), EndMessage), }, "annotation, no message, skip false": { entry: &AnnotationEntry{ RSLEntryIDs: []gitinterface.Hash{gitinterface.ZeroHash}, Skip: false, Message: "", }, expectedMessage: fmt.Sprintf("%s\n\n%s: %s\n%s: %s", AnnotationEntryHeader, EntryIDKey, gitinterface.ZeroHash.String(), SkipKey, "false"), }, "annotation, no message, skip false, multiple entry IDs": { entry: &AnnotationEntry{ RSLEntryIDs: []gitinterface.Hash{gitinterface.ZeroHash, gitinterface.ZeroHash}, Skip: false, Message: "", }, expectedMessage: fmt.Sprintf("%s\n\n%s: %s\n%s: %s\n%s: %s", AnnotationEntryHeader, EntryIDKey, gitinterface.ZeroHash.String(), EntryIDKey, gitinterface.ZeroHash.String(), SkipKey, "false"), }, "annotation, no message, small number": { entry: &AnnotationEntry{ RSLEntryIDs: []gitinterface.Hash{gitinterface.ZeroHash}, Skip: true, Message: "", Number: 1, }, expectedMessage: fmt.Sprintf("%s\n\n%s: %s\n%s: %s\n%s: %d", AnnotationEntryHeader, EntryIDKey, gitinterface.ZeroHash.String(), SkipKey, "true", NumberKey, 1), }, "annotation, no message, large number": { entry: &AnnotationEntry{ RSLEntryIDs: []gitinterface.Hash{gitinterface.ZeroHash}, Skip: true, Message: "", Number: math.MaxUint64, }, expectedMessage: fmt.Sprintf("%s\n\n%s: %s\n%s: %s\n%s: %d", AnnotationEntryHeader, EntryIDKey, gitinterface.ZeroHash.String(), SkipKey, "true", NumberKey, uint64(math.MaxUint64)), }, } for name, test := range tests { t.Run(name, func(t *testing.T) { message, err := test.entry.createCommitMessage(true) if err != nil { t.Fatal(err) } if !assert.Equal(t, test.expectedMessage, message) { t.Errorf("expected\n%s\n\ngot\n%s", test.expectedMessage, message) } }) } } func TestPropagationEntryCreateCommitMessage(t *testing.T) { nonZeroHash, err := gitinterface.NewHash("abcdef12345678900987654321fedcbaabcdef12") if err != nil { t.Fatal(err) } upstreamRepository := "https://git.example.com/example/repository" tests := map[string]struct { entry *PropagationEntry expectedMessage string }{ "entry, fully resolved ref": { entry: &PropagationEntry{ RefName: "refs/heads/main", TargetID: gitinterface.ZeroHash, UpstreamRepository: upstreamRepository, UpstreamEntryID: gitinterface.ZeroHash, }, expectedMessage: fmt.Sprintf("%s\n\n%s: %s\n%s: %s\n%s: %s\n%s: %s", PropagationEntryHeader, RefKey, "refs/heads/main", TargetIDKey, gitinterface.ZeroHash.String(), UpstreamRepositoryKey, upstreamRepository, UpstreamEntryIDKey, gitinterface.ZeroHash.String()), }, "entry, non-zero commit": { entry: &PropagationEntry{ RefName: "refs/heads/main", TargetID: nonZeroHash, UpstreamRepository: upstreamRepository, UpstreamEntryID: nonZeroHash, }, expectedMessage: fmt.Sprintf("%s\n\n%s: %s\n%s: %s\n%s: %s\n%s: %s", PropagationEntryHeader, RefKey, "refs/heads/main", TargetIDKey, "abcdef12345678900987654321fedcbaabcdef12", UpstreamRepositoryKey, upstreamRepository, UpstreamEntryIDKey, "abcdef12345678900987654321fedcbaabcdef12"), }, "entry, fully resolved ref, small number": { entry: &PropagationEntry{ RefName: "refs/heads/main", TargetID: gitinterface.ZeroHash, UpstreamRepository: upstreamRepository, UpstreamEntryID: gitinterface.ZeroHash, Number: 1, }, 
expectedMessage: fmt.Sprintf("%s\n\n%s: %s\n%s: %s\n%s: %s\n%s: %s\n%s: %d", PropagationEntryHeader, RefKey, "refs/heads/main", TargetIDKey, gitinterface.ZeroHash.String(), UpstreamRepositoryKey, upstreamRepository, UpstreamEntryIDKey, gitinterface.ZeroHash.String(), NumberKey, 1), }, "entry, fully resolved ref, large number": { entry: &PropagationEntry{ RefName: "refs/heads/main", TargetID: gitinterface.ZeroHash, UpstreamRepository: upstreamRepository, UpstreamEntryID: gitinterface.ZeroHash, Number: math.MaxUint64, }, expectedMessage: fmt.Sprintf("%s\n\n%s: %s\n%s: %s\n%s: %s\n%s: %s\n%s: %d", PropagationEntryHeader, RefKey, "refs/heads/main", TargetIDKey, gitinterface.ZeroHash.String(), UpstreamRepositoryKey, upstreamRepository, UpstreamEntryIDKey, gitinterface.ZeroHash.String(), NumberKey, uint64(math.MaxUint64)), }, } for name, test := range tests { t.Run(name, func(t *testing.T) { message, _ := test.entry.createCommitMessage(true) if !assert.Equal(t, test.expectedMessage, message) { t.Errorf("expected\n%s\n\ngot\n%s", test.expectedMessage, message) } }) } } func TestParseRSLEntryText(t *testing.T) { nonZeroHash, err := gitinterface.NewHash("abcdef12345678900987654321fedcbaabcdef12") if err != nil { t.Fatal(err) } upstreamRepository := "https://git.example.com/example/repository" tests := map[string]struct { expectedEntry Entry expectedError error message string }{ "entry, fully resolved ref": { expectedEntry: &ReferenceEntry{ ID: gitinterface.ZeroHash, RefName: "refs/heads/main", TargetID: gitinterface.ZeroHash, }, message: fmt.Sprintf("%s\n\n%s: %s\n%s: %s", ReferenceEntryHeader, RefKey, "refs/heads/main", TargetIDKey, gitinterface.ZeroHash.String()), }, "entry, non-zero commit": { expectedEntry: &ReferenceEntry{ ID: gitinterface.ZeroHash, RefName: "refs/heads/main", TargetID: nonZeroHash, }, message: fmt.Sprintf("%s\n\n%s: %s\n%s: %s", ReferenceEntryHeader, RefKey, "refs/heads/main", TargetIDKey, "abcdef12345678900987654321fedcbaabcdef12"), }, "entry, missing header": { expectedError: ErrInvalidRSLEntry, message: fmt.Sprintf("%s: %s\n%s: %s", RefKey, "refs/heads/main", TargetIDKey, gitinterface.ZeroHash.String()), }, "entry, missing information": { expectedError: ErrInvalidRSLEntry, message: fmt.Sprintf("%s\n\n%s: %s", ReferenceEntryHeader, RefKey, "refs/heads/main"), }, "annotation, no message": { expectedEntry: &AnnotationEntry{ ID: gitinterface.ZeroHash, RSLEntryIDs: []gitinterface.Hash{gitinterface.ZeroHash}, Skip: true, Message: "", }, message: fmt.Sprintf("%s\n\n%s: %s\n%s: %s", AnnotationEntryHeader, EntryIDKey, gitinterface.ZeroHash.String(), SkipKey, "true"), }, "annotation, with message": { expectedEntry: &AnnotationEntry{ ID: gitinterface.ZeroHash, RSLEntryIDs: []gitinterface.Hash{gitinterface.ZeroHash}, Skip: true, Message: "message", }, message: fmt.Sprintf("%s\n\n%s: %s\n%s: %s\n%s\n%s\n%s", AnnotationEntryHeader, EntryIDKey, gitinterface.ZeroHash.String(), SkipKey, "true", BeginMessage, base64.StdEncoding.EncodeToString([]byte("message")), EndMessage), }, "annotation, with multi-line message": { expectedEntry: &AnnotationEntry{ ID: gitinterface.ZeroHash, RSLEntryIDs: []gitinterface.Hash{gitinterface.ZeroHash}, Skip: true, Message: "message1\nmessage2", }, message: fmt.Sprintf("%s\n\n%s: %s\n%s: %s\n%s\n%s\n%s", AnnotationEntryHeader, EntryIDKey, gitinterface.ZeroHash.String(), SkipKey, "true", BeginMessage, base64.StdEncoding.EncodeToString([]byte("message1\nmessage2")), EndMessage), }, "annotation, no message, skip false": { expectedEntry: &AnnotationEntry{ ID: 
gitinterface.ZeroHash, RSLEntryIDs: []gitinterface.Hash{gitinterface.ZeroHash}, Skip: false, Message: "", }, message: fmt.Sprintf("%s\n\n%s: %s\n%s: %s", AnnotationEntryHeader, EntryIDKey, gitinterface.ZeroHash.String(), SkipKey, "false"), }, "annotation, no message, skip false, multiple entry IDs": { expectedEntry: &AnnotationEntry{ ID: gitinterface.ZeroHash, RSLEntryIDs: []gitinterface.Hash{gitinterface.ZeroHash, gitinterface.ZeroHash}, Skip: false, Message: "", }, message: fmt.Sprintf("%s\n\n%s: %s\n%s: %s\n%s: %s", AnnotationEntryHeader, EntryIDKey, gitinterface.ZeroHash.String(), EntryIDKey, gitinterface.ZeroHash.String(), SkipKey, "false"), }, "annotation, missing header": { expectedError: ErrInvalidRSLEntry, message: fmt.Sprintf("%s: %s\n%s: %s\n%s\n%s\n%s", EntryIDKey, gitinterface.ZeroHash.String(), SkipKey, "true", BeginMessage, base64.StdEncoding.EncodeToString([]byte("message")), EndMessage), }, "annotation, missing information": { expectedError: ErrInvalidRSLEntry, message: fmt.Sprintf("%s\n\n%s: %s", AnnotationEntryHeader, EntryIDKey, gitinterface.ZeroHash.String()), }, "propagation entry, fully resolved ref": { expectedEntry: &PropagationEntry{ ID: gitinterface.ZeroHash, RefName: "refs/heads/main", TargetID: gitinterface.ZeroHash, UpstreamRepository: upstreamRepository, UpstreamEntryID: gitinterface.ZeroHash, }, message: fmt.Sprintf("%s\n\n%s: %s\n%s: %s\n%s: %s\n%s: %s", PropagationEntryHeader, RefKey, "refs/heads/main", TargetIDKey, gitinterface.ZeroHash.String(), UpstreamRepositoryKey, upstreamRepository, UpstreamEntryIDKey, gitinterface.ZeroHash.String()), }, "propagation entry, non-zero commit": { expectedEntry: &PropagationEntry{ ID: gitinterface.ZeroHash, RefName: "refs/heads/main", TargetID: nonZeroHash, UpstreamRepository: upstreamRepository, UpstreamEntryID: nonZeroHash, }, message: fmt.Sprintf("%s\n\n%s: %s\n%s: %s\n%s: %s\n%s: %s", PropagationEntryHeader, RefKey, "refs/heads/main", TargetIDKey, "abcdef12345678900987654321fedcbaabcdef12", UpstreamRepositoryKey, upstreamRepository, UpstreamEntryIDKey, "abcdef12345678900987654321fedcbaabcdef12"), }, "propagation entry, missing information": { expectedError: ErrInvalidRSLEntry, message: fmt.Sprintf("%s\n\n%s: %s\n%s: %s\n%s: %s", PropagationEntryHeader, RefKey, "refs/heads/main", TargetIDKey, "abcdef12345678900987654321fedcbaabcdef12", UpstreamRepositoryKey, upstreamRepository), }, } for name, test := range tests { t.Run(name, func(t *testing.T) { entry, err := parseRSLEntryText(gitinterface.ZeroHash, test.message) if err != nil { assert.ErrorIs(t, err, test.expectedError) } else if !assert.Equal(t, test.expectedEntry, entry) { t.Errorf("expected\n%+v\n\ngot\n%+v", test.expectedEntry, entry) } }) } } func assertAnnotationsReferToEntry(t *testing.T, entry ReferenceUpdaterEntry, annotations []*AnnotationEntry) { t.Helper() if entry == nil || annotations == nil { t.Error("expected entry and annotations, received nil") } for _, annotation := range annotations { assert.True(t, annotation.RefersTo(entry.GetID())) assert.Equal(t, annotationMessage, annotation.Message) } } gittuf-0.9.0/internal/signerverifier/000077500000000000000000000000001475150141000176575ustar00rootroot00000000000000gittuf-0.9.0/internal/signerverifier/common/000077500000000000000000000000001475150141000211475ustar00rootroot00000000000000gittuf-0.9.0/internal/signerverifier/common/common.go000066400000000000000000000022671475150141000227750ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package common 
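// This file collects the error values shared by gittuf's signer/verifier
// implementations, along with a small helper for loading PEM-encoded
// certificates used by the sigstore signing and verification flows.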
import ( "crypto/x509" "errors" "fmt" "log/slog" "os" "github.com/sigstore/sigstore/pkg/cryptoutils" ) var ( ErrSignatureVerificationFailed = errors.New("failed to verify signature") ErrNotPrivateKey = errors.New("loaded key is not a private key") ErrUnknownKeyType = errors.New("unknown key type") ErrInvalidThreshold = errors.New("threshold is either less than 1 or greater than number of provided public keys") ) // LoadCertsFromPath opens the file at the specified path and parses the // certificates present in PEM form. This is similar to a helper in // https://github.com/sigstore/sigstore and is used in gittuf's sigstore signing // and verification flows. func LoadCertsFromPath(path string) ([]*x509.Certificate, error) { slog.Debug(fmt.Sprintf("Loading %s...", path)) pemBytes, err := os.ReadFile(path) if err != nil { return nil, err } certs, err := cryptoutils.UnmarshalCertificatesFromPEM(pemBytes) if err != nil { return nil, err } if len(certs) == 0 { return nil, fmt.Errorf("no certificates in file %s", path) } return certs, nil } gittuf-0.9.0/internal/signerverifier/dsse/000077500000000000000000000000001475150141000206155ustar00rootroot00000000000000gittuf-0.9.0/internal/signerverifier/dsse/dsse.go000066400000000000000000000102631475150141000221040ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package dsse import ( "context" "encoding/base64" "encoding/json" "fmt" "github.com/gittuf/gittuf/internal/signerverifier/common" "github.com/gittuf/gittuf/internal/signerverifier/sigstore" "github.com/gittuf/gittuf/internal/third_party/go-securesystemslib/dsse" protobundle "github.com/sigstore/protobuf-specs/gen/pb-go/bundle/v1" "google.golang.org/protobuf/encoding/protojson" "google.golang.org/protobuf/types/known/structpb" ) const PayloadType = "application/vnd.gittuf+json" // CreateEnvelope is an opinionated interface to create a DSSE envelope. It // accepts instances of tuf.RootMetadata, tuf.TargetsMetadata, etc. and marshals // the input prior to storing it as the envelope's payload. func CreateEnvelope(v any) (*dsse.Envelope, error) { b, err := json.Marshal(v) if err != nil { return nil, err } return &dsse.Envelope{ Signatures: []dsse.Signature{}, PayloadType: PayloadType, Payload: base64.StdEncoding.EncodeToString(b), }, nil } // SignEnvelope is an opinionated API to sign DSSE envelopes. It's opinionated // because it assumes the payload is Base 64 encoded, which is the expectation // for gittuf metadata. If one or more signatures from the provided signing key // already exist, they are all removed in favor of the new signature from that // key. 
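//
// A rough usage sketch (illustrative only; "metadata" and "keyPath" are
// hypothetical stand-ins for a JSON-marshalable metadata value and an SSH
// signing key path):
//
//	signer, err := ssh.NewSignerFromFile(keyPath)
//	if err != nil {
//		return err
//	}
//	env, err := CreateEnvelope(metadata)
//	if err != nil {
//		return err
//	}
//	env, err = SignEnvelope(ctx, env, signer)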
func SignEnvelope(ctx context.Context, envelope *dsse.Envelope, signer dsse.Signer) (*dsse.Envelope, error) { keyID, err := signer.KeyID() if err != nil { return nil, err } payload, err := base64.StdEncoding.DecodeString(envelope.Payload) if err != nil { return nil, err } pae := dsse.PAE(envelope.PayloadType, payload) sigBytes, err := signer.Sign(ctx, pae) if err != nil { return nil, err } var signature dsse.Signature if _, isSigstoreSigner := signer.(*sigstore.Signer); isSigstoreSigner { // Unpack the bundle to get the signature + verification material // Set extension in the signature object bundle := protobundle.Bundle{} if err := protojson.Unmarshal(sigBytes, &bundle); err != nil { return nil, err } actualSigBytes, err := protojson.Marshal(bundle.GetMessageSignature()) if err != nil { return nil, err } verificationMaterial := bundle.GetVerificationMaterial() verificationMaterialBytes, err := protojson.Marshal(verificationMaterial) if err != nil { return nil, err } verificationMaterialStruct := new(structpb.Struct) if err := protojson.Unmarshal(verificationMaterialBytes, verificationMaterialStruct); err != nil { return nil, err } signature = dsse.Signature{ Sig: base64.StdEncoding.EncodeToString(actualSigBytes), KeyID: keyID, Extension: &dsse.Extension{ Kind: sigstore.ExtensionMimeType, Ext: verificationMaterialStruct, }, } } else { signature = dsse.Signature{ Sig: base64.StdEncoding.EncodeToString(sigBytes), KeyID: keyID, } } // Preserve signatures that aren't from signer newSignatures := []dsse.Signature{} for _, sig := range envelope.Signatures { if sig.KeyID != keyID { newSignatures = append(newSignatures, sig) } } // Attach new signature from signer newSignatures = append(newSignatures, signature) // Replace existing list of signatures with new signatures in envelope envelope.Signatures = newSignatures return envelope, nil } // VerifyEnvelope verifies a DSSE envelope against an expected threshold using // a slice of verifiers passed into it. Threshold indicates the number of // providers that must validate the envelope. func VerifyEnvelope(ctx context.Context, envelope *dsse.Envelope, verifiers []dsse.Verifier, threshold int) ([]dsse.AcceptedKey, error) { if threshold < 1 { return nil, common.ErrInvalidThreshold } ev, err := dsse.NewEnvelopeVerifier(verifiers...) 
if err != nil { return nil, err } // We verify with threshold == 1 because we want control over the threshold // checks: we get all the verified keys back acceptedKeys, err := ev.Verify(ctx, envelope) if err != nil { return nil, err } if len(acceptedKeys) < threshold { return acceptedKeys, fmt.Errorf("accepted signatures do not match threshold, Found: %d, Expected %d", len(acceptedKeys), threshold) } return acceptedKeys, nil } gittuf-0.9.0/internal/signerverifier/dsse/dsse_test.go000066400000000000000000000055271475150141000231520ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package dsse import ( "context" "encoding/base64" "os" "path/filepath" "testing" "github.com/gittuf/gittuf/internal/signerverifier/ssh" artifacts "github.com/gittuf/gittuf/internal/testartifacts" sslibdsse "github.com/gittuf/gittuf/internal/third_party/go-securesystemslib/dsse" tufv01 "github.com/gittuf/gittuf/internal/tuf/v01" "github.com/stretchr/testify/assert" ) func TestCreateEnvelope(t *testing.T) { rootMetadata := tufv01.NewRootMetadata() env, err := CreateEnvelope(rootMetadata) assert.Nil(t, err) assert.Equal(t, PayloadType, env.PayloadType) assert.Equal(t, "eyJ0eXBlIjoicm9vdCIsImV4cGlyZXMiOiIiLCJrZXlzIjpudWxsLCJyb2xlcyI6bnVsbCwiZ2l0aHViQXBwcm92YWxzVHJ1c3RlZCI6ZmFsc2V9", env.Payload) } func TestSignEnvelope(t *testing.T) { keyPath := setupTestECDSAPair(t) signer, err := loadSSHSigner(keyPath) if err != nil { t.Fatal(err) } env, err := createSignedEnvelope(signer) if err != nil { t.Fatal(err) } assert.Len(t, env.Signatures, 1) assert.Equal(t, "SHA256:oNYBImx035m3rl1Sn/+j5DPrlS9+zXn7k3mjNrC5eto", env.Signatures[0].KeyID) env, err = SignEnvelope(context.Background(), env, signer) assert.Nil(t, err) assert.Len(t, env.Signatures, 1) assert.Equal(t, "SHA256:oNYBImx035m3rl1Sn/+j5DPrlS9+zXn7k3mjNrC5eto", env.Signatures[0].KeyID) } func TestVerifyEnvelope(t *testing.T) { keyPath := setupTestECDSAPair(t) signer, err := loadSSHSigner(keyPath) if err != nil { t.Fatal(err) } keyID, err := signer.KeyID() if err != nil { t.Fatal(err) } env, err := createSignedEnvelope(signer) if err != nil { t.Fatal(err) } acceptedKeys, err := VerifyEnvelope(context.Background(), env, []sslibdsse.Verifier{signer.Verifier}, 1) assert.Nil(t, err) assert.Equal(t, keyID, acceptedKeys[0].KeyID) } func loadSSHSigner(keyPath string) (*ssh.Signer, error) { key, err := ssh.NewKeyFromFile(keyPath) if err != nil { return nil, err } verifier, err := ssh.NewVerifierFromKey(key) if err != nil { return nil, err } return &ssh.Signer{ Verifier: verifier, Path: keyPath, }, nil } func createSignedEnvelope(signer *ssh.Signer) (*sslibdsse.Envelope, error) { message := []byte("test payload") payload := base64.StdEncoding.EncodeToString(message) env := &sslibdsse.Envelope{ PayloadType: "application/vnd.gittuf+text", Payload: payload, Signatures: []sslibdsse.Signature{}, } env, err := SignEnvelope(context.Background(), env, signer) if err != nil { return nil, err } return env, nil } func setupTestECDSAPair(t *testing.T) string { tmpDir := t.TempDir() privPath := filepath.Join(tmpDir, "ecdsa") if err := os.WriteFile(privPath, artifacts.SSHECDSAPrivate, 0o600); err != nil { t.Fatal(err) } if err := os.WriteFile(privPath+".pub", artifacts.SSHECDSAPublicSSH, 0o600); err != nil { t.Fatal(err) } return privPath } 
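// Illustrative sketch (not exercised by the tests above): verifying an
// envelope against a two-signature threshold would look roughly like the
// following, where "verifierA" and "verifierB" are hypothetical *ssh.Verifier
// values for two different keys:
//
//	acceptedKeys, err := VerifyEnvelope(ctx, env, []sslibdsse.Verifier{verifierA, verifierB}, 2)
//
// VerifyEnvelope returns the keys whose signatures verified and also returns
// an error if fewer than the requested threshold were accepted.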
gittuf-0.9.0/internal/signerverifier/gpg/000077500000000000000000000000001475150141000204345ustar00rootroot00000000000000gittuf-0.9.0/internal/signerverifier/gpg/gpg.go000066400000000000000000000020351475150141000215400ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package gpg import ( "bytes" "fmt" "strings" "github.com/ProtonMail/go-crypto/openpgp" "github.com/secure-systems-lab/go-securesystemslib/signerverifier" ) const KeyType = "gpg" // LoadGPGKeyFromBytes returns a signerverifier.SSLibKey for a GPG / PGP key passed in as // armored bytes. The returned signerverifier.SSLibKey uses the primary key's fingerprint as the // key ID. func LoadGPGKeyFromBytes(contents []byte) (*signerverifier.SSLibKey, error) { keyring, err := openpgp.ReadArmoredKeyRing(bytes.NewReader(contents)) if err != nil { return nil, err } // TODO: check if this is correct for subkeys fingerprint := fmt.Sprintf("%x", keyring[0].PrimaryKey.Fingerprint) publicKey := strings.TrimSpace(string(contents)) gpgKey := &signerverifier.SSLibKey{ KeyID: fingerprint, KeyType: KeyType, Scheme: KeyType, // TODO: this should use the underlying key algorithm KeyVal: signerverifier.KeyVal{ Public: publicKey, }, } return gpgKey, nil } gittuf-0.9.0/internal/signerverifier/gpg/gpg_test.go000066400000000000000000000007711475150141000226040ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package gpg import ( "testing" artifacts "github.com/gittuf/gittuf/internal/testartifacts" "github.com/stretchr/testify/assert" ) func TestLoadGPGKeyFromBytes(t *testing.T) { keyBytes := artifacts.GPGKey1Public key, err := LoadGPGKeyFromBytes(keyBytes) assert.Nil(t, err) assert.Equal(t, KeyType, key.KeyType) assert.Equal(t, KeyType, key.Scheme) assert.Equal(t, "157507bbe151e378ce8126c1dcfe043cdd2db96e", key.KeyID) } gittuf-0.9.0/internal/signerverifier/sigstore/000077500000000000000000000000001475150141000215165ustar00rootroot00000000000000gittuf-0.9.0/internal/signerverifier/sigstore/helpers.go000066400000000000000000000100671475150141000235130ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package sigstore import ( "encoding/base64" "encoding/json" "errors" "fmt" "io" "log/slog" "net/http" "net/url" "strings" "github.com/gittuf/gittuf/internal/signerverifier/common" "github.com/sigstore/sigstore-go/pkg/root" ) const fulcioConfigurationEndpoint = "/api/v2/configuration" func parseTokenForIdentityAndIssuer(token, fulcioURL string) (string, string, error) { tokenParts := strings.Split(token, ".") if len(tokenParts) < 3 { return "", "", fmt.Errorf("invalid token") } token = tokenParts[1] tokenBytes, err := base64.RawURLEncoding.DecodeString(token) if err != nil { return "", "", err } tok := &idToken{} if err := json.Unmarshal(tokenBytes, tok); err != nil { return "", "", err } issuer := issuerFromToken(tok) identity := subjectFromToken(tok) if fulcioURL != "" { slog.Debug(fmt.Sprintf("Querying '%s' for IDP configurations to see if a subject domain applies...", fulcioURL)) fulcio, err := url.Parse(fulcioURL) if err != nil { return "", "", fmt.Errorf("unable to query Fulcio instance '%s': %w", fulcioURL, err) } fulcio.Path = fulcioConfigurationEndpoint configurationEndpoint := fulcio.String() response, err := http.Get(configurationEndpoint) //nolint:gosec if err != nil { return "", "", fmt.Errorf("unable to query Fulcio instance '%s': %w", fulcioURL, err) } responseData, err := io.ReadAll(response.Body) if 
err != nil { return "", "", fmt.Errorf("unable to query Fulcio instance '%s': %w", fulcioURL, err) } type configResponse struct { Issuers []map[string]string `json:"issuers"` } responseObj := configResponse{Issuers: []map[string]string{}} if err := json.Unmarshal(responseData, &responseObj); err != nil { return "", "", fmt.Errorf("unable to query Fulcio instance '%s': %w", fulcioURL, err) } for _, issuerConfig := range responseObj.Issuers { if issuerConfig["issuerUrl"] != issuer { continue } issuerType, hasIssuerType := issuerConfig["issuerType"] if !hasIssuerType { slog.Debug("Fulcio instance does not list issuer type, cannot determine if subject domain must be added to identity") break } if issuerType == "username" || issuerType == "uri" { subjectDomain, hasSubjectDomain := issuerConfig["subjectDomain"] if !hasSubjectDomain { slog.Debug("Fulcio instance lists issuer type but does not list subject domain, cannot determine subject domain to add to identity") break } // Per the Fulcio spec, the subject domain is added after a '!' slog.Debug(fmt.Sprintf("Adding subject domain '%s' to identity '%s'...", subjectDomain, identity)) identity = fmt.Sprintf("%s!%s", identity, subjectDomain) } break } } return identity, issuer, nil } type idToken struct { Issuer string `json:"iss"` Subject string `json:"sub"` Email string `json:"email"` EmailVerified stringAsBool `json:"email_verified"` FederatedClaims *federatedClaims `json:"federated_claims"` } type stringAsBool bool func (sb *stringAsBool) UnmarshalJSON(b []byte) error { switch string(b) { case "true", `"true"`, "True", `"True"`: *sb = true case "false", `"false"`, "False", `"False"`: *sb = false default: return errors.New("invalid value for boolean") } return nil } type federatedClaims struct { ConnectorID string `json:"connector_id"` } func issuerFromToken(tok *idToken) string { if tok.FederatedClaims != nil && tok.FederatedClaims.ConnectorID != "" { return tok.FederatedClaims.ConnectorID } return tok.Issuer } func subjectFromToken(tok *idToken) string { if tok.Email != "" && tok.EmailVerified { return tok.Email } return tok.Subject } func parsePEMFile(path string) (*root.CertificateAuthority, error) { // This is taken from sigstore/sigstore-go certs, err := common.LoadCertsFromPath(path) if err != nil { return nil, err } var ca root.CertificateAuthority ca.Root = certs[len(certs)-1] if len(certs) > 1 { ca.Intermediates = certs[:len(certs)-1] } return &ca, nil } gittuf-0.9.0/internal/signerverifier/sigstore/options/000077500000000000000000000000001475150141000232115ustar00rootroot00000000000000gittuf-0.9.0/internal/signerverifier/sigstore/options/signer/000077500000000000000000000000001475150141000245005ustar00rootroot00000000000000gittuf-0.9.0/internal/signerverifier/sigstore/options/signer/signer.go000066400000000000000000000021331475150141000263150ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package signer const ( defaultIssuerURL = "https://oauth2.sigstore.dev/auth" defaultClientID = "sigstore" defaultFulcioURL = "https://fulcio.sigstore.dev" defaultRekorURL = "https://rekor.sigstore.dev" ) type Options struct { IssuerURL string ClientID string RedirectURL string FulcioURL string RekorURL string } var DefaultOptions = &Options{ IssuerURL: defaultIssuerURL, ClientID: defaultClientID, FulcioURL: defaultFulcioURL, RekorURL: defaultRekorURL, } type Option func(o *Options) func WithIssuerURL(issuerURL string) Option { return func(o *Options) { o.IssuerURL = issuerURL } } func 
WithClientID(clientID string) Option { return func(o *Options) { o.ClientID = clientID } } func WithRedirectURL(redirectURL string) Option { return func(o *Options) { o.RedirectURL = redirectURL } } func WithFulcioURL(fulcioURL string) Option { return func(o *Options) { o.FulcioURL = fulcioURL } } func WithRekorURL(rekorURL string) Option { return func(o *Options) { o.RekorURL = rekorURL } } gittuf-0.9.0/internal/signerverifier/sigstore/options/verifier/000077500000000000000000000000001475150141000250245ustar00rootroot00000000000000gittuf-0.9.0/internal/signerverifier/sigstore/options/verifier/verifier.go000066400000000000000000000005771475150141000271770ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package verifier const ( defaultRekorURL = "https://rekor.sigstore.dev" ) type Options struct { RekorURL string } var DefaultOptions = &Options{ RekorURL: defaultRekorURL, } type Option func(o *Options) func WithRekorURL(rekorURL string) Option { return func(o *Options) { o.RekorURL = rekorURL } } gittuf-0.9.0/internal/signerverifier/sigstore/sigstore.go000066400000000000000000000250461475150141000237130ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package sigstore import ( "bytes" "context" "crypto" "crypto/sha256" "crypto/x509" "encoding/hex" "encoding/pem" "fmt" "log" "log/slog" "os" "time" signeropts "github.com/gittuf/gittuf/internal/signerverifier/sigstore/options/signer" verifieropts "github.com/gittuf/gittuf/internal/signerverifier/sigstore/options/verifier" "github.com/secure-systems-lab/go-securesystemslib/signerverifier" protobundle "github.com/sigstore/protobuf-specs/gen/pb-go/bundle/v1" protocommon "github.com/sigstore/protobuf-specs/gen/pb-go/common/v1" "github.com/sigstore/sigstore-go/pkg/bundle" "github.com/sigstore/sigstore-go/pkg/root" "github.com/sigstore/sigstore-go/pkg/sign" sigstoretuf "github.com/sigstore/sigstore-go/pkg/tuf" "github.com/sigstore/sigstore-go/pkg/verify" "github.com/sigstore/sigstore/pkg/oauthflow" "google.golang.org/protobuf/encoding/protojson" "google.golang.org/protobuf/types/known/structpb" ) const ( KeyType = "sigstore-oidc" KeyScheme = "fulcio" ExtensionMimeType = "application/vnd.dev.sigstore.verificationmaterial;version=0.3" GitConfigIssuer = "gitsign.issuer" GitConfigClientID = "gitsign.clientid" GitConfigFulcio = "gitsign.fulcio" GitConfigRekor = "gitsign.rekor" GitConfigRedirectURL = "gitsign.redirecturl" EnvSigstoreRootFile = "SIGSTORE_ROOT_FILE" EnvSigstoreCTLogPublicKeyFile = "SIGSTORE_CT_LOG_PUBLIC_KEY_FILE" EnvSigstoreRekorPublicKey = "SIGSTORE_REKOR_PUBLIC_KEY" sigstoreBundleMimeType = "application/vnd.dev.sigstore.bundle+json;version=0.3" ) type Verifier struct { rekorURL string issuer string identity string ext *structpb.Struct } func NewVerifierFromIdentityAndIssuer(identity, issuer string, opts ...verifieropts.Option) *Verifier { options := verifieropts.DefaultOptions for _, fn := range opts { fn(options) } return &Verifier{ rekorURL: options.RekorURL, issuer: issuer, identity: identity, } } func (v *Verifier) Verify(_ context.Context, data, sig []byte) error { // data is PAE(envelope) // sig is raw sigBytes // extension is set in the verifier slog.Debug("Using Sigstore verifier...") trustedRoot, privateInstance, err := v.getTUFRoot() if err != nil { slog.Debug(fmt.Sprintf("Error getting TUF root: %v", err)) return err } slog.Debug("Loaded Sigstore instance's root of trust") opts := []verify.VerifierOption{ 
verify.WithTransparencyLog(1), verify.WithIntegratedTimestamps(1), } if privateInstance { // privateInstance requires online verification if rekor is configured // using env var rather than TUF. // This is because the trusted_root.json delivered via TUF indicates // from when the log can be trusted, which we cannot decide (without a // custom env var just for that). opts = append(opts, verify.WithOnlineVerification()) } sev, err := verify.NewSignedEntityVerifier(trustedRoot, opts...) if err != nil { slog.Debug(fmt.Sprintf("Error creating signed entity verifier: %v", err)) return err } verificationMaterial := new(protobundle.VerificationMaterial) extBytes, err := protojson.Marshal(v.ext) if err != nil { return err } if err := protojson.Unmarshal(extBytes, verificationMaterial); err != nil { slog.Debug(fmt.Sprintf("Error creating verification material: %v", err)) return err } messageSignature := new(protocommon.MessageSignature) if err := protojson.Unmarshal(sig, messageSignature); err != nil { slog.Debug(fmt.Sprintf("Invalid Sigstore signature: %v", err)) return err } // create protobuf bundle pbBundle := &protobundle.Bundle{ MediaType: sigstoreBundleMimeType, VerificationMaterial: verificationMaterial, Content: &protobundle.Bundle_MessageSignature{ MessageSignature: messageSignature, }, } apiBundle, err := bundle.NewBundle(pbBundle) if err != nil { slog.Debug(fmt.Sprintf("Unable to create Sigstore bundle for verification: %v", err)) return err } expectedIdentity, err := verify.NewShortCertificateIdentity(v.issuer, "", v.identity, "") if err != nil { slog.Debug(fmt.Sprintf("Unable to create expected identity constraint: %v", err)) return err } result, err := sev.Verify( apiBundle, verify.NewPolicy( verify.WithArtifact(bytes.NewBuffer(data)), verify.WithCertificateIdentity(expectedIdentity), ), ) if err != nil { slog.Debug(fmt.Sprintf("Unable to verify Sigstore signature: %v", err)) return err } slog.Debug(fmt.Sprintf("Verified Sigstore signature issued by '%s' for '%s'", result.VerifiedIdentity.Issuer.Issuer, result.VerifiedIdentity.SubjectAlternativeName.SubjectAlternativeName)) return nil } func (v *Verifier) KeyID() (string, error) { return fmt.Sprintf("%s::%s", v.identity, v.issuer), nil } func (v *Verifier) Public() crypto.PublicKey { // TODO return nil } func (v *Verifier) SetExtension(ext *structpb.Struct) { v.ext = ext } func (v *Verifier) ExpectedExtensionKind() string { // TODO: versioning? return ExtensionMimeType } func (v *Verifier) getTUFRoot() (root.TrustedMaterial, bool, error) { // The env vars we look at for private sigstore: // SIGSTORE_ROOT_FILE -> the Fulcio root // SIGSTORE_CT_LOG_PUBLIC_KEY_FILE -> Fulcio's CT Log pubkey // SIGSTORE_REKOR_PUBLIC_KEY -> Rekor's pubkey // TODO: Support ctlog and tsa fulcioRootFilePath := os.Getenv(EnvSigstoreRootFile) ctLogPublicKeyFilePath := os.Getenv(EnvSigstoreCTLogPublicKeyFile) rekorPublicKeyFilePath := os.Getenv(EnvSigstoreRekorPublicKey) if fulcioRootFilePath != "" || ctLogPublicKeyFilePath != "" || rekorPublicKeyFilePath != "" { // if any env var is set, require all? 
if fulcioRootFilePath == "" || ctLogPublicKeyFilePath == "" || rekorPublicKeyFilePath == "" { return nil, false, fmt.Errorf("partial env var set") // TODO } slog.Debug("Using environment variables to establish trust for Sigstore instance...") fulcioCertAuthorities := []root.CertificateAuthority{} cert, err := parsePEMFile(fulcioRootFilePath) if err != nil { return nil, false, err } fulcioCertAuthorities = append(fulcioCertAuthorities, *cert) rekorPubKeyBytes, err := os.ReadFile(rekorPublicKeyFilePath) if err != nil { return nil, false, err } block, _ := pem.Decode(rekorPubKeyBytes) if block == nil { return nil, false, fmt.Errorf("failed to decode rekor public key") } rekorKey, err := x509.ParsePKIXPublicKey(block.Bytes) if err != nil { return nil, false, err } keyHash := sha256.Sum256(block.Bytes) keyID := hex.EncodeToString(keyHash[:]) rekorTransparencyLog := &root.TransparencyLog{ BaseURL: v.rekorURL, HashFunc: crypto.SHA256, ID: keyHash[:], PublicKey: rekorKey, SignatureHashFunc: crypto.SHA256, } rekorTransparencyLogs := map[string]*root.TransparencyLog{ keyID: rekorTransparencyLog, } // TODO: CT Log // TODO TSA trustedRoot, err := root.NewTrustedRoot(root.TrustedRootMediaType01, fulcioCertAuthorities, nil, nil, rekorTransparencyLogs) return trustedRoot, true, err } // Use the TUF flow // TODO: support custom sigstore TUF root URL tufClient, err := sigstoretuf.New(sigstoretuf.DefaultOptions()) if err != nil { return nil, false, err } trustedRootJSON, err := tufClient.GetTarget("trusted_root.json") if err != nil { return nil, false, err } trustedRoot, err := root.NewTrustedRootFromJSON(trustedRootJSON) return trustedRoot, false, err } type Signer struct { issuerURL string clientID string redirectURL string fulcioURL string rekorURL string token string *Verifier } func NewSigner(opts ...signeropts.Option) *Signer { options := signeropts.DefaultOptions for _, fn := range opts { fn(options) } return &Signer{ issuerURL: options.IssuerURL, clientID: options.ClientID, redirectURL: options.RedirectURL, fulcioURL: options.FulcioURL, rekorURL: options.RekorURL, Verifier: &Verifier{ rekorURL: options.RekorURL, }, } } func (s *Signer) Sign(_ context.Context, data []byte) ([]byte, error) { content := &sign.PlainData{Data: data} keypair, err := sign.NewEphemeralKeypair(nil) if err != nil { return nil, err } // TODO: support private sigstore by reading config opts := sign.BundleOptions{} // We reuse the token if it's already been fetched once for this signer // object // getIDToken also populates the Verifier's identity and issuer pieces token, err := s.getIDToken() if err != nil { return nil, err } opts.CertificateProviderOptions = &sign.CertificateProviderOptions{IDToken: token} fulcio := s.getFulcioInstance() opts.CertificateProvider = fulcio // TODO: TSA support? 
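// The Rekor client configured below is passed to sign.Bundle through
// opts.TransparencyLogs, so the signature is recorded in the transparency log
// as part of producing the bundle.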
rekor := s.getRekorInstance() opts.TransparencyLogs = append(opts.TransparencyLogs, rekor) bundle, err := sign.Bundle(content, keypair, opts) if err != nil { return nil, err } bundleJSON, err := protojson.Marshal(bundle) if err != nil { log.Fatal(err) } return bundleJSON, nil } func (s *Signer) KeyID() (string, error) { // verifier can't return error verifierKeyID, _ := s.Verifier.KeyID() //nolint:errcheck if verifierKeyID == "::" { // verifier.identity and verifier.issuer are empty resulting in this // return value // getIDToken will populate verifier _, err := s.getIDToken() if err != nil { return "", err } } return s.Verifier.KeyID() } // MetadataKey returns the securesystemslib representation of the key, used for // its representation in gittuf metadata. func (s *Signer) MetadataKey() (*signerverifier.SSLibKey, error) { keyID, err := s.KeyID() if err != nil { return nil, err } return &signerverifier.SSLibKey{ KeyID: keyID, KeyType: KeyType, Scheme: KeyScheme, KeyVal: signerverifier.KeyVal{ Identity: s.Verifier.identity, Issuer: s.Verifier.issuer, }, }, nil } func (s *Signer) getIDToken() (string, error) { if s.token == "" { // TODO: support client secret? token, err := oauthflow.OIDConnect(s.issuerURL, s.clientID, "", s.redirectURL, oauthflow.DefaultIDTokenGetter) if err != nil { return "", err } s.token = token.RawString // Set identity and issuer pieces identity, issuer, err := parseTokenForIdentityAndIssuer(s.token, s.fulcioURL) if err != nil { return "", err } s.Verifier.identity = identity s.Verifier.issuer = issuer } return s.token, nil } func (s *Signer) getFulcioInstance() *sign.Fulcio { fulcioOpts := &sign.FulcioOptions{ BaseURL: s.fulcioURL, Timeout: time.Minute, Retries: 1, } return sign.NewFulcio(fulcioOpts) } func (s *Signer) getRekorInstance() *sign.Rekor { rekorOpts := &sign.RekorOptions{ BaseURL: s.rekorURL, Timeout: 90 * time.Second, Retries: 1, } return sign.NewRekor(rekorOpts) } gittuf-0.9.0/internal/signerverifier/ssh/000077500000000000000000000000001475150141000204545ustar00rootroot00000000000000gittuf-0.9.0/internal/signerverifier/ssh/ssh.go000066400000000000000000000147361475150141000216130ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package ssh import ( "bytes" "context" "crypto" "crypto/sha256" "encoding/base64" "encoding/hex" "fmt" "os" "os/exec" "path/filepath" "strings" "testing" "github.com/hiddeco/sshsig" "github.com/secure-systems-lab/go-securesystemslib/signerverifier" "golang.org/x/crypto/ssh" ) const ( SigNamespace = "git" KeyType = "ssh" ) // Verifier is a dsse.Verifier implementation for SSH keys. type Verifier struct { keyID string sshKey ssh.PublicKey } // Verify implements the dsse.Verifier.Verify interface for SSH keys. func (v *Verifier) Verify(_ context.Context, data []byte, sig []byte) error { signature, err := sshsig.Unarmor(sig) if err != nil { return fmt.Errorf("failed to parse ssh signature: %w", err) } message := bytes.NewReader(data) // ssh-keygen uses sha512 to sign with **any*** key hash := sshsig.HashSHA512 if err := sshsig.Verify(message, signature, v.sshKey, hash, SigNamespace); err != nil { return fmt.Errorf("failed to verify ssh signature: %w", err) } return nil } // KeyID implements the dsse.Verifier.KeyID interface for SSH keys. // FIXME: consider removing error in interface; a dsse.Verifier needs a keyid func (v *Verifier) KeyID() (string, error) { return v.keyID, nil } // Public implements the dsse.Verifier.Public interface for SSH keys. 
// FIXME: consider removing in interface, "Verify()" is all that's needed func (v *Verifier) Public() crypto.PublicKey { return v.sshKey.(ssh.CryptoPublicKey).CryptoPublicKey() } func (v *Verifier) MetadataKey() *signerverifier.SSLibKey { return newSSHKey(v.sshKey, v.keyID) } // Signer is a dsse.Signer implementation for SSH keys. type Signer struct { Path string *Verifier } // Sign implements the dsse.Signer.Sign interface for SSH keys. // It signs using "s.Path" to a public or private, encrypted or plaintext, rsa, // ecdsa or ed25519 key file in a format supported by "ssh-keygen". This aligns // with the git "user.signingKey" option. // https://git-scm.com/docs/git-config#Documentation/git-config.txt-usersigningKey func (s *Signer) Sign(_ context.Context, data []byte) ([]byte, error) { cmd := exec.Command("ssh-keygen", "-Y", "sign", "-n", SigNamespace, "-f", s.Path) //nolint:gosec cmd.Stdin = bytes.NewBuffer(data) output, err := cmd.Output() if err != nil { return nil, fmt.Errorf("failed to run command %v: %w", cmd, err) } return output, nil } // NewKeyFromFile imports an ssh SSlibKey from the passed path. // The path can point to a public or private, encrypted or plaintext, rsa, // ecdsa or ed25519 key file in a format supported by "ssh-keygen". This aligns // with the git "user.signingKey" option. // https://git-scm.com/docs/git-config#Documentation/git-config.txt-usersigningKey func NewKeyFromFile(path string) (*signerverifier.SSLibKey, error) { cmd := exec.Command("ssh-keygen", "-m", "rfc4716", "-e", "-f", path) output, err := cmd.CombinedOutput() if err != nil { return nil, fmt.Errorf("failed to run command %v: %w %s", cmd, err, string(output)) } sshPub, err := parseSSH2Key(string(output)) if err != nil { return nil, fmt.Errorf("failed to parse SSH2 key: %w", err) } return newSSHKey(sshPub, ""), nil } // NewKeyFromBytes returns an ssh SSLibKey from the passed bytes. It's meant to // be used for tests as that's when we directly deal with key bytes. func NewKeyFromBytes(t *testing.T, keyB []byte) *signerverifier.SSLibKey { t.Helper() testName := strings.ReplaceAll(t.Name(), " ", "__") testName = strings.ReplaceAll(testName, "/", "__") testName = strings.ReplaceAll(testName, "\\", "__") hash := sha256.Sum256(keyB) keyName := fmt.Sprintf("%s-%s", testName, hex.EncodeToString(hash[:])) keyPath := filepath.Join(os.TempDir(), keyName) if err := os.WriteFile(keyPath, keyB, 0o600); err != nil { t.Fatal(err) } defer os.Remove(keyPath) //nolint:errcheck key, err := NewKeyFromFile(keyPath) if err != nil { t.Fatal(err) } return key } // NewVerifierFromKey creates a new Verifier from SSlibKey of type ssh. func NewVerifierFromKey(key *signerverifier.SSLibKey) (*Verifier, error) { if key.KeyType != KeyType { return nil, fmt.Errorf("wrong keyType: %s", key.KeyType) } sshKey, err := parseSSH2Body(key.KeyVal.Public) if err != nil { return nil, fmt.Errorf("failed to parse ssh public key material: %w", err) } return &Verifier{ keyID: key.KeyID, sshKey: sshKey, }, nil } // NewSignerFromFile creates an SSH signer from the passed path. func NewSignerFromFile(path string) (*Signer, error) { keyObj, err := NewKeyFromFile(path) if err != nil { return nil, err } verifier, err := NewVerifierFromKey(keyObj) if err != nil { return nil, err } return &Signer{ Verifier: verifier, Path: path, }, nil } // parseSSH2Body parses a base64-encoded SSH2 wire format key. 
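// The body is the base64 blob found between the RFC4716 begin/end markers,
// i.e. the same wire-format material that follows the key type in a one-line
// OpenSSH public key and that newSSHKey stores in KeyVal.Public.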
func parseSSH2Body(body string) (ssh.PublicKey, error) { bodyBytes, err := base64.StdEncoding.DecodeString(body) if err != nil { return nil, err } return ssh.ParsePublicKey(bodyBytes) } // parseSSH2Key parses a SSH2 public key as defined in RFC4716 (section 3.) // NOTE: // - only supports "\n" as line termination character // - does not validate line length, or header tag or value format // - discards headers func parseSSH2Key(data string) (ssh.PublicKey, error) { beginMark := "---- BEGIN SSH2 PUBLIC KEY ----" endMark := "---- END SSH2 PUBLIC KEY ----" lineSep := "\n" headerSep := ":" continues := "\\" // Normalize and trim newlines data = strings.ReplaceAll(data, "\r\n", lineSep) data = strings.TrimSpace(data) // Strip begin and end markers lines := strings.Split(data, lineSep) if lines[0] != beginMark { return nil, fmt.Errorf("expected '%s' in '%s'", beginMark, lines[0]) } last := len(lines) - 1 if lines[last] != endMark { return nil, fmt.Errorf("expected '%s' in '%s'", endMark, lines[last]) } lines = lines[1:last] // Strip headers var i int for i = 0; i < len(lines); i++ { if strings.Contains(lines[i], headerSep) { continue } // Skip i==1, first line can not be a continued line if i > 0 && strings.HasSuffix(lines[i-1], continues) { continue } break } // Parse key material body := strings.Join(lines[i:], "") return parseSSH2Body(body) } func newSSHKey(key ssh.PublicKey, keyID string) *signerverifier.SSLibKey { if keyID == "" { keyID = ssh.FingerprintSHA256(key) } return &signerverifier.SSLibKey{ KeyID: keyID, KeyType: KeyType, Scheme: key.Type(), KeyVal: signerverifier.KeyVal{Public: base64.StdEncoding.EncodeToString(key.Marshal())}, } } gittuf-0.9.0/internal/signerverifier/ssh/ssh_test.go000066400000000000000000000105451475150141000226440ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package ssh import ( "context" "os" "path/filepath" "runtime" "strings" "testing" artifacts "github.com/gittuf/gittuf/internal/testartifacts" "github.com/secure-systems-lab/go-securesystemslib/signerverifier" "github.com/stretchr/testify/assert" ) // Basic smoke test for ssh package for all supported keys func TestSSH(t *testing.T) { keyidRSA := "SHA256:ESJezAOo+BsiEpddzRXS6+wtF16FID4NCd+3gj96rFo" keyidECDSA := "SHA256:oNYBImx035m3rl1Sn/+j5DPrlS9+zXn7k3mjNrC5eto" keyidEd25519 := "SHA256:cewFulOIcROWnolPTGEQXG4q7xvLIn3kNTCMqdfoP4E" tests := []struct { keyName string keyBytes []byte keyID string }{ {"rsa", artifacts.SSHRSAPrivate, keyidRSA}, {"rsa.pub", artifacts.SSHRSAPublicSSH, keyidRSA}, {"rsa_enc", artifacts.SSHRSAPrivateEnc, keyidRSA}, {"rsa_enc.pub", artifacts.SSHRSAPublicSSH, keyidRSA}, {"ecdsa", artifacts.SSHECDSAPrivate, keyidECDSA}, {"ecdsa.pub", artifacts.SSHECDSAPublicSSH, keyidECDSA}, {"ecdsa_enc", artifacts.SSHECDSAPrivateEnc, keyidECDSA}, {"ecdsa_enc.pub", artifacts.SSHECDSAPublicSSH, keyidECDSA}, {"ed25519", artifacts.SSHED25519Private, keyidEd25519}, {"ed25519.pub", artifacts.SSHED25519PublicSSH, keyidEd25519}, {"ed25519_enc", artifacts.SSHED25519PrivateEnc, keyidEd25519}, {"ed25519_enc.pub", artifacts.SSHED25519PublicSSH, keyidEd25519}, } // Setup tests tmpDir := t.TempDir() // Write script to mock password prompt scriptPath := filepath.Join(tmpDir, "askpass.sh") if err := os.WriteFile(scriptPath, artifacts.AskpassScript, 0o500); err != nil { //nolint:gosec t.Fatal(err) } // Write test key pairs to temp dir with permissions required by ssh-keygen for _, test := range tests { keyPath := filepath.Join(tmpDir, test.keyName) if err := 
os.WriteFile(keyPath, test.keyBytes, 0o600); err != nil { t.Fatal(err) } } data := []byte("DATA") notData := []byte("NOT DATA") // Run tests for _, test := range tests { t.Run(test.keyName, func(t *testing.T) { if strings.Contains(test.keyName, "_enc") { if runtime.GOOS == "windows" { t.Skip("TODO: test encrypted keys on windows") } t.Setenv("SSH_ASKPASS", scriptPath) t.Setenv("SSH_ASKPASS_REQUIRE", "force") } keyPath := filepath.Join(tmpDir, test.keyName) key, err := NewKeyFromFile(keyPath) if err != nil { t.Fatalf("%s: %v", test.keyName, err) } assert.Equal(t, key.KeyID, test.keyID, ) verifier, err := NewVerifierFromKey(key) if err != nil { t.Fatalf("%s: %v", test.keyName, err) } signer, err := NewSignerFromFile(keyPath) if err != nil { t.Fatalf("%s: %v", test.keyName, err) } sig, err := signer.Sign(context.Background(), data) if err != nil { t.Fatalf("%s: %v", test.keyName, err) } err = verifier.Verify(context.Background(), data, sig) if err != nil { t.Fatalf("%s: %v", test.keyName, err) } err = verifier.Verify(context.Background(), notData, sig) if err == nil { t.Fatalf("%s: %v", test.keyName, err) } }) } } // Test parseSSH2Key helper function (rsa only) func TestParseSSH2Key(t *testing.T) { data := `---- BEGIN SSH2 PUBLIC KEY ---- Comment: "3072-bit RSA, converted by me@me.me from OpenSSH" AAAAB3NzaC1yc2EAAAADAQABAAABgQDEI4rdCY/zA3oOMet1JYJ+VugUapNfj7hcAZem1C Rusd5FTiWVmNh4yywgA+1JWDsBnyLfbOZBiz4fiQQ++bRF/mDXQx2Qr2xgCS27tNyyv8tf ERGuglAu69T7aLsfPGn4WCaVX3+OuALZVaQl/F5MzoDkiaZkCsBrVZkfL3393Zlhseb/bY 87f7UOwArq3WMMK9Qp0cO8/8rsZnzu3nFClYSILKUx7Vrf7uSaUtl39Dh/QMX1m6Ax0Mh4 3gMnk+Fbrhai+BWo3Y58A5+LBUL3jqDkmXzFvhYJgGKISU5nfKCHDDqlug+l5wJmGus1G8 jZ5uY7s2ZHS5yumPQNoCIZztmLm0DgQqNN4J+Yub5+L6yCgA1Q6mKq/631/DyHvF8e5Gln COb1zE7zaJacJ42tNdVq7Z3x+Hik9PRfgBPt1oF41SFSCp0YRPLxLMFdTjNgV3HZXVNlq6 6IhyoDZ2hjd5XmMmq7h1a8IybBsItJ8Ikk4X12vIzCSqOlylZS4+U= ---- END SSH2 PUBLIC KEY ----` key, err := parseSSH2Key(data) if err != nil { t.Fatalf("%v", err) } assert.Equal(t, key.Type(), "ssh-rsa") } func TestNewVerifierFromKey(t *testing.T) { sslibKey := &signerverifier.SSLibKey{ KeyID: "SHA256:cewFulOIcROWnolPTGEQXG4q7xvLIn3kNTCMqdfoP4E", KeyType: "ssh", Scheme: "ssh-ed25519", KeyVal: signerverifier.KeyVal{Public: "AAAAC3NzaC1lZDI1NTE5AAAAIPu3Q15xYZOCg7kzYoApSgy/fPumLVHgSQO+bjSwdGQg"}, } verifier, err := NewVerifierFromKey(sslibKey) if err != nil { t.Fatalf("%v", err) } keyid, _ := verifier.KeyID() assert.Equal(t, sslibKey.KeyID, keyid) } gittuf-0.9.0/internal/testartifacts/000077500000000000000000000000001475150141000175145ustar00rootroot00000000000000gittuf-0.9.0/internal/testartifacts/gpg.go000066400000000000000000000005121475150141000206160ustar00rootroot00000000000000// SPDX-License-Identifier: Apache-2.0 package artifacts import _ "embed" //go:embed testdata/keys/gpg/1.pub.asc var GPGKey1Public []byte //go:embed testdata/keys/gpg/1.asc var GPGKey1Private []byte //go:embed testdata/keys/gpg/2.pub.asc var GPGKey2Public []byte //go:embed testdata/keys/gpg/2.asc var GPGKey2Private []byte gittuf-0.9.0/internal/testartifacts/scripts.go000066400000000000000000000002151475150141000215300ustar00rootroot00000000000000// SPDX-License-Identifier: Apache-2.0 package artifacts import _ "embed" //go:embed testdata/scripts/askpass.sh var AskpassScript []byte gittuf-0.9.0/internal/testartifacts/ssh.go000066400000000000000000000015601475150141000206420ustar00rootroot00000000000000// SPDX-License-Identifier: Apache-2.0 package artifacts import _ "embed" //go:embed testdata/keys/ssh/rsa.pem var SSHRSAPublic []byte //go:embed 
testdata/keys/ssh/rsa.pub var SSHRSAPublicSSH []byte //go:embed testdata/keys/ssh/rsa var SSHRSAPrivate []byte //go:embed testdata/keys/ssh/rsa_enc var SSHRSAPrivateEnc []byte //go:embed testdata/keys/ssh/ecdsa.pem var SSHECDSAPublic []byte //go:embed testdata/keys/ssh/ecdsa.pub var SSHECDSAPublicSSH []byte //go:embed testdata/keys/ssh/ecdsa var SSHECDSAPrivate []byte //go:embed testdata/keys/ssh/ecdsa_enc var SSHECDSAPrivateEnc []byte //go:embed testdata/keys/ssh/ed25519.pem var SSHED25519Public []byte //go:embed testdata/keys/ssh/ed25519.pub var SSHED25519PublicSSH []byte //go:embed testdata/keys/ssh/ed25519 var SSHED25519Private []byte //go:embed testdata/keys/ssh/ed25519_enc var SSHED25519PrivateEnc []byte gittuf-0.9.0/internal/testartifacts/testdata/000077500000000000000000000000001475150141000213255ustar00rootroot00000000000000gittuf-0.9.0/internal/testartifacts/testdata/keys/000077500000000000000000000000001475150141000223005ustar00rootroot00000000000000gittuf-0.9.0/internal/testartifacts/testdata/keys/gpg/000077500000000000000000000000001475150141000230555ustar00rootroot00000000000000gittuf-0.9.0/internal/testartifacts/testdata/keys/gpg/1.asc000066400000000000000000000117531475150141000237140ustar00rootroot00000000000000-----BEGIN PGP PRIVATE KEY BLOCK----- lQVYBGSI3XgBDADTC7wATx/R2ln+S1V/mpuSbae+6DNLZcQmFdc3zBFBhaKK3OtC 7UBiPkkdmtpDpX8UsUnW4QrmK4bjKCU/kBhwuD+SQ/IAyftgMJAv5XsEmy4gKsYg o+DR/muWpI+uYnJYfS3ncGZD0nvgsN9kcx6qkRLD4cqHhu31oN3r9j9TgjPrUo78 x1tGGD44n02DuJj4hSaXliiBGlM49lIbKDiEWyrPX99vylBViFpyARdOJj7mchVV Iqel6zkYd90D/w2WjRvXYbv0ZiRb1SgroOCm1s4hNsWW2JCYETOuPMq7jvzYYz6o Dw5VpHpfo2jXS93Nff2zTj2GhVhYSeaFHxw9fU0ylM6XxP1Jux0dH23Q9j/LnsBl 9q0fleREPjA/4sYOGqEt4od9rJBnXxAOFt0QO3tzAnL2JT5DjU4g9kBYtUPgyU5s VuucudgTE96dSJ3X6hVnD9LxwuwhJUswv4ASpV54hKqX+eDNgZZbzKRt5q4Cjx4L Q2dom+VnrXaqS+EAEQEAAQAL/R7kvVkIPB6yiXDtoy7iOXrGXYNXJ0cWtw9iLBaJ Y09FmiUcsL2j0ERxXK+ePuwfGkqzpxzy5W8LGRxAgK9czw8VEY0+L/tIlekfQsQY IhlrA+YGjOkzRBJJXYN4RGYhGB72+BTEgdvUwHiEO7dPGMKcKPSOpWCQcl399e54 Jg355dLBS5gaMLTCkL0mKKkrCQ18JEmVXAmGCn+woxVBnoBVfh9MJ/+LzJWwt5iv qzMv6w49zfyf7Y2esMVxqmNO3vDIJ1pCdkLoW66ViYnrBK5TkBuF7OJXnILrjADx oQ1WAOiHHebaoCne5D4vy5JkR0vGKxg0TlbOuKPwJtnJquATB23OK2bMbr2ZBtlB oR4gKe3RCLrN18Wflxqo2Gl3SbMMlhe3FcpHZb0g0o1uYU7iG6rGwrsbh/BcdCoe llh3DJwdqaGF02T6A/Wzw8ByetLhUaVmLMr2DXXxFqOr7rJYyn4pK/5bz7M84tZd qru15MnRjzGNOCX1VvkN4I3gBwYA5caCYRM+T219vDfgs9wa25BGszBXbw4JMtRF Qo2IVpyj5opaZw2MrVcXfV06SeR499cdShkW+O5GINDgqyucy6rUjIVNCv8M9klC FLev0TIkRYnjWHDoS9QbagAfnbObj0ZK9kQG1tiOEwqXHQ9Pu3rV3tIQ/ggQb6tZ KO4CxxHBsi+qvyilvHqYvDKkB9j34rClSYRQ4fkTFdwGuLmpnIaALqARpFflL/xw eGhXYk4oTYWOasj65sSihaOhmAPvBgDrIf24EA+xhfUeQRfuIJQtjILxx7XWOrcl nrugYidnVV4zZcn09413LJndndxpaQDYcP2DmfpLHekXXPubJe15AL4WNsD3jnyi K8GYP/jRm3cXy2LGO3n9oQXjHdCHS0YhvtakkXQdDMRd5RJ6jRN7RjBvmyzAAJky UqHsjRCQTqmT3/Jx43AVSQ5GD/tSaUbYCnIH6I13PGhQiCJwWVnjveZ4A1xgZD8F ByQDeuZ48kouY+biC4DgUYhVM0wonS8GAOAg+4hDecvfswS/R6vHjJ5iJdCkhUdw lv/LtU6kcirT9NZ2Kumgggf3uiggxq7vNd1bAxCeKLq6CeFLhhWXm0XskCtPNzeE mdQNvvddZes9C61wKHdsIRIowNdp1stxRlV4T33HiVjnMt0BuZCwV69Tq5LPUSRx kLSvxv5KdSXLD1WJj34Z/O4KpIL7kx6oxKi97+rnogn1XXZFxW6A3l/lp5Zro/dg agHob6LpKHC88S1q7jgtsZtsgu71otMCidzNtDZnaXR0dWYgVGVzdCBLZXkgKFRl c3Qga2V5IGZvciBnaXR0dWYpIDxnaXR0dWZAc2FreS5pbj6JAc4EEwEIADgWIQQV dQe74VHjeM6BJsHc/gQ83S25bgUCZIjdeAIbAwULCQgHAgYVCgkICwIEFgIDAQIe AQIXgAAKCRDc/gQ83S25bsPTDADI3hT+sOIKnuQvzyG/PJV13EApQ6Axi9F6+pj3 6n5nq/z8U6Ky3bcbiPelxlgRRzqjFAnbKGr1qXL2M3GyjAAEqPt3P6S3s4GS2FYg 8dILIceXE9s/C7BpnrCSTRqXGnPbvmEA1R8atW3OKTTUIFROgVCAsTtpThcQETMV 
lMGXhcui1qGPrBfRkemmWaKp2x2fGxvLcsI5FcGMe2EG70FsteN7ub09Ew9QRDpi 5CwTeuPFqZqo/4477YCJLs1QfdmN4wdTV8M+yZcGUCgwTBJvI3lpFUI9f/dYqULS PKajOwg11CmzXRbRJMCw59ZCeO2pLrQQjSFKJQcmlIYToxJHc2DgVFiSpiuPsMY/ NS0wkEuybm6zR7uA7qJln88capCl+SlNhIVjuyvkyQ9vRoowSBpwpVV2TPA6mqXI 9uO6fCv82x++iJbsqzfgFTJbg2p7jTdoXiZXfOqy6tqwvUDSNY4tAUzDQ7hYYJu3 a6V1f9t7/cFd5AqdwmFoDAMcW0qdBVgEZIjdeAEMAKkV+Q2buAFWoL5IUqhQto0V hcdw+J39111qeYyC+Z0eIJ3NaVtPWHTH2Fa2u7oxpWbmaVNjGNXgn4tRY6ZGyR0Q 5udm18NJ5gpKB8D/UKW/u4fwgoHKvd768cU35yqP9llt+b43iUSbPKV0UcI1M32T N8fHouPje4NzJuhYn5D/KkIFEKUFo4fVRcxgfCixu1eA1zGq4HIlKR4TKIr5qZD4 JO9a37+jMa/uo7k70ww2AgPnz0fb0bF4F3nnRsgF4WplaJpLfmw+uAQPvIPKb0qP gk/Gu8VyePDCYCe6FkbV/lgHQUsvvFSOn/NkPbK0OuYJ8pZ2CheUX0rcfDU3tnjd MAkbJ4gAuBBuzp5gqvTV78UHC3NV5Ml092+ZAc87WtbARwVTY6JKzbmtjG+DvMBe EDu73haRav8h/youvLRHSHNcIwYjdFeTGTDUhS3cVCmKQh1syf227Bm8U2rBjR9z IKGQaObzcu0dg2WKXJE8kUbB7w867mLhG9hxBD2U2wARAQABAAv+MqVAOhFAZjY6 Y4RqLbwvUcb2kSyHa1tTHMn9qla4yZTylBGo0w7LIf+SDzcgt76TGmU9exqdn8Ju R16TiIR/TNN5NiqZygbifoZ6GBbhYyQw2aDylknR4W4s6LO77AW+9j54pb8gwIn2 +mAC9TbWO3DTLmXfpn7GyjK9nq6c7ONhFvFJcQRlXRRGmdb0mYPS53zR+DCElewH itiWNjpdzexls8P2bQJGqbnDFIKUzn2G0QCoF5ACGTQtY/ggPV11EHQKFPfBViYF W5vhgImlidQQxDXsV+48L26ju+Di8SJwEbNSDwubocmMdJ8xzHs8Tr4Rzqnu9GU0 pQ3u6E3w3IR4s0/NnNOqHwSUijieqwoPc58X0Lgj6z+c4QD5GKQsRMOVKiS60/vt 9EyIjqusOw+VTh9Kyv2FJRhzOeYuKElsy8MnvDlhg81d774DKyt5jogbDit1oSIo 8CULr7INTNEpaJxKDwLucBAFfxWRQx0PGUbmW1x2gjs7K4B3KxdRBgDDPaAhL1Qx 1LxPf7RvS+4XxgCqDvaiz+aHv68tQNz/3p5sFJUtYrZHEunfqeMCogrAmgTGaDNR Xar6rS5CROH02TDWMXQ89RBNou9vpPkae91Pietz7OvsJyAgg6Zxm/2fcRJt8DOP zBYSxOkd/yBsxAVMcFOg3SceMr+wTVQb1O+PjJ0kASbdtbbzEf5NFq3aAgLfnzRu Sj9mCn+KOHiLkR1tYhdjdv7sZ6MlzukqY0TBNAEfBALMzpGEm/t67JUGAN20pdkz PngvkJbWpgfG6KYGYBNVboRigv/weHNo/V2JKQrPwD9SPo4otqYOND8Ra+AhizYQ 8qBJoG9Yip0m7YXZn3qUw3twI5DW1Q1VGb58d/NAaYbbcKS2CgFcA15Iu3CkBtif CNW2e44LtTIB347JEJKJrv9qcyiCotiRXE313gaUS84+mwewp9dO8XzrdbgF8bHJ DGkcrbcV6JOzI3WfAOlapmUxAOnoiLOaX6X9jQT3sukBS9kHl+7YaoyvrwX8C9RJ tCQbEL/1LSe/eV42U2B/PlR5it5CS8sChRVftELSoRjaI7hOdzHT4o0t1lZG9y5N 9aQQDytQQ3jR/8QaF2rLLcAooIlGwftNWyK9gw0VuRfnx+kDjyIrnd3rrmfdwa+f g+UP6OPLxp6WwujBLpze28jdSzTJ1rUGx35pr1HG4+FF1i3ZLgs/06kOJJTEnysI Jy6f3ysYuYzBoOovadu0KfRgEQZf/65vatrpT2HHQ7wQzEFGF8c2Nv0wDCN61x+J AbYEGAEIACAWIQQVdQe74VHjeM6BJsHc/gQ83S25bgUCZIjdeAIbDAAKCRDc/gQ8 3S25bgOYDADHfVcBcr1LCbm3J3MvZ4RuJno5SYpeB5FGLktFVsozOwNZUo0qS+1v +vyuHIRreqLxCORbCdLluo4rMCH68AqI6hyvGugvkpfb5cx0dF9JGdWQBQZC3RKF G0C70ErnHoxbkLhfviD/jZGYfxOAFn1HHgnAVaDlnqQ4fi3SPeXAp3/8/7iqv+vz VAoYMysEZo8lP9GWvPiyYTCSvVbmL0//lA3vCZSFB3DzK4Ae6UhTrdLO2FMQSdft H1pJZJl//1UgpnxVVmGLrT1573Q3QPWuDCfBzuTnO7mg61tm6uvvvLvzSccZfzVC u0VLOsE7Ti7y7oB2IMgfDUQOLsSU6U0ye035bUeU0ax3+uEjqOmBXBytqoQk/sXt LtuZzIp3UbldsNRD+TomlBj8KoNxB4HQSQW/e0B/E+0e1yrhy82rJYEaxEpSYylS ksPMuTNq9volVoo6INBsHY/kPgO9/+i7eguAsnqX5a4LiPrmGQD0FtJ8Yn2dpqgl XSOG5KBujIE= =Sv5S -----END PGP PRIVATE KEY BLOCK----- gittuf-0.9.0/internal/testartifacts/testdata/keys/gpg/1.pub.asc000066400000000000000000000046441475150141000245020ustar00rootroot00000000000000-----BEGIN PGP PUBLIC KEY BLOCK----- mQGNBGSI3XgBDADTC7wATx/R2ln+S1V/mpuSbae+6DNLZcQmFdc3zBFBhaKK3OtC 7UBiPkkdmtpDpX8UsUnW4QrmK4bjKCU/kBhwuD+SQ/IAyftgMJAv5XsEmy4gKsYg o+DR/muWpI+uYnJYfS3ncGZD0nvgsN9kcx6qkRLD4cqHhu31oN3r9j9TgjPrUo78 x1tGGD44n02DuJj4hSaXliiBGlM49lIbKDiEWyrPX99vylBViFpyARdOJj7mchVV Iqel6zkYd90D/w2WjRvXYbv0ZiRb1SgroOCm1s4hNsWW2JCYETOuPMq7jvzYYz6o Dw5VpHpfo2jXS93Nff2zTj2GhVhYSeaFHxw9fU0ylM6XxP1Jux0dH23Q9j/LnsBl 9q0fleREPjA/4sYOGqEt4od9rJBnXxAOFt0QO3tzAnL2JT5DjU4g9kBYtUPgyU5s VuucudgTE96dSJ3X6hVnD9LxwuwhJUswv4ASpV54hKqX+eDNgZZbzKRt5q4Cjx4L 
Q2dom+VnrXaqS+EAEQEAAbQ2Z2l0dHVmIFRlc3QgS2V5IChUZXN0IGtleSBmb3Ig Z2l0dHVmKSA8Z2l0dHVmQHNha3kuaW4+iQHOBBMBCAA4FiEEFXUHu+FR43jOgSbB 3P4EPN0tuW4FAmSI3XgCGwMFCwkIBwIGFQoJCAsCBBYCAwECHgECF4AACgkQ3P4E PN0tuW7D0wwAyN4U/rDiCp7kL88hvzyVddxAKUOgMYvRevqY9+p+Z6v8/FOist23 G4j3pcZYEUc6oxQJ2yhq9aly9jNxsowABKj7dz+kt7OBkthWIPHSCyHHlxPbPwuw aZ6wkk0alxpz275hANUfGrVtzik01CBUToFQgLE7aU4XEBEzFZTBl4XLotahj6wX 0ZHpplmiqdsdnxsby3LCORXBjHthBu9BbLXje7m9PRMPUEQ6YuQsE3rjxamaqP+O O+2AiS7NUH3ZjeMHU1fDPsmXBlAoMEwSbyN5aRVCPX/3WKlC0jymozsINdQps10W 0STAsOfWQnjtqS60EI0hSiUHJpSGE6MSR3Ng4FRYkqYrj7DGPzUtMJBLsm5us0e7 gO6iZZ/PHGqQpfkpTYSFY7sr5MkPb0aKMEgacKVVdkzwOpqlyPbjunwr/NsfvoiW 7Ks34BUyW4Nqe403aF4mV3zqsurasL1A0jWOLQFMw0O4WGCbt2uldX/be/3BXeQK ncJhaAwDHFtKuQGNBGSI3XgBDACpFfkNm7gBVqC+SFKoULaNFYXHcPid/dddanmM gvmdHiCdzWlbT1h0x9hWtru6MaVm5mlTYxjV4J+LUWOmRskdEObnZtfDSeYKSgfA /1Clv7uH8IKByr3e+vHFN+cqj/ZZbfm+N4lEmzyldFHCNTN9kzfHx6Lj43uDcybo WJ+Q/ypCBRClBaOH1UXMYHwosbtXgNcxquByJSkeEyiK+amQ+CTvWt+/ozGv7qO5 O9MMNgID589H29GxeBd550bIBeFqZWiaS35sPrgED7yDym9Kj4JPxrvFcnjwwmAn uhZG1f5YB0FLL7xUjp/zZD2ytDrmCfKWdgoXlF9K3Hw1N7Z43TAJGyeIALgQbs6e YKr01e/FBwtzVeTJdPdvmQHPO1rWwEcFU2OiSs25rYxvg7zAXhA7u94WkWr/If8q Lry0R0hzXCMGI3RXkxkw1IUt3FQpikIdbMn9tuwZvFNqwY0fcyChkGjm83LtHYNl ilyRPJFGwe8POu5i4RvYcQQ9lNsAEQEAAYkBtgQYAQgAIBYhBBV1B7vhUeN4zoEm wdz+BDzdLbluBQJkiN14AhsMAAoJENz+BDzdLbluA5gMAMd9VwFyvUsJubcncy9n hG4mejlJil4HkUYuS0VWyjM7A1lSjSpL7W/6/K4chGt6ovEI5FsJ0uW6jiswIfrw CojqHK8a6C+Sl9vlzHR0X0kZ1ZAFBkLdEoUbQLvQSucejFuQuF++IP+NkZh/E4AW fUceCcBVoOWepDh+LdI95cCnf/z/uKq/6/NUChgzKwRmjyU/0Za8+LJhMJK9VuYv T/+UDe8JlIUHcPMrgB7pSFOt0s7YUxBJ1+0fWklkmX//VSCmfFVWYYutPXnvdDdA 9a4MJ8HO5Oc7uaDrW2bq6++8u/NJxxl/NUK7RUs6wTtOLvLugHYgyB8NRA4uxJTp TTJ7TfltR5TRrHf64SOo6YFcHK2qhCT+xe0u25nMindRuV2w1EP5OiaUGPwqg3EH gdBJBb97QH8T7R7XKuHLzaslgRrESlJjKVKSw8y5M2r2+iVWijog0Gwdj+Q+A73/ 6Lt6C4CyepflrguI+uYZAPQW0nxifZ2mqCVdI4bkoG6MgQ== =7iQ4 -----END PGP PUBLIC KEY BLOCK----- gittuf-0.9.0/internal/testartifacts/testdata/keys/gpg/2.asc000066400000000000000000000117531475150141000237150ustar00rootroot00000000000000-----BEGIN PGP PRIVATE KEY BLOCK----- lQVYBGUthjIBDADNYB6H6yG12QDX66iZrK0UZJGMjLzwGnUegRrev+jCVKuATh2i 9GnIsdAcQjlB34F+NmEJvOsK28wwR0M0WKNhuH169FnXtQNoQBPb2nzG9gpl4JlR JtzWHuJhG2aPhW4EUZcEoQY9dkSMQRbLwRC3FS8k7WHvY4QkI+unj84ulGbMzuVw qLTg/D2nt2L1Bn5RZcdrt27MrXTI57ztVPJV6sE1jlYMlSpb0I//YZcRQkA2wZur ACewEXdu7kabRzFU7z2vOaOIZq+99thBmwfzzCsMBDbhEQls7Vg1W3u2m3Xdaqli XLw+oKdr7EjyUCg+lIhyvt+9tv97vThzXRxD8YrAyy1HOg9wXK6g/bzm/TpRsZ2M iOspYJhZ+yUHGtVkAHja5BwtHcsim2sMYhiW72211QyRwNlzQVWWf2H5QZ0jv+y/ OUe4575ltb7dV1Zs9yT2+5aVwoNrN7vpJ0MMu8/Dkr0O2b+2o/U0H1qqCAstZDYX /+deX1GQE+x2znEAEQEAAQAL/2Xx7gXS+k9NsiT8NhteQyYJy9QEMm9O+s8TEBb9 FRmNtPoCDKUMhQwZ869rUge4XgucC6wtSlEADQnz/dCYpTVHQoZLj+NK16+iixkJ igGrNTsf84r8awegmgfQ9aaNebCMHZYvJi1RLTKfxqSKp/Txf47phMEzWW8lI39w G0003hiRJQQ4wlyh6Sr9FZKCUCawin6WNdpEyeFGYxo+HfL3DkQ67MzfKIEd9LS0 qSvogn6LuW13LLwx12LJxWOjwN3tLTJjJAHAv0o5ApOYmRIXmUUubEYoZCSEDTW+ l9KiD5dHuR3SzrP1Y7X8DUsT9rDMr9sYB/rj6G6kUDgMRv3dMyBILJWVDj6nJ8nb oZZ1CC0eSd7rsiHlb3ktISKCNfl9jNGIlBe5YzVHTg/elbpiw08MTmBQdtUTIn91 LaLzxgz8sA1EPGgkY3kevNRhedXSPtAvbDPT4SgxIYQwNdHSCjje2B7KInKRNahQ cSrex01zg/ZYGi4aAyKKveyzfwYA34uFR93nBgMElRFZi+qUd6pO1d5zt5LbTrl1 so314ag0OuNc0JCmzNOgnZYcQuRiw+TxzIThkcOetTqkSjX7BPZAUk+GA0uhFBmT /8peTd2yfbcrJ2iqJaw62JcFGFtONXpyxWuS5QBnuHc5x2FOaTB7txIkW2CXOtVz 565o4WzugIEPE+vvrKilKDUkzyPZ8AN9Od6WGIZPcecpOpw5YZVpQeLtjdSyrdxO og5rlj3dHKxF09saZeVqYef2V4znBgDrMUr8D9BxbtSytISeZQ/brQ2q9zc/0hqU Sa1S8HtKBMqV4k7uELczIw9thMM1RpM3Sk81q0BJQ8OoG+rBbX0qiSGF72JdHK0m 
3KNs7PGVhqUUuiaes3/31k14ipFXAnnpmqKMZXnP7VzQnYC6d4dfXqav/yWqrnhv g72fAGaIx41fQsbRK7ybSczIN3oL0nrSRyW8DJBXhl0HBlQWQDNhJ/dQa6aWJpWE jQXL9rWpFB9XDUESul5FiOVoyoQkxucGAI4/+O68/VwZd+LEGhbs0NU+4yYaPThB pnB7kQOu4JO73Un9xtzhWEJ/Fx2OeXLcW2agIkyvD/36K1hgR3g/syQhHDRNIZC2 YXq9bdRDFspzxgtU0CSorWwdS+nGjLP81qEqYwW/49xR84lkozZUlWf7DS3d9pQQ ad/hEZkdPD989WHtVXPFdkwLtN5CCodEo2Xi9GuxSeMr1bmJw7dB553qE7ZzbowR 6auDzPt0Y+/b5xhD2FgPYJhIzcbY7D+dmdYQtDZnaXR0dWYgVGVzdCBLZXkgKFRl c3Qga2V5IGZvciBnaXR0dWYpIDxnaXR0dWZAc2FreS5pbj6JAc4EEwEIADgWIQR3 B+h/EN9JhHK6vDLlF+IRyyOp6QUCZS2GMgIbAwULCQgHAgYVCgkICwIEFgIDAQIe AQIXgAAKCRDlF+IRyyOp6VBSC/wK1RCkVyTOEXZThfLTZzECXxY8nEvvmVcKvrjO ZhO+ZnSICRe7+VzK1Ilmvq8Khl03xddPyYbHOzfy83UaAUx+nxS3KXtpRXW9RztF loerCpJ5O3bvAB+q5xe0woNn3GQY4Zl80QFvUEvV5JKGX3Jr7S21uWC+e95RGL8O TWbCLkX1LljH/u+ghQSSzIM2kcPlSExQNQFf7idIr4Ucmc76pQQVyRTSagNqOeXk ou2h+RydIKZBVz1qmiE2amRtx26gN6OHdChhGIBJgOlRPu7117jzf29yP1MMW70s EbHtG7eeW3p6ge3Sg0RHxzP4T9nffYuL3Bs6QVOU7KnIC5faZD5229l2FmwT9MlH UDFK1otEjKJPp3Pa9qGW6fz2Mzw4GnHNoUvNXrghK4/J4II1zMJyAbfNI/UVUPAJ G/rQ3USMpQCr9QcHfDx47i50bc1U/jjtttuRsrkIKV6FCcefUV1UNSdd7Qi+DFbY BitlZ81+NGAwGcA2v3AZU0zwMdCdBVgEZS2GMgEMAMLUn7njTP8gYS4Q1V9d5kEK 0Ahx1AS9+qEjjVXvm17uU9jQgaRfH0ldWhuYoM4Z7zMyLSSJdtj8SFa0GlUno0dG pk6Ka4cO5dute3IxCM6aYffAqc3B+9gLMABxfQh3BaE78WhfQsPo3qveCs/dDDyl LANC5NXpIM4CU3sHUzKH0PvHAmPLIacupYK6ya4Y/aSIRLTdqyzM4LQl04UVPEYs 5KcLvvcUA4qXK9KcO474cAKioekF/lumTrW+dbVbfPZgNv0qq1lRfWk6jQwRumjb KHOV/iVJoJb0vZSmzfGSSZq5dxLyNyI0tmeXLvkek+eVp3bx2TOPg+WQJ91RfQ1X lPijNGxj2eCJGYm+Iu8kt6uqA1mWrzz4OgeWCfiLVI/S5XHRD5CtbxRiasBGCfnZ KKef6jkXXm/PrpjA3FkJDJWuj2qY+QNqHbqTfDeyeLA23adVeJNryZjLZkWUTKe9 i41uHknk1OQZd4Ruoaqsywqze7f+mGifJGrHKGB2oQARAQABAAv8DgjElAkrD9+p uxNBrikfP8UtyhtE+K3PWCctnv3vtQ1kOHnV34dt7gTNoQTuucSa0JcciFq8lxHJ NjGqtJfyhjJbzXbhcFei4smi2vgW7dGncpzME9hq3JulsrUkxWTUINKmvOwhF5Kh OO55hz1MNzF2CQAbEsuZw4KIYpQwVTNSWv3xY/Xgb+CyJxzEZV2Ye8RzH+It2pV2 2ZM1+DVY46cyi1PocXeIGnDG79i1iO1yyBsl9ZlVwrf+nc82lGtJZgvWFt7/Fyh0 8psiGb3U4ZzBZZMtL/CIIczaXBo0o4rB334zni5S11MX7J4sl/uDGvWFINTn2e+I cI8V1A4jKZnl14O+CG/OGFa93x2SMD4YYwGb7Rn9QASD1K62lBunAwZZMzyqp8zC ElEmt6QtR4+Aiu2i98X9WrsTx7LmX/jfV0T2FobpboPe7pKe4PW+bQ919oaIjkKQ R+EArLJJAXNPC5DpbV1k943B3OVdCVc/HkcsL3Gu4wPkAZIGWKUpBgDSG5Aetjan UTaxab9AFwApGUnl4VAU20BSlCCqb8HdA4GdI82cGu4Jo+WhvUDXkOoprLnvEpZ3 nZ3AuVsa5ekJWjOMFiMuptb83XZ0GtrgUmEdWaxOrjzsRqeSPZaYOsQ5DSO2Zr2m ekmN6ZUpQbyHC7SXtIc2oMH11XJGOXuC65ZvTLQn9YwJRZJE9dGZTGR8m2PGvfcb 3sCWcHPjXXSPG3+z5HmNBIpxBsd2EkqPGNgqBoOz7NcotNsjZvV7y7UGAO1i0nvh 4WLqm0YRkVSlXc/Z2wdl2IJXuRnoq69yW/ZeGS7YsMFTbf1pAgb7g0ahXrzISX2s 6xxb6XJiIpEzjuVt3rjPCGRwRwAmG6P2KT1TGEUWrCjflp7LVqcPlzQNZ8RbL+Zs 1E6kq5TGqtVkuURzH1ptfBOlkxVHwznYb4et6o0Sulg/hbxsm3Nc0mGhQ/7wtjTV s1Tf9xleXDmws28+paoXO4DBnbgOewWpt8Q/NgvezlqIlgT5AgaUva4KvQX/Qn34 CNipJ0eo+SpLumNnZsVyET9fIqYjJh8fWyk6GNNvnEZDF4tih8Tf2haRqxnMX6NZ NSw5ZF+Wl+GPBWDdd3A2uLPe+s4HCfUFvsr6dvkDzVM6FLG4qam8t3yiOXT8y28o gCndWpa8qan7zrHCbfaMN/OEtSvoVVTMDlTByiFTwmrQ0yS3X6jyRiV7q3nP3kUW pSsuEmg4ChYKSuKJdbwpuUGx4abA2cS7F1pYX+n19Yd9agN/GnQI6we9pE1Y5EWJ AbYEGAEIACAWIQR3B+h/EN9JhHK6vDLlF+IRyyOp6QUCZS2GMgIbDAAKCRDlF+IR yyOp6ZCjDACo0lJuAlCJKDbmju6CjT02Fo6n0oCCbpnjp07V676iQ+HNzW7VNiyZ tPkqhVxs0Pmim9hii+DQov3NGVrMZ7ClyerOCdolQhxIaZH56MLTlLeNKSibr+CG Qjj0zbk6dWjAMK9CKbC++JoJRa8v7YxYnDwdnGx1dwQRup4bF/asg0AHNdVBPJMl vWSnoN4pKWLtNqi3HWdGpRMUFqw8cE2flJ0NsZY/AReaM8th63DWUTXz4EGHtMGv CbPwjTrCKHNM7GeR8291mZbleghegqcaKag1bPGU8ZC3lk6BpkaEk79JBEcgf9vN 6qBdmNkjmIIUQqs7aH3IuM6+RTP3V7xbGWe+xw/bBgLpA9gmtoauZIXJWmhC7zyd rJ8weFi88Zhrgcvxw+dFe08y1u6mzcAK1qwwOyjgnpN9+7qfC6QkuDD4ZEHNnp4e 
HdG1OL/lzJxLy2AEBOZAdRKaBRzQj3FnVw2O49e8QYmjNibODRdRnexGGHArbMiz 0SFeyeCDHC0= =cqHp -----END PGP PRIVATE KEY BLOCK----- gittuf-0.9.0/internal/testartifacts/testdata/keys/gpg/2.pub.asc000066400000000000000000000046441475150141000245030ustar00rootroot00000000000000-----BEGIN PGP PUBLIC KEY BLOCK----- mQGNBGUthjIBDADNYB6H6yG12QDX66iZrK0UZJGMjLzwGnUegRrev+jCVKuATh2i 9GnIsdAcQjlB34F+NmEJvOsK28wwR0M0WKNhuH169FnXtQNoQBPb2nzG9gpl4JlR JtzWHuJhG2aPhW4EUZcEoQY9dkSMQRbLwRC3FS8k7WHvY4QkI+unj84ulGbMzuVw qLTg/D2nt2L1Bn5RZcdrt27MrXTI57ztVPJV6sE1jlYMlSpb0I//YZcRQkA2wZur ACewEXdu7kabRzFU7z2vOaOIZq+99thBmwfzzCsMBDbhEQls7Vg1W3u2m3Xdaqli XLw+oKdr7EjyUCg+lIhyvt+9tv97vThzXRxD8YrAyy1HOg9wXK6g/bzm/TpRsZ2M iOspYJhZ+yUHGtVkAHja5BwtHcsim2sMYhiW72211QyRwNlzQVWWf2H5QZ0jv+y/ OUe4575ltb7dV1Zs9yT2+5aVwoNrN7vpJ0MMu8/Dkr0O2b+2o/U0H1qqCAstZDYX /+deX1GQE+x2znEAEQEAAbQ2Z2l0dHVmIFRlc3QgS2V5IChUZXN0IGtleSBmb3Ig Z2l0dHVmKSA8Z2l0dHVmQHNha3kuaW4+iQHOBBMBCAA4FiEEdwfofxDfSYRyurwy 5RfiEcsjqekFAmUthjICGwMFCwkIBwIGFQoJCAsCBBYCAwECHgECF4AACgkQ5Rfi EcsjqelQUgv8CtUQpFckzhF2U4Xy02cxAl8WPJxL75lXCr64zmYTvmZ0iAkXu/lc ytSJZr6vCoZdN8XXT8mGxzs38vN1GgFMfp8Utyl7aUV1vUc7RZaHqwqSeTt27wAf qucXtMKDZ9xkGOGZfNEBb1BL1eSShl9ya+0ttblgvnveURi/Dk1mwi5F9S5Yx/7v oIUEksyDNpHD5UhMUDUBX+4nSK+FHJnO+qUEFckU0moDajnl5KLtofkcnSCmQVc9 apohNmpkbcduoDejh3QoYRiASYDpUT7u9de4839vcj9TDFu9LBGx7Ru3nlt6eoHt 0oNER8cz+E/Z332Li9wbOkFTlOypyAuX2mQ+dtvZdhZsE/TJR1AxStaLRIyiT6dz 2vahlun89jM8OBpxzaFLzV64ISuPyeCCNczCcgG3zSP1FVDwCRv60N1EjKUAq/UH B3w8eO4udG3NVP447bbbkbK5CClehQnHn1FdVDUnXe0IvgxW2AYrZWfNfjRgMBnA Nr9wGVNM8DHQuQGNBGUthjIBDADC1J+540z/IGEuENVfXeZBCtAIcdQEvfqhI41V 75te7lPY0IGkXx9JXVobmKDOGe8zMi0kiXbY/EhWtBpVJ6NHRqZOimuHDuXbrXty MQjOmmH3wKnNwfvYCzAAcX0IdwWhO/FoX0LD6N6r3grP3Qw8pSwDQuTV6SDOAlN7 B1Myh9D7xwJjyyGnLqWCusmuGP2kiES03asszOC0JdOFFTxGLOSnC773FAOKlyvS nDuO+HACoqHpBf5bpk61vnW1W3z2YDb9KqtZUX1pOo0MEbpo2yhzlf4lSaCW9L2U ps3xkkmauXcS8jciNLZnly75HpPnlad28dkzj4PlkCfdUX0NV5T4ozRsY9ngiRmJ viLvJLerqgNZlq88+DoHlgn4i1SP0uVx0Q+QrW8UYmrARgn52Sinn+o5F15vz66Y wNxZCQyVro9qmPkDah26k3w3sniwNt2nVXiTa8mYy2ZFlEynvYuNbh5J5NTkGXeE bqGqrMsKs3u3/phonyRqxyhgdqEAEQEAAYkBtgQYAQgAIBYhBHcH6H8Q30mEcrq8 MuUX4hHLI6npBQJlLYYyAhsMAAoJEOUX4hHLI6npkKMMAKjSUm4CUIkoNuaO7oKN PTYWjqfSgIJumeOnTtXrvqJD4c3NbtU2LJm0+SqFXGzQ+aKb2GKL4NCi/c0ZWsxn sKXJ6s4J2iVCHEhpkfnowtOUt40pKJuv4IZCOPTNuTp1aMAwr0IpsL74mglFry/t jFicPB2cbHV3BBG6nhsX9qyDQAc11UE8kyW9ZKeg3ikpYu02qLcdZ0alExQWrDxw TZ+UnQ2xlj8BF5ozy2HrcNZRNfPgQYe0wa8Js/CNOsIoc0zsZ5Hzb3WZluV6CF6C pxopqDVs8ZTxkLeWToGmRoSTv0kERyB/283qoF2Y2SOYghRCqztofci4zr5FM/dX vFsZZ77HD9sGAukD2Ca2hq5khclaaELvPJ2snzB4WLzxmGuBy/HD50V7TzLW7qbN wArWrDA7KOCek337up8LpCS4MPhkQc2enh4d0bU4v+XMnEvLYAQE5kB1EpoFHNCP cWdXDY7j17xBiaM2Js4NF1Gd7EYYcCtsyLPRIV7J4IMcLQ== =WCxC -----END PGP PUBLIC KEY BLOCK----- gittuf-0.9.0/internal/testartifacts/testdata/keys/ssh/000077500000000000000000000000001475150141000230755ustar00rootroot00000000000000gittuf-0.9.0/internal/testartifacts/testdata/keys/ssh/ecdsa000066400000000000000000000010411475150141000240730ustar00rootroot00000000000000-----BEGIN OPENSSH PRIVATE KEY----- b3BlbnNzaC1rZXktdjEAAAAABG5vbmUAAAAEbm9uZQAAAAAAAAABAAAAaAAAABNlY2RzYS 1zaGEyLW5pc3RwMjU2AAAACG5pc3RwMjU2AAAAQQQ20crD/8ZRkAD5QjOCNhuHBQLL22H2 MceknSGujJ0RNYMGkdwVTmtW/gIC0xanaUpopBJwhBNpguC1NZkmb/+NAAAAyCnC4Ewpwu BMAAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBDbRysP/xlGQAPlC M4I2G4cFAsvbYfYxx6SdIa6MnRE1gwaR3BVOa1b+AgLTFqdpSmikEnCEE2mC4LU1mSZv/4 0AAAAgD2FtLPZIkIrP4mMnacWXeMS+KYjUVXboxxxdYbo7jD4AAAAsc2FreUBudW1iZXIt b25lLWNoaWxkLWJlbG9uZ2luZy10by1tcnMtcXVlZW4BAgME -----END OPENSSH PRIVATE KEY----- 
gittuf-0.9.0/internal/testartifacts/testdata/keys/ssh/ecdsa.pem000066400000000000000000000002621475150141000246570ustar00rootroot00000000000000-----BEGIN PUBLIC KEY----- MFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAENtHKw//GUZAA+UIzgjYbhwUCy9th 9jHHpJ0hroydETWDBpHcFU5rVv4CAtMWp2lKaKQScIQTaYLgtTWZJm//jQ== -----END PUBLIC KEY----- gittuf-0.9.0/internal/testartifacts/testdata/keys/ssh/ecdsa.pub000066400000000000000000000002411475150141000246610ustar00rootroot00000000000000ecdsa-sha2-nistp256 AAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlzdHAyNTYAAABBBDbRysP/xlGQAPlCM4I2G4cFAsvbYfYxx6SdIa6MnRE1gwaR3BVOa1b+AgLTFqdpSmikEnCEE2mC4LU1mSZv/40= gittuf-0.9.0/internal/testartifacts/testdata/keys/ssh/ecdsa_enc000066400000000000000000000011321475150141000247210ustar00rootroot00000000000000-----BEGIN OPENSSH PRIVATE KEY----- b3BlbnNzaC1rZXktdjEAAAAACmFlczI1Ni1jdHIAAAAGYmNyeXB0AAAAGAAAABBqEUPAiO dGlfMfgXuHYLM+AAAAGAAAAAEAAABoAAAAE2VjZHNhLXNoYTItbmlzdHAyNTYAAAAIbmlz dHAyNTYAAABBBDbRysP/xlGQAPlCM4I2G4cFAsvbYfYxx6SdIa6MnRE1gwaR3BVOa1b+Ag LTFqdpSmikEnCEE2mC4LU1mSZv/40AAADQ8I3oIPP47sMagV2XgXQVppyn865VelCmQJUc eIbS/GM018V2CHulm71Sp5gXSUBOgp9kKQ1NvcAs/OXngIhogVreTfa8fmw5ui7u1mRhaV Z3cZ7nQxW+vhFkFc4wcnVOxhLl6Ontwml4Pp2ldlrsQNZiW2tJpAPoSZl/Mlu7HLFjJHX5 hUEHAhkEvMxfRb2Y26DMSMFDpoEnq8zXnkOR+91fuy6ZFZ9X+zQnE+r6ey5aRTS2H8Z5nm QURkgOgPA85CBMhZIkF4YdXeUipxeQqQ== -----END OPENSSH PRIVATE KEY----- gittuf-0.9.0/internal/testartifacts/testdata/keys/ssh/ed25519000066400000000000000000000006431475150141000240210ustar00rootroot00000000000000-----BEGIN OPENSSH PRIVATE KEY----- b3BlbnNzaC1rZXktdjEAAAAABG5vbmUAAAAEbm9uZQAAAAAAAAABAAAAMwAAAAtzc2gtZW QyNTUxOQAAACD7t0NecWGTgoO5M2KAKUoMv3z7pi1R4EkDvm40sHRkIAAAAKAUArx6FAK8 egAAAAtzc2gtZWQyNTUxOQAAACD7t0NecWGTgoO5M2KAKUoMv3z7pi1R4EkDvm40sHRkIA AAAEBRH3++ZkmQCiB3lvN4iLZbEsn5oEmsl7AtMbslFHgOAvu3Q15xYZOCg7kzYoApSgy/ fPumLVHgSQO+bjSwdGQgAAAAGHNha3lAc29uLW9mLXRoZS1iaWctYm9zcwECAwQF -----END OPENSSH PRIVATE KEY----- gittuf-0.9.0/internal/testartifacts/testdata/keys/ssh/ed25519.pem000066400000000000000000000001611475150141000245740ustar00rootroot00000000000000-----BEGIN PUBLIC KEY----- MCowBQYDK2VwAyEA+7dDXnFhk4KDuTNigClKDL98+6YtUeBJA75uNLB0ZCA= -----END PUBLIC KEY----- gittuf-0.9.0/internal/testartifacts/testdata/keys/ssh/ed25519.pub000066400000000000000000000001211475150141000245750ustar00rootroot00000000000000ssh-ed25519 AAAAC3NzaC1lZDI1NTE5AAAAIPu3Q15xYZOCg7kzYoApSgy/fPumLVHgSQO+bjSwdGQg gittuf-0.9.0/internal/testartifacts/testdata/keys/ssh/ed25519_enc000066400000000000000000000007201475150141000246420ustar00rootroot00000000000000-----BEGIN OPENSSH PRIVATE KEY----- b3BlbnNzaC1rZXktdjEAAAAACmFlczI1Ni1jdHIAAAAGYmNyeXB0AAAAGAAAABCSxL1LvV sBAwRzbaCSBLOmAAAAGAAAAAEAAAAzAAAAC3NzaC1lZDI1NTE5AAAAIPu3Q15xYZOCg7kz YoApSgy/fPumLVHgSQO+bjSwdGQgAAAAoGJL8flnHUeBia/0F1oE3pkfFzl1xYfhjOvfYa +zCmL6OTa2Ll4jyPORiCnC7moZjSjtzsGYWmMZNsMpsjX70BnrbuUHNtOkewoTnLg07nzQ i+yIlGI5kcwYshZQCm8karBHeiinFKXGU3WoC+dLvneVWNK3m7eYSZ89S+nD6xxS8O9g7L H+Mbup3joWSABA4ml4kF4mSIlArxLcv2DDPH0= -----END OPENSSH PRIVATE KEY----- gittuf-0.9.0/internal/testartifacts/testdata/keys/ssh/rsa000066400000000000000000000046271475150141000236160ustar00rootroot00000000000000-----BEGIN RSA PRIVATE KEY----- MIIG4wIBAAKCAYEAxCOK3QmP8wN6DjHrdSWCflboFGqTX4+4XAGXptQkbrHeRU4l lZjYeMssIAPtSVg7AZ8i32zmQYs+H4kEPvm0Rf5g10MdkK9sYAktu7Tcsr/LXxER roJQLuvU+2i7Hzxp+FgmlV9/jrgC2VWkJfxeTM6A5ImmZArAa1WZHy99/d2ZYbHm /22PO3+1DsAK6t1jDCvUKdHDvP/K7GZ87t5xQpWEiCylMe1a3+7kmlLZd/Q4f0DF 9ZugMdDIeN4DJ5PhW64WovgVqN2OfAOfiwVC946g5Jl8xb4WCYBiiElOZ3yghww6 
pboPpecCZhrrNRvI2ebmO7NmR0ucrpj0DaAiGc7Zi5tA4EKjTeCfmLm+fi+sgoAN UOpiqv+t9fw8h7xfHuRpZwjm9cxO82iWnCeNrTXVau2d8fh4pPT0X4AT7daBeNUh UgqdGETy8SzBXU4zYFdx2V1TZauuiIcqA2doY3eV5jJqu4dWvCMmwbCLSfCJJOF9 dryMwkqjpcpWUuPlAgMBAAECggGADCtRCNDN5nEp8qqEkN1LFjqfJfEiLqop8UTE sNGgZwvQc3KwoJz97/9ZwAIZCAAaPR9aAwxsfCw++6bn0AkfIb6cKeRHTSb32Dze 4l1g2hL4nm/egyTLrnWXCSLg2pnIJg6WFOt6wb3x/fkIeQuKRZqvDZRAHMc7/reD YMIfajpZs2GIfa2XAZqKqcfJhZ6ccBtiC6ww8vot5XKgar5QvvsBi49gOn5JsUjO /zIyaEpgnP3wVQJFs1bgW/tuV6zJkU0P/uwqobJWcCzQXGK6pDfMKcg/oEnExRP5 KAjpXHB1SRItCxFWavtBIzhL1/3O9P+mAjnMqj/AgjdYZitmjaYPUt0J2Lj1gTeo B+FTUJG5+zSTambOD6endTLwjR1CaeqNoY7JQF/yMuuHBujaaYolSeWzmuM56tHp zh11Pq4sfbKTs38jCkp2c5Jps+orkWKWQ4DP2tiFSqYkP2rbZxe4zYWdbBYkY3LN TrD128o8QDzX35wRCrngNa3oBqgrAoHBAPDyVWgkuGnNV9/Nv/Uajgn2VMXocLHL PuDKUaC0OBX3KJoZ1od29oasyEgMBucAoExn5QhSkjNZDCa4ThBCBfhII+rN41fY 4fUJWCN8WiPRgZ+V8Vg71CC0TNzanrn9e1xVwt0+CcU2EmWPOOPXcVjT37AllyFo A3oCkmttjQOtKf6emIb4ar+i6iEWSLTN1si5hCa4LcKmdHARxB8wDCM/Knjlp4gG L5qwLkuW3sq15toStYVzImEVRR4Nk85OQwKBwQDQZI87LBR24MarSJyd2rwLtsQq lfhj/G1wtcHpf5KI2jJQA7qeURt5LIP626Zowmvrh1CUECzF4lYmMi7W9wsOO5dW 4xVVnidpqWXwJ4spwDDZytpsJ6uhLUR7dUMZFmZ7fVV+O/h44eBphKtPU5sHrKvI No+Lajj/WSI0wZeUWqYBXBsiwvYD9shJ48BbnbOTlpr+/6NNemJYy8M+q61YOzJG 1lypFgfK5oveZTCBXRyXRoVuBZ7hFR4WssypJrcCgcBowSMqtRRMQ4+Z6O01Fgd9 A1DV/nnIgtHij3Ovx8q+mgdYADXKDYmfAc3NrRTvJ9GJhn4IAUY49tUWnHisDthj mpGrCzp1PgD3NMnSG8Vg3zxY8qrMLq7KTTlBp/bsa+1PNqYS5F8BE8Ipz3vvawKH mQ4emik7x+oDCM050ZZGa+nfHnq365cVnDHokrSnjvp7qFRxNRAIGbBz7PV0jcIp LNQ9XbNZqz7VJRXrnTtdrHoig5HR5D2lMccsCsEhJ9sCgcEAt0Dmz3MY390T94WX 7BxwpqBm6sgMsOhG2QmMZyf/FQdDnSdaUWQjk7A4sz26nX7y5iCNFFcQ+Ebr2RTi Kbj+6NowYIC6vxqfsAP+F2y+rM1MaAlBgEB2ZfrnsOVSRtpy7n8t0h8shiijbkbz WoO73cjTGvIX0tJrIukf8mKXSCdaufFkD3KAYsX+JTP5cNdCFjQnQrxMv5Iyr8En hZH0kpUjxjU1MJ+l5KBOORikye22ebTTGwgoSeiHGBDNdqYLAoHAKAl1LMAiYuL5 G+/HXropt6A42Q854KFaYcfgvRTvJifqb01heQBe6SLi19K/aE2HIXCMrJ79CF5R BcPAOCjgmEV84+XaKxybKiznb7Xo4USDkXw6y9QiYZRdx1Fez+wyhi1NRBDMML2E nIEt+XXxthw76g8LgzEId0mLB1g8akDHD+ZjoeLsOh83tOUjLeNGHl8BQIURmEb1 /pbUUUGGYlwCRRAI2Mbt8yaSYileEKEi7V5fJBZGm+m9lpUgFQ7Y -----END RSA PRIVATE KEY----- gittuf-0.9.0/internal/testartifacts/testdata/keys/ssh/rsa.pem000066400000000000000000000011611475150141000243640ustar00rootroot00000000000000-----BEGIN PUBLIC KEY----- MIIBojANBgkqhkiG9w0BAQEFAAOCAY8AMIIBigKCAYEAxCOK3QmP8wN6DjHrdSWC flboFGqTX4+4XAGXptQkbrHeRU4llZjYeMssIAPtSVg7AZ8i32zmQYs+H4kEPvm0 Rf5g10MdkK9sYAktu7Tcsr/LXxERroJQLuvU+2i7Hzxp+FgmlV9/jrgC2VWkJfxe TM6A5ImmZArAa1WZHy99/d2ZYbHm/22PO3+1DsAK6t1jDCvUKdHDvP/K7GZ87t5x QpWEiCylMe1a3+7kmlLZd/Q4f0DF9ZugMdDIeN4DJ5PhW64WovgVqN2OfAOfiwVC 946g5Jl8xb4WCYBiiElOZ3yghww6pboPpecCZhrrNRvI2ebmO7NmR0ucrpj0DaAi Gc7Zi5tA4EKjTeCfmLm+fi+sgoANUOpiqv+t9fw8h7xfHuRpZwjm9cxO82iWnCeN rTXVau2d8fh4pPT0X4AT7daBeNUhUgqdGETy8SzBXU4zYFdx2V1TZauuiIcqA2do Y3eV5jJqu4dWvCMmwbCLSfCJJOF9dryMwkqjpcpWUuPlAgMBAAE= -----END PUBLIC KEY----- gittuf-0.9.0/internal/testartifacts/testdata/keys/ssh/rsa.pub000066400000000000000000000010511475150141000243670ustar00rootroot00000000000000ssh-rsa 
AAAAB3NzaC1yc2EAAAADAQABAAABgQDEI4rdCY/zA3oOMet1JYJ+VugUapNfj7hcAZem1CRusd5FTiWVmNh4yywgA+1JWDsBnyLfbOZBiz4fiQQ++bRF/mDXQx2Qr2xgCS27tNyyv8tfERGuglAu69T7aLsfPGn4WCaVX3+OuALZVaQl/F5MzoDkiaZkCsBrVZkfL3393Zlhseb/bY87f7UOwArq3WMMK9Qp0cO8/8rsZnzu3nFClYSILKUx7Vrf7uSaUtl39Dh/QMX1m6Ax0Mh43gMnk+Fbrhai+BWo3Y58A5+LBUL3jqDkmXzFvhYJgGKISU5nfKCHDDqlug+l5wJmGus1G8jZ5uY7s2ZHS5yumPQNoCIZztmLm0DgQqNN4J+Yub5+L6yCgA1Q6mKq/631/DyHvF8e5GlnCOb1zE7zaJacJ42tNdVq7Z3x+Hik9PRfgBPt1oF41SFSCp0YRPLxLMFdTjNgV3HZXVNlq66IhyoDZ2hjd5XmMmq7h1a8IybBsItJ8Ikk4X12vIzCSqOlylZS4+U= gittuf-0.9.0/internal/testartifacts/testdata/keys/ssh/rsa_enc000066400000000000000000000051131475150141000244320ustar00rootroot00000000000000-----BEGIN OPENSSH PRIVATE KEY----- b3BlbnNzaC1rZXktdjEAAAAACmFlczI1Ni1jdHIAAAAGYmNyeXB0AAAAGAAAABAsV7UDOJ 0VBBLJQlLjXiLHAAAAGAAAAAEAAAGXAAAAB3NzaC1yc2EAAAADAQABAAABgQDEI4rdCY/z A3oOMet1JYJ+VugUapNfj7hcAZem1CRusd5FTiWVmNh4yywgA+1JWDsBnyLfbOZBiz4fiQ Q++bRF/mDXQx2Qr2xgCS27tNyyv8tfERGuglAu69T7aLsfPGn4WCaVX3+OuALZVaQl/F5M zoDkiaZkCsBrVZkfL3393Zlhseb/bY87f7UOwArq3WMMK9Qp0cO8/8rsZnzu3nFClYSILK Ux7Vrf7uSaUtl39Dh/QMX1m6Ax0Mh43gMnk+Fbrhai+BWo3Y58A5+LBUL3jqDkmXzFvhYJ gGKISU5nfKCHDDqlug+l5wJmGus1G8jZ5uY7s2ZHS5yumPQNoCIZztmLm0DgQqNN4J+Yub 5+L6yCgA1Q6mKq/631/DyHvF8e5GlnCOb1zE7zaJacJ42tNdVq7Z3x+Hik9PRfgBPt1oF4 1SFSCp0YRPLxLMFdTjNgV3HZXVNlq66IhyoDZ2hjd5XmMmq7h1a8IybBsItJ8Ikk4X12vI zCSqOlylZS4+UAAAWAjoaEU70K7OCsqOGOk8WaJxFb0TB8GdXRmgsSXH5dTdtQumozS2bI 6jNh4nLO7HnJ6nwrJt1hMjkgHEFoQVYrggidN9CNwx1uwLSsd3w60nlNQbvBwBlW6qKpv9 JTYxhX2759By8JwJNp24MQqtUlgMh8TSOiBQs681zK6fe3lpyUuFYamUGZtEapJMhzHlb7 x+2Bc0TKO5IIfwaTg6SF37sU56+luOXPItLWLuOvOeZZl0Sif0RsPP5Su4iUT7nPD6ODem Fduh2cIMhFzJTMcmJEHTet6ZAaqS269ZAg6HX669jwoyC8ZOHJ2lQAykX0uNHDtq8uU1t9 TtpBpmGa2KO7MniOQ3X9uVFPaU7KsDQyI67sBA1D1RYGGeBG0kZAb3E4wo1+cBG8MmKUm3 aYaebUo6712+q0Jz0KxTN7uhZJa8UIJPfuXwHZitahQsdFAUv5AO8KaxFoNNFOtJbFTaeW Mp9OgTdj5ZxdA/p01DZARSmMd6OxV3/cQRytzer0Prsu9MVEPAyeP8pqgOGHlKVB3yiKs9 haviT+SZA3WPua8X92mQbNJnsNNjCOlLjFMNYmnRrZ4FJzqkyESUDf2XsCSidCr2ef5ACj NJu791XvtNlnbVqmzoBuS5mWC+eeoLS+o8Zr2pzUsJjUZ58KBegPMmONz9Oo9RYcVPrFqU ObBxRYnj2FrIN5RHKSEMlz03FunabGTIbCOiXVOEN+jNesb7uHMyoVidie08X/d69CJYus 1XZRDzZxid7u1gShA3JGnsDsSdtuFXx9iICiQVyduoim5MTd55bLQFYkO37NIspS+MkqsU tT4GuJ7Rv4SChdWdmE+y8eCkkMJfBVUzbHim/IZj6gV1AinN4I1hc33dxFce3hqBY5mWal +nQhkx3cK31YlPnB/9VyuiUpJj6v5n0kc0SKdDhp4WIGpaTcN2O2OJOGlIjP/mw7P5Z3iv 60aR8uLGWRPlwb7QC8W+gvDFgJcRd6Xlh0NDIrVXNGV6Eo3AQC7C3mpT2UXyp9eX8cEFIH bf4QyslFBd7SlX9i3h0YdeMbBFqQc2Qm+kAyplZyZKOfLCUYewyLaVftZblTmY8Jj9TsJ8 2zRpGdCtvhi2OsUZb+v3KDQj+bUAKh3fYGtSwBEf6x6Lo+Wo+vpSsWlDN+uPebQ1WmemAl NSTNpT7hl+PfkxUV2/mCF0i87xP/d5nD743J46wQckWmFQXUXWcpAgIPiuX1kdC+xOvQ33 xeu48XjGBCiGTR+WHnm5+pP/tHvRBGcjEvcAUSHieNSMOafvpzL/3gT6DlCpcBk0hTQ+Uj LsGe7XKz81soomxmPm7zdtLW8BkYBM1wMxCfs7ScQ9+iENYsCKEfc2ZkH1JxYVqjLRgMg8 mSXNi8ZyA80gyVanf4Gv1hV5PPoETCoBIqd8VxjUl+YP4W43rvl/YX5CL3IUifalX7ieN7 aroZEam9e30mqblL97mJFNU0+X6+/gbXLrTIjKvl4Aud5+euZewUW0vCO990P2030qjlhQ AOV0cBHIJbPgVFGkTuWCxZvm4SAWNZiQxajluVGqHwNqXnfg51gaa3V0BaW9dwyMlLfLy/ 1/EdaGGJDkYQG4pM48eq0PCEDs1Oy2ahtjJtjJuv44wDPqUhZwsnIkN+72EkLh/bmUm7NP 8FflPpRGa+MicWdKnO3ltg6JmDg9ymiN9r/e5Ajy+NZ9IuIbacUfndj4SctOryU7CmGe2X hQ/COmpP0rs54mdOdNfKfvcZf1oUbG88fFpLmr4zB+sp8yD6vx2xKokeT53XhmgB6zIqeD nA/9XhsBu4hPOOceY6QzDebq5Bcx21qsXcwQCteJE0r3XzQ2MYHxcuXFxhSWiHUg0IVgtG N0jA8Q== -----END OPENSSH PRIVATE KEY----- 
gittuf-0.9.0/internal/testartifacts/testdata/scripts/000077500000000000000000000000001475150141000230145ustar00rootroot00000000000000gittuf-0.9.0/internal/testartifacts/testdata/scripts/askpass.sh000077500000000000000000000000401475150141000250120ustar00rootroot00000000000000#!/usr/bin/env sh echo hunter2 gittuf-0.9.0/internal/third_party/000077500000000000000000000000001475150141000171655ustar00rootroot00000000000000gittuf-0.9.0/internal/third_party/.keep000066400000000000000000000000001475150141000201000ustar00rootroot00000000000000gittuf-0.9.0/internal/third_party/go-securesystemslib/000077500000000000000000000000001475150141000231755ustar00rootroot00000000000000gittuf-0.9.0/internal/third_party/go-securesystemslib/LICENSE000066400000000000000000000021011475150141000241740ustar00rootroot00000000000000The MIT License (MIT) Copyright (c) 2021 NYU Secure Systems Lab Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. gittuf-0.9.0/internal/third_party/go-securesystemslib/README.md000066400000000000000000000005401475150141000244530ustar00rootroot00000000000000# Vendored secure-systems-lab/go-securesystemslib Issue: https://github.com/gittuf/gittuf/issues/266 ## dsse The dsse package has been vendored to experimentally add support for DSSE signature extensions. We're starting with support for Sigstore, and once we land on a reusable interface for extensions, we can upstream this to go-securesystemslib. gittuf-0.9.0/internal/third_party/go-securesystemslib/dsse/000077500000000000000000000000001475150141000241335ustar00rootroot00000000000000gittuf-0.9.0/internal/third_party/go-securesystemslib/dsse/envelope.go000066400000000000000000000036361475150141000263070ustar00rootroot00000000000000package dsse import ( "encoding/base64" "fmt" "google.golang.org/protobuf/types/known/structpb" ) /* Envelope captures an envelope as described by the DSSE specification. See here: https://github.com/secure-systems-lab/dsse/blob/master/envelope.md */ type Envelope struct { PayloadType string `json:"payloadType"` Payload string `json:"payload"` Signatures []Signature `json:"signatures"` } /* DecodeB64Payload returns the serialized body, decoded from the envelope's payload field. A flexible decoder is used, first trying standard base64, then URL-encoded base64. */ func (e *Envelope) DecodeB64Payload() ([]byte, error) { return b64Decode(e.Payload) } /* Signature represents a generic in-toto signature that contains the identifier of the key which was used to create the signature. The used signature scheme has to be agreed upon by the signer and verifer out of band. 
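(Illustrative addition, not part of the upstream comment: in the serialized
envelope each signature appears under the top-level "signatures" array as an
object of the form {"keyid": "<key identifier>", "sig": "<base64 signature>"},
optionally carrying an "extension" object as declared by the struct below.)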
The signature is a base64 encoding of the raw bytes from the signature algorithm. */ type Signature struct { KeyID string `json:"keyid"` Sig string `json:"sig"` Extension *Extension `json:"extension,omitempty"` } type Extension struct { Kind string `json:"kind"` Ext *structpb.Struct `json:"ext"` } /* PAE implementes the DSSE Pre-Authentic Encoding https://github.com/secure-systems-lab/dsse/blob/master/protocol.md#signature-definition */ func PAE(payloadType string, payload []byte) []byte { return []byte(fmt.Sprintf("DSSEv1 %d %s %d %s", len(payloadType), payloadType, len(payload), payload)) } /* Both standard and url encoding are allowed: https://github.com/secure-systems-lab/dsse/blob/master/envelope.md */ func b64Decode(s string) ([]byte, error) { b, err := base64.StdEncoding.DecodeString(s) if err != nil { b, err = base64.URLEncoding.DecodeString(s) if err != nil { return nil, fmt.Errorf("unable to base64 decode payload (is payload in the right format?)") } } return b, nil } gittuf-0.9.0/internal/third_party/go-securesystemslib/dsse/sign.go000066400000000000000000000036631475150141000254320ustar00rootroot00000000000000/* Package dsse implements the Dead Simple Signing Envelope (DSSE) https://github.com/secure-systems-lab/dsse */ package dsse import ( "context" "encoding/base64" "errors" ) // ErrNoSigners indicates that no signer was provided. var ErrNoSigners = errors.New("no signers provided") // EnvelopeSigner creates signed Envelopes. type EnvelopeSigner struct { providers []Signer } /* NewEnvelopeSigner creates an EnvelopeSigner that uses 1+ Signer algorithms to sign the data. */ func NewEnvelopeSigner(p ...Signer) (*EnvelopeSigner, error) { var providers []Signer for _, s := range p { if s != nil { providers = append(providers, s) } } if len(providers) == 0 { return nil, ErrNoSigners } return &EnvelopeSigner{ providers: providers, }, nil } /* NewMultiEnvelopeSigner creates an EnvelopeSigner that uses 1+ Signer algorithms to sign the data. The threshold parameter is legacy and is ignored. Deprecated: This function simply calls NewEnvelopeSigner, and that function should be preferred. */ func NewMultiEnvelopeSigner(threshold int, p ...Signer) (*EnvelopeSigner, error) { return NewEnvelopeSigner(p...) } /* SignPayload signs a payload and payload type according to DSSE. Returned is an envelope as defined here: https://github.com/secure-systems-lab/dsse/blob/master/envelope.md One signature will be added for each Signer in the EnvelopeSigner. */ func (es *EnvelopeSigner) SignPayload(ctx context.Context, payloadType string, body []byte) (*Envelope, error) { var e = Envelope{ Payload: base64.StdEncoding.EncodeToString(body), PayloadType: payloadType, } paeEnc := PAE(payloadType, body) for _, signer := range es.providers { sig, err := signer.Sign(ctx, paeEnc) if err != nil { return nil, err } keyID, err := signer.KeyID() if err != nil { keyID = "" } e.Signatures = append(e.Signatures, Signature{ KeyID: keyID, Sig: base64.StdEncoding.EncodeToString(sig), }) } return &e, nil } gittuf-0.9.0/internal/third_party/go-securesystemslib/dsse/signerverifier.go000066400000000000000000000030411475150141000275030ustar00rootroot00000000000000package dsse import ( "context" "crypto" "google.golang.org/protobuf/types/known/structpb" ) /* Signer defines the interface for an abstract signing algorithm. The Signer interface is used to inject signature algorithm implementations into the EnvelopeSigner. This decoupling allows for any signing algorithm and key management system can be used. 
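(Illustrative pointer, added here and not part of the upstream comment: one
concrete implementation in this repository is the Signer in
internal/signerverifier/ssh, which shells out to "ssh-keygen -Y sign" to
produce the signature.)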
The full message is provided as the parameter. If the signature algorithm depends on hashing of the message prior to signature calculation, the implementor of this interface must perform such hashing. The function must return raw bytes representing the calculated signature using the current algorithm, and the key used (if applicable). */ type Signer interface { Sign(ctx context.Context, data []byte) ([]byte, error) KeyID() (string, error) } /* Verifier verifies a complete message against a signature and key. If the message was hashed prior to signature generation, the verifier must perform the same steps. If KeyID returns successfully, only signature matching the key ID will be verified. */ type Verifier interface { Verify(ctx context.Context, data, sig []byte) error KeyID() (string, error) Public() crypto.PublicKey } type SupportsSignatureExtension interface { SetExtension(*structpb.Struct) ExpectedExtensionKind() string } // SignerVerifier provides both the signing and verification interface. type SignerVerifier interface { Signer Verifier } // Deprecated: switch to renamed SignerVerifier. This is currently aliased for // backwards compatibility. type SignVerifier = SignerVerifier gittuf-0.9.0/internal/third_party/go-securesystemslib/dsse/verify.go000066400000000000000000000070731475150141000257750ustar00rootroot00000000000000package dsse import ( "context" "crypto" "errors" "fmt" "golang.org/x/crypto/ssh" ) // ErrNoSignature indicates that an envelope did not contain any signatures. var ErrNoSignature = errors.New("no signature found") type EnvelopeVerifier struct { providers []Verifier threshold int } type AcceptedKey struct { Public crypto.PublicKey KeyID string Sig Signature } func (ev *EnvelopeVerifier) Verify(ctx context.Context, e *Envelope) ([]AcceptedKey, error) { if e == nil { return nil, errors.New("cannot verify a nil envelope") } if len(e.Signatures) == 0 { return nil, ErrNoSignature } // Decode payload (i.e serialized body) body, err := e.DecodeB64Payload() if err != nil { return nil, err } // Generate PAE(payloadtype, serialized body) paeEnc := PAE(e.PayloadType, body) // If *any* signature is found to be incorrect, it is skipped var acceptedKeys []AcceptedKey usedKeyids := make(map[string]string) unverified_providers := make([]Verifier, len(ev.providers)) copy(unverified_providers, ev.providers) for _, s := range e.Signatures { sig, err := b64Decode(s.Sig) if err != nil { return nil, err } // Loop over the providers. // If provider and signature include key IDs but do not match skip. // If a provider recognizes the key, we exit // the loop and use the result. providers := unverified_providers for i, v := range providers { keyID, err := v.KeyID() // Verifiers that do not provide a keyid will be generated one using public. 
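// (Descriptive note added for clarity, not in the upstream source: the
// fallback key ID is the SHA-256 SSH fingerprint of the verifier's public key,
// computed by SHA256KeyID below; if that also fails, keyID stays empty and the
// keyid-matching check is skipped, so verification is still attempted.)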
if err != nil || keyID == "" { keyID, err = SHA256KeyID(v.Public()) if err != nil { keyID = "" } } if s.KeyID != "" && keyID != "" && err == nil && s.KeyID != keyID { continue } if v, supportsSignatureExtension := v.(SupportsSignatureExtension); supportsSignatureExtension { if s.Extension == nil || s.Extension.Kind != v.ExpectedExtensionKind() { continue } v.SetExtension(s.Extension.Ext) } err = v.Verify(ctx, paeEnc, sig) if err != nil { continue } acceptedKey := AcceptedKey{ Public: v.Public(), KeyID: keyID, Sig: s, } unverified_providers = removeIndex(providers, i) // See https://github.com/in-toto/in-toto/pull/251 if _, ok := usedKeyids[keyID]; ok { fmt.Printf("Found envelope signed by different subkeys of the same main key, Only one of them is counted towards the step threshold, KeyID=%s\n", keyID) continue } usedKeyids[keyID] = "" acceptedKeys = append(acceptedKeys, acceptedKey) break } } // Sanity if with some reflect magic this happens. if ev.threshold <= 0 || ev.threshold > len(ev.providers) { return nil, errors.New("invalid threshold") } if len(usedKeyids) < ev.threshold { return acceptedKeys, fmt.Errorf("accepted signatures do not match threshold, Found: %d, Expected %d", len(acceptedKeys), ev.threshold) } return acceptedKeys, nil } func NewEnvelopeVerifier(v ...Verifier) (*EnvelopeVerifier, error) { return NewMultiEnvelopeVerifier(1, v...) } func NewMultiEnvelopeVerifier(threshold int, p ...Verifier) (*EnvelopeVerifier, error) { if threshold <= 0 || threshold > len(p) { return nil, errors.New("invalid threshold") } ev := EnvelopeVerifier{ providers: p, threshold: threshold, } return &ev, nil } func SHA256KeyID(pub crypto.PublicKey) (string, error) { // Generate public key fingerprint sshpk, err := ssh.NewPublicKey(pub) if err != nil { return "", err } fingerprint := ssh.FingerprintSHA256(sshpk) return fingerprint, nil } func removeIndex(v []Verifier, index int) []Verifier { return append(v[:index], v[index+1:]...) } gittuf-0.9.0/internal/tuf/000077500000000000000000000000001475150141000154325ustar00rootroot00000000000000gittuf-0.9.0/internal/tuf/migrations/000077500000000000000000000000001475150141000176065ustar00rootroot00000000000000gittuf-0.9.0/internal/tuf/migrations/migrations.go000066400000000000000000000074041475150141000223160ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package migrations import ( "github.com/gittuf/gittuf/internal/tuf" tufv01 "github.com/gittuf/gittuf/internal/tuf/v01" tufv02 "github.com/gittuf/gittuf/internal/tuf/v02" ) /* adityasaky: We should probably have an automatic migration to the _next_ version every time so that it's easy to migrate from v01 to v0k easily using consecutive migration functions. However, I think building that out now may be overkill; I don't know if we expect a bunch of schema changes. */ // MigrateRootMetadataV01ToV02 converts tufv01.RootMetadata into // tufv02.RootMetadata. 
func MigrateRootMetadataV01ToV02(rootMetadata *tufv01.RootMetadata) *tufv02.RootMetadata { newRootMetadata := tufv02.NewRootMetadata() // Set same expires newRootMetadata.Expires = rootMetadata.Expires // Set repository location newRootMetadata.RepositoryLocation = rootMetadata.RepositoryLocation // Set keys newRootMetadata.Principals = map[string]tuf.Principal{} for keyID, key := range rootMetadata.Keys { newRootMetadata.Principals[keyID] = key } // Set roles newRootMetadata.Roles = map[string]tufv02.Role{} for roleName, role := range rootMetadata.Roles { newRole := tufv02.Role{ PrincipalIDs: role.KeyIDs, Threshold: role.Threshold, } newRootMetadata.Roles[roleName] = newRole } // Set app attestations support newRootMetadata.GitHubApprovalsTrusted = rootMetadata.GitHubApprovalsTrusted // Set global rules newRootMetadata.GlobalRules = rootMetadata.GlobalRules // Set propagations newRootMetadata.Propagations = rootMetadata.Propagations if rootMetadata.MultiRepository != nil { newRootMetadata.MultiRepository = &tufv02.MultiRepository{ Controller: rootMetadata.MultiRepository.Controller, ControllerRepositories: []*tufv02.OtherRepository{}, NetworkRepositories: []*tufv02.OtherRepository{}, } for _, otherRepository := range rootMetadata.MultiRepository.ControllerRepositories { newRootMetadata.MultiRepository.ControllerRepositories = append(newRootMetadata.MultiRepository.ControllerRepositories, &tufv02.OtherRepository{ Name: otherRepository.GetName(), Location: otherRepository.GetLocation(), InitialRootPrincipals: otherRepository.GetInitialRootPrincipals(), }) } for _, otherRepository := range rootMetadata.MultiRepository.NetworkRepositories { newRootMetadata.MultiRepository.NetworkRepositories = append(newRootMetadata.MultiRepository.NetworkRepositories, &tufv02.OtherRepository{ Name: otherRepository.GetName(), Location: otherRepository.GetLocation(), InitialRootPrincipals: otherRepository.GetInitialRootPrincipals(), }) } } return newRootMetadata } // MigrateTargetsMetadataV01ToV02 converts tufv01.TargetsMetadata into // tufv02.TargetsMetadata. func MigrateTargetsMetadataV01ToV02(targetsMetadata *tufv01.TargetsMetadata) *tufv02.TargetsMetadata { newTargetsMetadata := tufv02.NewTargetsMetadata() // Set same expires newTargetsMetadata.Expires = targetsMetadata.Expires // Set delegations newTargetsMetadata.Delegations = &tufv02.Delegations{ Principals: map[string]tuf.Principal{}, Roles: []*tufv02.Delegation{}, } for keyID, key := range targetsMetadata.Delegations.Keys { newTargetsMetadata.Delegations.Principals[keyID] = key } for _, role := range targetsMetadata.Delegations.Roles { newRole := &tufv02.Delegation{ Name: role.Name, Paths: role.Paths, Terminating: role.Terminating, Custom: role.Custom, Role: tufv02.Role{ PrincipalIDs: role.KeyIDs, Threshold: role.Threshold, }, } newTargetsMetadata.Delegations.Roles = append(newTargetsMetadata.Delegations.Roles, newRole) } return newTargetsMetadata } gittuf-0.9.0/internal/tuf/tuf.go000066400000000000000000000346301475150141000165650ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package tuf import ( "errors" "github.com/gittuf/gittuf/internal/common/set" "github.com/secure-systems-lab/go-securesystemslib/signerverifier" ) const ( // RootRoleName defines the expected name for the gittuf root of trust. RootRoleName = "root" // TargetsRoleName defines the expected name for the top level gittuf policy file. 
TargetsRoleName = "targets" // GitHubAppRoleName defines the expected name for the GitHub app role in the root of trust metadata. GitHubAppRoleName = "github-app" AllowRuleName = "gittuf-allow-rule" ExhaustiveVerifierName = "gittuf-exhaustive-verifier" GittufPrefix = "gittuf-" GittufControllerPrefix = "gittuf-controller" GlobalRuleThresholdType = "threshold" GlobalRuleBlockForcePushesType = "block-force-pushes" RemoveGlobalRuleType = "remove" ) var ( ErrInvalidRootMetadata = errors.New("invalid root metadata") ErrUnknownRootMetadataVersion = errors.New("unknown schema version for root metadata") ErrUnknownTargetsMetadataVersion = errors.New("unknown schema version for rule file metadata") ErrPrimaryRuleFileInformationNotFoundInRoot = errors.New("root metadata does not contain primary rule file information") ErrGitHubAppInformationNotFoundInRoot = errors.New("the special GitHub app role is not defined, but GitHub app approvals is set to trusted") ErrDuplicatedRuleName = errors.New("two rules with same name found in policy") ErrInvalidPrincipalID = errors.New("principal ID is invalid") ErrInvalidPrincipalType = errors.New("invalid principal type (do you have the right gittuf version?)") ErrPrincipalNotFound = errors.New("principal not found") ErrPrincipalStillInUse = errors.New("principal is still in use") ErrRuleNotFound = errors.New("cannot find rule entry") ErrMissingRules = errors.New("some rules are missing") ErrCannotManipulateRulesWithGittufPrefix = errors.New("cannot add or change rules whose names have the 'gittuf-' prefix") ErrCannotMeetThreshold = errors.New("insufficient keys to meet threshold") ErrUnknownGlobalRuleType = errors.New("unknown global rule type") ErrGlobalRuleBlockForcePushesOnlyAppliesToGitPaths = errors.New("all patterns for block force pushes global rule must be for Git references") ErrGlobalRuleNotFound = errors.New("global rule not found") ErrGlobalRuleAlreadyExists = errors.New("global rule already exists") ErrPropagationDirectiveNotFound = errors.New("specified propagation directive not found") ErrNotAControllerRepository = errors.New("current repository is not marked as a controller repository") ) // Principal represents an entity that is granted trust by gittuf metadata. In // the simplest case, a principal may be a single public key. On the other hand, // a principal may represent a human (who may control multiple keys), a team // (consisting of multiple humans) etc. type Principal interface { ID() string Keys() []*signerverifier.SSLibKey CustomMetadata() map[string]string } // RootMetadata represents the root of trust metadata for gittuf. type RootMetadata interface { // SetExpires sets the expiry time for the metadata. // TODO: Does expiry make sense for the gittuf context? This is currently // unenforced SetExpires(expiry string) // SchemaVersion returns the metadata schema version. SchemaVersion() string // GetRepositoryLocation returns the canonical location of the Git // repository. GetRepositoryLocation() string // SetRepositoryLocation sets the specified repository location in the // root metadata. SetRepositoryLocation(location string) // GetPrincipals returns all the principals in the root metadata. GetPrincipals() map[string]Principal // AddRootPrincipal adds the corresponding principal to the root metadata // file and marks it as trusted for subsequent root of trust metadata. AddRootPrincipal(principal Principal) error // DeleteRootPrincipal removes the corresponding principal from the set of // trusted principals for the root of trust. 
DeleteRootPrincipal(principalID string) error // UpdateRootThreshold sets the required number of signatures for root of // trust metadata. UpdateRootThreshold(threshold int) error // GetRootPrincipals returns the principals trusted for the root of trust // metadata. GetRootPrincipals() ([]Principal, error) // GetRootThreshold returns the threshold of principals that must sign the // root of trust metadata. GetRootThreshold() (int, error) // AddPrincipalRuleFilePrincipal adds the corresponding principal to the // root metadata file and marks it as trusted for the primary rule file. AddPrimaryRuleFilePrincipal(principal Principal) error // DeletePrimaryRuleFilePrincipal removes the corresponding principal from // the set of trusted principals for the primary rule file. DeletePrimaryRuleFilePrincipal(principalID string) error // UpdatePrimaryRuleFileThreshold sets the required number of signatures for // the primary rule file. UpdatePrimaryRuleFileThreshold(threshold int) error // GetPrimaryRuleFilePrincipals returns the principals trusted for the // primary rule file. GetPrimaryRuleFilePrincipals() ([]Principal, error) // GetPrimaryRuleFileThreshold returns the threshold of principals that must // sign the primary rule file. GetPrimaryRuleFileThreshold() (int, error) // AddGlobalRule adds the corresponding rule to the root metadata. AddGlobalRule(globalRule GlobalRule) error // GetGlobalRules returns the global rules declared in the root metadata. GetGlobalRules() []GlobalRule // DeleteGlobalRule removes the global rule from the root metadata. DeleteGlobalRule(ruleName string) error // AddGitHubAppPrincipal adds the corresponding principal to the root // metadata and is trusted for GitHub app attestations. AddGitHubAppPrincipal(appName string, principal Principal) error // DeleteGitHubAppPrincipal removes the GitHub app attestations role from // the root of trust metadata. DeleteGitHubAppPrincipal(appName string) // EnableGitHubAppApprovals indicates attestations from the GitHub app role // must be trusted. // TODO: this needs to be generalized across tools EnableGitHubAppApprovals() // DisableGitHubAppApprovals indicates attestations from the GitHub app role // must not be trusted thereafter. // TODO: this needs to be generalized across tools DisableGitHubAppApprovals() // IsGitHubAppApprovalTrusted indicates if the GitHub app is trusted. // TODO: this needs to be generalized across tools IsGitHubAppApprovalTrusted() bool // GetGitHubAppPrincipals returns the principals trusted for the GitHub app // attestations. // TODO: this needs to be generalized across tools GetGitHubAppPrincipals() ([]Principal, error) // AddPropagationDirective adds a propagation directive to the root // metadata. AddPropagationDirective(directive PropagationDirective) error // GetPropagationDirectives returns the propagation directives found in the // root metadata. GetPropagationDirectives() []PropagationDirective // DeletePropagationDirective removes a propagation directive from the root // metadata. DeletePropagationDirective(name string) error // IsController indicates if the repository serves as the controller for // a multi-repository gittuf network. IsController() bool // EnableController marks the current repository as a controller // repository. EnableController() error // DisableController marks the current repository as not-a-controller. DisableController() error // AddControllerRepository adds the specified repository as a controller // for the current repository. 
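	// (Clarifying note added, not in the upstream comment: this is the inverse
	// of AddNetworkRepository below -- a member repository records its
	// controllers here, while a controller records its member repositories via
	// AddNetworkRepository.)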
AddControllerRepository(name, location string, initialRootPrincipals []Principal) error // AddNetworkRepository adds the specified repository as part of the // network for which the current repository is a controller. The current // repository must be marked as a controller before this can be used. AddNetworkRepository(name, location string, initialRootPrincipals []Principal) error // GetControllerRepositories returns the repositories that serve as the // controllers for the networks the current repository is a part of. GetControllerRepositories() []OtherRepository // GetNetworkRepositories returns the repositories that are part of the // network for which the current repository is a controller. // IsController must return true for this to be set. GetNetworkRepositories() []OtherRepository } // TargetsMetadata represents gittuf's rule files. Its name is inspired by TUF. type TargetsMetadata interface { // SetExpires sets the expiry time for the metadata. // TODO: Does expiry make sense for the gittuf context? This is currently // unenforced SetExpires(expiry string) // SchemaVersion returns the metadata schema version. SchemaVersion() string // GetPrincipals returns all the principals in the rule file. GetPrincipals() map[string]Principal // GetRules returns all the rules in the metadata. GetRules() []Rule // AddRule adds a rule to the metadata file. AddRule(ruleName string, authorizedPrincipalIDs, rulePatterns []string, threshold int) error // UpdateRule updates an existing rule identified by ruleName with the // provided parameters. UpdateRule(ruleName string, authorizedPrincipalIDs, rulePatterns []string, threshold int) error // ReorderRules accepts the new order of rules (identified by their // ruleNames). ReorderRules(newRuleNames []string) error // RemoveRule deletes the rule identified by the ruleName. RemoveRule(ruleName string) error // AddPrincipal adds a principal to the metadata. AddPrincipal(principal Principal) error // RemovePrincipal removes a principal from the metadata. RemovePrincipal(principalID string) error } // Rule represents a rule entry in a rule file (`TargetsMetadata`). type Rule interface { // ID returns the identifier of the rule, typically a name. ID() string // Matches indicates if the rule applies to a specified path. Matches(path string) bool // GetProtectedNamespaces returns the set of namespaces protected by the // rule. GetProtectedNamespaces() []string // GetPrincipalIDs returns the identifiers of the principals that are listed // as trusted by the rule. GetPrincipalIDs() *set.Set[string] // GetThreshold returns the threshold of principals that must approve to // meet the rule. GetThreshold() int // IsLastTrustedInRuleFile indicates that subsequent rules in the rule file // are not to be trusted if the current rule matches the namespace under // verification (similar to TUF's terminating behavior). However, the // current rule's delegated rules as well as other rules already in the // queue are trusted. IsLastTrustedInRuleFile() bool } // GlobalRule represents a repository-wide constraint set by the owners in the // root metadata. type GlobalRule interface { // GetName returns the name of the global rule. GetName() string } // GlobalRuleThreshold indicates the number of required approvals for a change // to the specified namespaces to be valid. type GlobalRuleThreshold interface { GlobalRule // Matches indicates if the rule applies to a specified path. 
Matches(path string) bool // GetProtectedNamespaces returns the set of namespaces protected by the // rule. GetProtectedNamespaces() []string // GetThreshold returns the threshold of principals that must approve to // meet the rule. GetThreshold() int } // GlobalRuleBlockForcePushes prevents force pushes or rewriting of history for // the specified namespaces. type GlobalRuleBlockForcePushes interface { GlobalRule // Matches indicates if the rule applies to a specified path. Matches(path string) bool // GetProtectedNamespaces returns the set of namespaces protected by the // rule. GetProtectedNamespaces() []string } // PropagationDirective represents an instruction to a gittuf client to carry // out the propagation workflow. type PropagationDirective interface { // GetName returns the name of the directive. GetName() string // GetUpstreamRepository returns the clone-friendly location of the upstream // repository. GetUpstreamRepository() string // GetUpstreamReference returns the reference that must be propagated from // the upstream repository. GetUpstreamReference() string // GetDownstreamReference returns the reference that the upstream components // must be propagated into in the downstream repository (i.e., the // repository where this directive is set.) GetDownstreamReference() string // GetDownstreamPath() returns the path in the Git tree of the downstream // reference where the upstream repository's contents must be stored by the // propagation workflow. GetDownstreamPath() string } // MultiRepository is used to configure gittuf to act in multi-repository // setups. If the repository is a "controller", i.e., it declares policies for // one or more other repositories, the contents of the controller repository's // policy must be propagated to each of the other repositories. type MultiRepository interface { // IsController indicates if the current repository acts as a controller // for a network of gittuf-enabled repositories. IsController() bool // GetControllerRepositories returns the repositories configured as a // controller for the current repository. In other words, the current // repository is a part of the network overseen by each of the // configured controller repositories. GetControllerRepositories() []OtherRepository // GetNetworkRepositories returns the repositories configured as part of // the network overseen by the repository. This must return `nil` if // IsController is `false`. GetNetworkRepositories() []OtherRepository } // OtherRepository represents another gittuf-enabled repository in the root // metadata. type OtherRepository interface { // GetName returns the user-friendly name of the other repository. It // must be unique among all the listed OtherRepository entries. GetName() string // GetLocation returns the clone-friendly location of the other // repository. GetLocation() string // GetInitialRootPrincipals returns the set of principals trusted to // sign the other repository's initial gittuf root of trust metadata. 
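//
// Illustrative sketch (not from the gittuf source): a client
// bootstrapping trust in a listed repository might combine these
// accessors roughly as follows, where fetchAndVerifyRoot is a
// hypothetical helper:
//
//	for _, other := range rootMetadata.GetControllerRepositories() {
//		fetchAndVerifyRoot(other.GetLocation(), other.GetInitialRootPrincipals())
//	}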
GetInitialRootPrincipals() []Principal } gittuf-0.9.0/internal/tuf/v01/000077500000000000000000000000001475150141000160405ustar00rootroot00000000000000gittuf-0.9.0/internal/tuf/v01/helpers_test.go000066400000000000000000000023461475150141000210750ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package v01 import ( "testing" "time" "github.com/gittuf/gittuf/internal/common/set" "github.com/gittuf/gittuf/internal/signerverifier/ssh" artifacts "github.com/gittuf/gittuf/internal/testartifacts" "github.com/gittuf/gittuf/internal/tuf" ) var ( rootPubKeyBytes = artifacts.SSHRSAPublicSSH targets1PubKeyBytes = artifacts.SSHECDSAPublicSSH targets2PubKeyBytes = artifacts.SSHED25519PublicSSH ) func initialTestRootMetadata(t *testing.T) *RootMetadata { t.Helper() rootKey := NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, rootPubKeyBytes)) rootMetadata := NewRootMetadata() rootMetadata.SetExpires(time.Now().AddDate(1, 0, 0).Format(time.RFC3339)) if err := rootMetadata.addKey(rootKey); err != nil { t.Fatal(err) } rootMetadata.addRole(tuf.RootRoleName, Role{ KeyIDs: set.NewSetFromItems(rootKey.KeyID), Threshold: 1, }) return rootMetadata } func initialTestTargetsMetadata(t *testing.T) *TargetsMetadata { t.Helper() targetsMetadata := NewTargetsMetadata() targetsMetadata.SetExpires(time.Now().AddDate(1, 0, 0).Format(time.RFC3339)) targetsMetadata.Delegations = &Delegations{Roles: []*Delegation{AllowRule()}} return targetsMetadata } gittuf-0.9.0/internal/tuf/v01/root.go000066400000000000000000000547161475150141000173670ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package v01 import ( "encoding/json" "fmt" "path" "strings" "github.com/danwakefield/fnmatch" "github.com/gittuf/gittuf/internal/common/set" "github.com/gittuf/gittuf/internal/tuf" ) const ( rootVersion = "https://gittuf.dev/policy/root/v0.1" ) // RootMetadata defines the schema of TUF's Root role. type RootMetadata struct { Type string `json:"type"` Expires string `json:"expires"` RepositoryLocation string `json:"repositoryLocation,omitempty"` Keys map[string]*Key `json:"keys"` Roles map[string]Role `json:"roles"` GitHubApprovalsTrusted bool `json:"githubApprovalsTrusted"` GlobalRules []tuf.GlobalRule `json:"globalRules,omitempty"` Propagations []tuf.PropagationDirective `json:"propagations,omitempty"` MultiRepository *MultiRepository `json:"multiRepository,omitempty"` } // NewRootMetadata returns a new instance of RootMetadata. func NewRootMetadata() *RootMetadata { return &RootMetadata{ Type: "root", } } // SetExpires sets the expiry date of the RootMetadata to the value passed in. func (r *RootMetadata) SetExpires(expires string) { r.Expires = expires } // SchemaVersion returns the metadata schema version. func (r *RootMetadata) SchemaVersion() string { return rootVersion } // GetRepositoryLocation returns the canonical location of the Git repository. func (r *RootMetadata) GetRepositoryLocation() string { return r.RepositoryLocation } // SetRepositoryLocation sets the specified repository location in the root // metadata. func (r *RootMetadata) SetRepositoryLocation(location string) { r.RepositoryLocation = location } // AddRootPrincipal adds the specified key to the root metadata and authorizes the key // for the root role. 
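//
// If the root role does not exist yet, it is created with this key and a
// threshold of 1. Illustrative sketch (not from the gittuf source),
// assuming key is a *Key produced by NewKeyFromSSLibKey:
//
//	rootMetadata := NewRootMetadata()
//	if err := rootMetadata.AddRootPrincipal(key); err != nil {
//		// handle the error
//	}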
func (r *RootMetadata) AddRootPrincipal(key tuf.Principal) error { if key == nil { return tuf.ErrInvalidPrincipalType } // Add key to metadata if err := r.addKey(key); err != nil { return err } if _, ok := r.Roles[tuf.RootRoleName]; !ok { // Create a new root role entry with this key r.addRole(tuf.RootRoleName, Role{ KeyIDs: set.NewSetFromItems(key.ID()), Threshold: 1, }) return nil } // Add key ID to the root role if it's not already in it rootRole := r.Roles[tuf.RootRoleName] rootRole.KeyIDs.Add(key.ID()) r.Roles[tuf.RootRoleName] = rootRole return nil } // DeleteRootPrincipal removes keyID from the list of trusted Root public keys // in rootMetadata. It does not remove the key entry itself as it does not check // if other roles can be verified using the same key. func (r *RootMetadata) DeleteRootPrincipal(keyID string) error { if _, ok := r.Roles[tuf.RootRoleName]; !ok { return tuf.ErrInvalidRootMetadata } rootRole := r.Roles[tuf.RootRoleName] if rootRole.KeyIDs.Len() <= rootRole.Threshold { return tuf.ErrCannotMeetThreshold } rootRole.KeyIDs.Remove(keyID) r.Roles[tuf.RootRoleName] = rootRole return nil } // AddPrimaryRuleFilePrincipal adds the 'targetsKey' as a trusted public key in // 'rootMetadata' for the top level Targets role. func (r *RootMetadata) AddPrimaryRuleFilePrincipal(key tuf.Principal) error { if key == nil { return tuf.ErrInvalidPrincipalType } // Add key to the metadata file if err := r.addKey(key); err != nil { return err } if _, ok := r.Roles[tuf.TargetsRoleName]; !ok { // Create a new targets role entry with this key r.addRole(tuf.TargetsRoleName, Role{ KeyIDs: set.NewSetFromItems(key.ID()), Threshold: 1, }) return nil } targetsRole := r.Roles[tuf.TargetsRoleName] targetsRole.KeyIDs.Add(key.ID()) r.Roles[tuf.TargetsRoleName] = targetsRole return nil } // DeletePrimaryRuleFilePrincipal removes the key matching 'keyID' from trusted // public keys for top level Targets role in 'rootMetadata'. Note: It doesn't // remove the key entry itself as it doesn't check if other roles can use the // same key. func (r *RootMetadata) DeletePrimaryRuleFilePrincipal(keyID string) error { if keyID == "" { return tuf.ErrInvalidPrincipalID } targetsRole, ok := r.Roles[tuf.TargetsRoleName] if !ok { return tuf.ErrPrimaryRuleFileInformationNotFoundInRoot } if targetsRole.KeyIDs.Len() <= targetsRole.Threshold { return tuf.ErrCannotMeetThreshold } targetsRole.KeyIDs.Remove(keyID) r.Roles[tuf.TargetsRoleName] = targetsRole return nil } // AddGitHubAppPrincipal adds the 'appKey' as a trusted public key in // 'rootMetadata' for the special GitHub app role. This key is used to verify // GitHub pull request approval attestation signatures. func (r *RootMetadata) AddGitHubAppPrincipal(name string, key tuf.Principal) error { if key == nil { return tuf.ErrInvalidPrincipalType } // TODO: support multiple keys / threshold for app if err := r.addKey(key); err != nil { return err } role := Role{ KeyIDs: set.NewSetFromItems(key.ID()), Threshold: 1, } r.addRole(name, role) // AddRole replaces the specified role if it already exists return nil } // DeleteGitHubAppPrincipal removes the special GitHub app role from the root // metadata. func (r *RootMetadata) DeleteGitHubAppPrincipal(name string) { // TODO: support multiple keys / threshold for app delete(r.Roles, name) } // EnableGitHubAppApprovals sets GitHubApprovalsTrusted to true in the // root metadata. 
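//
// Illustrative sketch (not from the gittuf source): approvals are
// typically enabled after the app's principal has been registered, for
// example:
//
//	if err := rootMetadata.AddGitHubAppPrincipal(tuf.GitHubAppRoleName, appKey); err != nil {
//		// handle the error
//	}
//	rootMetadata.EnableGitHubAppApprovals()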
func (r *RootMetadata) EnableGitHubAppApprovals() { r.GitHubApprovalsTrusted = true } // DisableGitHubAppApprovals sets GitHubApprovalsTrusted to false in the root // metadata. func (r *RootMetadata) DisableGitHubAppApprovals() { r.GitHubApprovalsTrusted = false } // UpdateRootThreshold sets the threshold for the Root role. func (r *RootMetadata) UpdateRootThreshold(threshold int) error { rootRole, ok := r.Roles[tuf.RootRoleName] if !ok { return tuf.ErrInvalidRootMetadata } if rootRole.KeyIDs.Len() < threshold { return tuf.ErrCannotMeetThreshold } rootRole.Threshold = threshold r.Roles[tuf.RootRoleName] = rootRole return nil } // UpdatePrimaryRuleFileThreshold sets the threshold for the top level Targets // role. func (r *RootMetadata) UpdatePrimaryRuleFileThreshold(threshold int) error { targetsRole, ok := r.Roles[tuf.TargetsRoleName] if !ok { return tuf.ErrPrimaryRuleFileInformationNotFoundInRoot } if targetsRole.KeyIDs.Len() < threshold { return tuf.ErrCannotMeetThreshold } targetsRole.Threshold = threshold r.Roles[tuf.TargetsRoleName] = targetsRole return nil } // GetPrincipals returns all the principals in the root metadata. func (r *RootMetadata) GetPrincipals() map[string]tuf.Principal { principals := map[string]tuf.Principal{} for id, key := range r.Keys { principals[id] = key } return principals } // GetRootThreshold returns the threshold of principals that must sign the root // of trust metadata. func (r *RootMetadata) GetRootThreshold() (int, error) { role, hasRole := r.Roles[tuf.RootRoleName] if !hasRole { return -1, tuf.ErrInvalidRootMetadata } return role.Threshold, nil } // GetRootPrincipals returns the principals trusted for the root of trust // metadata. func (r *RootMetadata) GetRootPrincipals() ([]tuf.Principal, error) { role, hasRole := r.Roles[tuf.RootRoleName] if !hasRole { return nil, tuf.ErrInvalidRootMetadata } principals := make([]tuf.Principal, 0, role.KeyIDs.Len()) for _, id := range role.KeyIDs.Contents() { key, has := r.Keys[id] if !has { return nil, tuf.ErrInvalidPrincipalType } principals = append(principals, key) } return principals, nil } // GetPrimaryRuleFileThreshold returns the threshold of principals that must // sign the primary rule file. func (r *RootMetadata) GetPrimaryRuleFileThreshold() (int, error) { role, hasRole := r.Roles[tuf.TargetsRoleName] if !hasRole { return -1, tuf.ErrPrimaryRuleFileInformationNotFoundInRoot } return role.Threshold, nil } // GetPrimaryRuleFilePrincipals returns the principals trusted for the primary // rule file. func (r *RootMetadata) GetPrimaryRuleFilePrincipals() ([]tuf.Principal, error) { role, hasRole := r.Roles[tuf.TargetsRoleName] if !hasRole { return nil, tuf.ErrPrimaryRuleFileInformationNotFoundInRoot } principals := make([]tuf.Principal, 0, role.KeyIDs.Len()) for _, id := range role.KeyIDs.Contents() { key, has := r.Keys[id] if !has { return nil, tuf.ErrInvalidPrincipalType } principals = append(principals, key) } return principals, nil } // IsGitHubAppApprovalTrusted indicates if the GitHub app is trusted. // // TODO: this needs to be generalized across tools func (r *RootMetadata) IsGitHubAppApprovalTrusted() bool { return r.GitHubApprovalsTrusted } // GetGitHubAppPrincipals returns the principals trusted for the GitHub app // attestations. 
// // TODO: this needs to be generalized across tools func (r *RootMetadata) GetGitHubAppPrincipals() ([]tuf.Principal, error) { role, hasRole := r.Roles[tuf.GitHubAppRoleName] if !hasRole { return nil, tuf.ErrGitHubAppInformationNotFoundInRoot } principals := make([]tuf.Principal, 0, role.KeyIDs.Len()) for _, id := range role.KeyIDs.Contents() { key, has := r.Keys[id] if !has { return nil, tuf.ErrInvalidPrincipalType } principals = append(principals, key) } return principals, nil } // AddGlobalRule adds a new global rule to RootMetadata. func (r *RootMetadata) AddGlobalRule(globalRule tuf.GlobalRule) error { allGlobalRules := r.GlobalRules if allGlobalRules == nil { allGlobalRules = []tuf.GlobalRule{} } // check for duplicates for _, rule := range allGlobalRules { if rule.GetName() == globalRule.GetName() { return tuf.ErrGlobalRuleAlreadyExists } } allGlobalRules = append(allGlobalRules, globalRule) r.GlobalRules = allGlobalRules return nil } // DeleteGlobalRule removes the specified global rule from the RootMetadata. func (r *RootMetadata) DeleteGlobalRule(ruleName string) error { allGlobalRules := r.GlobalRules updatedGlobalRules := []tuf.GlobalRule{} if len(allGlobalRules) == 0 { return tuf.ErrGlobalRuleNotFound } for _, rule := range allGlobalRules { if rule.GetName() != ruleName { updatedGlobalRules = append(updatedGlobalRules, rule) } } r.GlobalRules = updatedGlobalRules return nil } func (r *RootMetadata) GetGlobalRules() []tuf.GlobalRule { return r.GlobalRules } // AddPropagationDirective adds a propagation directive to the root metadata. func (r *RootMetadata) AddPropagationDirective(directive tuf.PropagationDirective) error { // TODO: handle duplicates / updates r.Propagations = append(r.Propagations, directive) return nil } // GetPropagationDirectives returns the propagation directives found in the root // metadata. func (r *RootMetadata) GetPropagationDirectives() []tuf.PropagationDirective { return r.Propagations } // DeletePropagationDirective removes a propagation directive from the root // metadata. func (r *RootMetadata) DeletePropagationDirective(name string) error { index := -1 for i, directive := range r.Propagations { if directive.GetName() == name { index = i break } } if index == -1 { return tuf.ErrPropagationDirectiveNotFound } r.Propagations = append(r.Propagations[:index], r.Propagations[index+1:]...) return nil } // IsController indicates if the repository serves as the controller for a // multi-repository gittuf network. func (r *RootMetadata) IsController() bool { if r.MultiRepository == nil { return false } return r.MultiRepository.IsController() } // EnableController marks the current repository as a controller repository. func (r *RootMetadata) EnableController() error { if r.MultiRepository == nil { r.MultiRepository = &MultiRepository{} } r.MultiRepository.Controller = true return nil // TODO: what if it's already a controller? noop? } // DisableController marks the current repository as not-a-controller. func (r *RootMetadata) DisableController() error { if r.MultiRepository == nil { // nothing to do return nil } r.MultiRepository.Controller = false // TODO: should we remove the network repository entries? return nil } // AddControllerRepository adds the specified repository as a controller for the // current repository. 
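//
// Note that, in addition to recording the controller entry, this also
// adds a propagation directive so that the controller's policy reference
// (refs/gittuf/policy) is propagated into this repository under a
// gittuf-controller-prefixed path.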
func (r *RootMetadata) AddControllerRepository(name, location string, initialRootPrincipals []tuf.Principal) error { if r.MultiRepository == nil { r.MultiRepository = &MultiRepository{ControllerRepositories: []*OtherRepository{}} } // TODO: check for duplicates otherRepository := &OtherRepository{ Name: name, Location: location, InitialRootPrincipals: make([]*Key, 0, len(initialRootPrincipals)), } for _, principal := range initialRootPrincipals { key, isKey := principal.(*Key) if !isKey { return tuf.ErrInvalidPrincipalType } otherRepository.InitialRootPrincipals = append(otherRepository.InitialRootPrincipals, key) } r.MultiRepository.ControllerRepositories = append(r.MultiRepository.ControllerRepositories, otherRepository) // Add the controller as a repository whose policy contents must be // propagated into this repository propagationName := fmt.Sprintf("%s-%s", tuf.GittufControllerPrefix, name) propagationLocation := path.Join(tuf.GittufControllerPrefix, name) return r.AddPropagationDirective(NewPropagationDirective(propagationName, location, "refs/gittuf/policy", "refs/gittuf/policy", propagationLocation)) } // AddNetworkRepository adds the specified repository as part of the network for // which the current repository is a controller. The current repository must be // marked as a controller before this can be used. func (r *RootMetadata) AddNetworkRepository(name, location string, initialRootPrincipals []tuf.Principal) error { if r.MultiRepository == nil || !r.MultiRepository.Controller { // EnableController must be called first return tuf.ErrNotAControllerRepository } if r.MultiRepository.NetworkRepositories == nil { r.MultiRepository.NetworkRepositories = []*OtherRepository{} } // TODO: check for duplicates otherRepository := &OtherRepository{ Name: name, Location: location, InitialRootPrincipals: make([]*Key, 0, len(initialRootPrincipals)), } for _, principal := range initialRootPrincipals { key, isKey := principal.(*Key) if !isKey { return tuf.ErrInvalidPrincipalType } otherRepository.InitialRootPrincipals = append(otherRepository.InitialRootPrincipals, key) } r.MultiRepository.NetworkRepositories = append(r.MultiRepository.NetworkRepositories, otherRepository) return nil } // GetControllerRepositories returns the repositories that serve as the // controllers for the networks the current repository is a part of. func (r *RootMetadata) GetControllerRepositories() []tuf.OtherRepository { if r.MultiRepository == nil { return nil } return r.MultiRepository.GetControllerRepositories() } // GetNetworkRepositories returns the repositories that are part of the network // for which the current repository is a controller. IsController must return // true for this to be set. 
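//
// If the repository has no multi-repository configuration, or it is not
// marked as a controller, this returns nil.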
func (r *RootMetadata) GetNetworkRepositories() []tuf.OtherRepository { if r.MultiRepository == nil { return nil } return r.MultiRepository.GetNetworkRepositories() } func (r *RootMetadata) UnmarshalJSON(data []byte) error { // this type _has_ to be a copy of RootMetadata, minus the use of // json.RawMessage for tuf interfaces type tempType struct { Type string `json:"type"` Expires string `json:"expires"` RepositoryLocation string `json:"repositoryLocation,omitempty"` Keys map[string]*Key `json:"keys"` Roles map[string]Role `json:"roles"` GitHubApprovalsTrusted bool `json:"githubApprovalsTrusted"` GlobalRules []json.RawMessage `json:"globalRules,omitempty"` Propagations []json.RawMessage `json:"propagations,omitempty"` MultiRepository *MultiRepository `json:"multiRepository,omitempty"` } temp := &tempType{} if err := json.Unmarshal(data, &temp); err != nil { return fmt.Errorf("unable to unmarshal json: %w", err) } r.Type = temp.Type r.Expires = temp.Expires r.RepositoryLocation = temp.RepositoryLocation r.Keys = temp.Keys r.Roles = temp.Roles r.GitHubApprovalsTrusted = temp.GitHubApprovalsTrusted r.GlobalRules = []tuf.GlobalRule{} for _, globalRuleBytes := range temp.GlobalRules { tempGlobalRule := map[string]any{} if err := json.Unmarshal(globalRuleBytes, &tempGlobalRule); err != nil { return fmt.Errorf("unable to unmarshal json for global rule: %w", err) } switch tempGlobalRule["type"] { case tuf.GlobalRuleThresholdType: globalRule := &GlobalRuleThreshold{} if err := json.Unmarshal(globalRuleBytes, globalRule); err != nil { return fmt.Errorf("unable to unmarshal json for global rule: %w", err) } r.GlobalRules = append(r.GlobalRules, globalRule) case tuf.GlobalRuleBlockForcePushesType: globalRule := &GlobalRuleBlockForcePushes{} if err := json.Unmarshal(globalRuleBytes, globalRule); err != nil { return fmt.Errorf("unable to unmarshal json for global rule: %w", err) } r.GlobalRules = append(r.GlobalRules, globalRule) default: return tuf.ErrUnknownGlobalRuleType } } r.Propagations = []tuf.PropagationDirective{} for _, propagationDirectiveBytes := range temp.Propagations { propagationDirective := &PropagationDirective{} if err := json.Unmarshal(propagationDirectiveBytes, propagationDirective); err != nil { return fmt.Errorf("unable to unmarshal json for propagation directive: %w", err) } r.Propagations = append(r.Propagations, propagationDirective) } r.MultiRepository = temp.MultiRepository return nil } // addKey adds a key to the RootMetadata instance. func (r *RootMetadata) addKey(key tuf.Principal) error { if r.Keys == nil { r.Keys = map[string]*Key{} } keyT, isKnownType := key.(*Key) if !isKnownType { return tuf.ErrInvalidPrincipalType } r.Keys[key.ID()] = keyT return nil } // addRole adds a role object and associates it with roleName in the // RootMetadata instance. 
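//
// If a role with the same name already exists, it is replaced.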
func (r *RootMetadata) addRole(roleName string, role Role) { if r.Roles == nil { r.Roles = map[string]Role{} } r.Roles[roleName] = role } type GlobalRuleThreshold struct { Name string `json:"name"` Type string `json:"type"` Paths []string `json:"paths"` Threshold int `json:"threshold"` } func NewGlobalRuleThreshold(name string, paths []string, threshold int) *GlobalRuleThreshold { return &GlobalRuleThreshold{ Name: name, Type: tuf.GlobalRuleThresholdType, Paths: paths, Threshold: threshold, } } func (g *GlobalRuleThreshold) GetName() string { return g.Name } func (g *GlobalRuleThreshold) Matches(path string) bool { for _, pattern := range g.Paths { // We validate pattern when it's added to / updated in the metadata if matches := fnmatch.Match(pattern, path, 0); matches { return true } } return false } func (g *GlobalRuleThreshold) GetProtectedNamespaces() []string { return g.Paths } func (g *GlobalRuleThreshold) GetThreshold() int { return g.Threshold } type GlobalRuleBlockForcePushes struct { Name string `json:"name"` Type string `json:"type"` Paths []string `json:"paths"` } func NewGlobalRuleBlockForcePushes(name string, paths []string) (*GlobalRuleBlockForcePushes, error) { for _, path := range paths { if !strings.HasPrefix(path, "git:") { // TODO: set prefix correctly return nil, tuf.ErrGlobalRuleBlockForcePushesOnlyAppliesToGitPaths } } return &GlobalRuleBlockForcePushes{ Name: name, Type: tuf.GlobalRuleBlockForcePushesType, Paths: paths, }, nil } func (g *GlobalRuleBlockForcePushes) GetName() string { return g.Name } func (g *GlobalRuleBlockForcePushes) Matches(path string) bool { for _, pattern := range g.Paths { // We validate pattern when it's added to / updated in the metadata if matches := fnmatch.Match(pattern, path, 0); matches { return true } } return false } func (g *GlobalRuleBlockForcePushes) GetProtectedNamespaces() []string { return g.Paths } type PropagationDirective struct { Name string `json:"name"` UpstreamRepository string `json:"upstreamRepository"` UpstreamReference string `json:"upstreamReference"` DownstreamReference string `json:"downstreamReference"` DownstreamPath string `json:"downstreamPath"` } func (p *PropagationDirective) GetName() string { return p.Name } func (p *PropagationDirective) GetUpstreamRepository() string { return p.UpstreamRepository } func (p *PropagationDirective) GetUpstreamReference() string { return p.UpstreamReference } func (p *PropagationDirective) GetDownstreamReference() string { return p.DownstreamReference } func (p *PropagationDirective) GetDownstreamPath() string { return p.DownstreamPath } func NewPropagationDirective(name, upstreamRepository, upstreamReference, downstreamReference, downstreamPath string) tuf.PropagationDirective { return &PropagationDirective{ Name: name, UpstreamRepository: upstreamRepository, UpstreamReference: upstreamReference, DownstreamReference: downstreamReference, DownstreamPath: downstreamPath, } } type MultiRepository struct { Controller bool `json:"controller"` ControllerRepositories []*OtherRepository `json:"controllerRepositories,omitempty"` NetworkRepositories []*OtherRepository `json:"networkRepositories,omitempty"` } func (m *MultiRepository) IsController() bool { return m.Controller } func (m *MultiRepository) GetControllerRepositories() []tuf.OtherRepository { controllerRepositories := []tuf.OtherRepository{} for _, repository := range m.ControllerRepositories { controllerRepositories = append(controllerRepositories, repository) } return controllerRepositories } func (m *MultiRepository) 
GetNetworkRepositories() []tuf.OtherRepository { if !m.Controller { return nil } networkRepositories := []tuf.OtherRepository{} for _, repository := range m.NetworkRepositories { networkRepositories = append(networkRepositories, repository) } return networkRepositories } type OtherRepository struct { Name string `json:"name"` Location string `json:"location"` InitialRootPrincipals []*Key `json:"initialRootPrincipals"` } func (o *OtherRepository) GetName() string { return o.Name } func (o *OtherRepository) GetLocation() string { return o.Location } func (o *OtherRepository) GetInitialRootPrincipals() []tuf.Principal { initialRootPrincipals := []tuf.Principal{} for _, key := range o.InitialRootPrincipals { initialRootPrincipals = append(initialRootPrincipals, key) } return initialRootPrincipals } gittuf-0.9.0/internal/tuf/v01/root_test.go000066400000000000000000000460701475150141000204200ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package v01 import ( "context" "encoding/json" "fmt" "os" "path/filepath" "testing" "time" "github.com/gittuf/gittuf/internal/common/set" "github.com/gittuf/gittuf/internal/signerverifier/dsse" "github.com/gittuf/gittuf/internal/signerverifier/ssh" artifacts "github.com/gittuf/gittuf/internal/testartifacts" sslibdsse "github.com/gittuf/gittuf/internal/third_party/go-securesystemslib/dsse" "github.com/gittuf/gittuf/internal/tuf" "github.com/stretchr/testify/assert" ) func TestRootMetadata(t *testing.T) { rootMetadata := NewRootMetadata() key := NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, rootPubKeyBytes)) err := rootMetadata.addKey(key) assert.Nil(t, err) assert.Equal(t, key, rootMetadata.Keys[key.KeyID]) t.Run("test SetExpires", func(t *testing.T) { d := time.Date(1995, time.October, 26, 9, 0, 0, 0, time.UTC) rootMetadata.SetExpires(d.Format(time.RFC3339)) assert.Equal(t, "1995-10-26T09:00:00Z", rootMetadata.Expires) }) t.Run("test addRole", func(t *testing.T) { rootMetadata.addRole("targets", Role{ KeyIDs: set.NewSetFromItems(key.KeyID), Threshold: 1, }) assert.True(t, rootMetadata.Roles["targets"].KeyIDs.Has(key.KeyID)) }) t.Run("test SchemaVersion", func(t *testing.T) { schemaVersion := rootMetadata.SchemaVersion() assert.Equal(t, rootVersion, schemaVersion) }) t.Run("test GetPrincipals", func(t *testing.T) { expectedPrincipals := map[string]tuf.Principal{key.KeyID: key} principals := rootMetadata.GetPrincipals() assert.Equal(t, expectedPrincipals, principals) }) t.Run("test rootLocation", func(t *testing.T) { currentLocation := rootMetadata.GetRepositoryLocation() assert.Equal(t, "", currentLocation) location := "https://example.com/repository/location" rootMetadata.SetRepositoryLocation(location) currentLocation = rootMetadata.GetRepositoryLocation() assert.Equal(t, location, currentLocation) }) t.Run("test propagation directives", func(t *testing.T) { directives := rootMetadata.GetPropagationDirectives() assert.Empty(t, directives) directive := &PropagationDirective{ Name: "test", UpstreamRepository: "https://example.com/git/repository", UpstreamReference: "refs/heads/main", DownstreamReference: "refs/heads/main", DownstreamPath: "upstream/", } err = rootMetadata.AddPropagationDirective(directive) assert.Nil(t, err) directives = rootMetadata.GetPropagationDirectives() assert.Equal(t, 1, len(directives)) assert.Equal(t, directive, directives[0]) err = rootMetadata.DeletePropagationDirective("test") assert.Nil(t, err) directives = rootMetadata.GetPropagationDirectives() assert.Empty(t, directives) err = 
rootMetadata.DeletePropagationDirective("test") assert.ErrorIs(t, err, tuf.ErrPropagationDirectiveNotFound) }) t.Run("test multi-repository", func(t *testing.T) { isController := rootMetadata.IsController() assert.False(t, isController) name := "test" location := "http://git.example.com/repository" initialRootPrincipals := []tuf.Principal{key} err := rootMetadata.AddControllerRepository(name, location, initialRootPrincipals) assert.Nil(t, err) controllerRepositories := rootMetadata.GetControllerRepositories() assert.Equal(t, []tuf.OtherRepository{&OtherRepository{Name: name, Location: location, InitialRootPrincipals: []*Key{key}}}, controllerRepositories) propagations := rootMetadata.GetPropagationDirectives() found := false for _, propagation := range propagations { if propagation.GetName() == "gittuf-controller-test" { found = true break } } assert.True(t, found) err = rootMetadata.AddNetworkRepository(name, location, initialRootPrincipals) assert.ErrorIs(t, err, tuf.ErrNotAControllerRepository) err = rootMetadata.EnableController() assert.Nil(t, err) err = rootMetadata.AddNetworkRepository(name, location, initialRootPrincipals) assert.Nil(t, err) networkRepositories := rootMetadata.GetNetworkRepositories() assert.Equal(t, []tuf.OtherRepository{&OtherRepository{Name: name, Location: location, InitialRootPrincipals: []*Key{key}}}, networkRepositories) err = rootMetadata.DisableController() assert.Nil(t, err) networkRepositories = rootMetadata.GetNetworkRepositories() assert.Nil(t, networkRepositories) }) } func TestRootMetadataWithSSHKey(t *testing.T) { // Setup test key pair keys := []struct { name string data []byte }{ {"rsa", artifacts.SSHRSAPrivate}, {"rsa.pub", artifacts.SSHRSAPublicSSH}, } tmpDir := t.TempDir() for _, key := range keys { keyPath := filepath.Join(tmpDir, key.name) if err := os.WriteFile(keyPath, key.data, 0o600); err != nil { t.Fatal(err) } } keyPath := filepath.Join(tmpDir, "rsa") sslibKeyO, err := ssh.NewKeyFromFile(keyPath) if err != nil { t.Fatal() } sslibKey := NewKeyFromSSLibKey(sslibKeyO) // Create TUF root and add test key rootMetadata := NewRootMetadata() if err := rootMetadata.addKey(sslibKey); err != nil { t.Fatal(err) } // Wrap and and sign ctx := context.Background() env, err := dsse.CreateEnvelope(rootMetadata) if err != nil { t.Fatal() } verifier, err := ssh.NewVerifierFromKey(sslibKeyO) if err != nil { t.Fatal() } signer := &ssh.Signer{ Verifier: verifier, Path: keyPath, } env, err = dsse.SignEnvelope(ctx, env, signer) if err != nil { t.Fatal() } // Unwrap and verify // NOTE: For the sake of testing the contained key, we unwrap before we // verify. Typically, in DSSE it should be the other way around. 
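// The steps below decode the envelope payload, rebuild the root metadata
// from JSON, construct a verifier from the key embedded in that metadata,
// and only then verify the envelope signature with it.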
payload, err := env.DecodeB64Payload() if err != nil { t.Fatal() } rootMetadata2 := &RootMetadata{} if err := json.Unmarshal(payload, rootMetadata2); err != nil { t.Fatal() } sslibKey2 := rootMetadata2.Keys[sslibKey.KeyID] // NOTE: Typically, a caller would choose this method, if KeyType==ssh.SSHKeyType verifier2, err := ssh.NewVerifierFromKey(sslibKey2.Keys()[0]) if err != nil { t.Fatal() } _, err = dsse.VerifyEnvelope(ctx, env, []sslibdsse.Verifier{verifier2}, 1) if err != nil { t.Fatal() } } func TestAddRootPrincipal(t *testing.T) { key := NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, rootPubKeyBytes)) t.Run("with root role already in metadata", func(t *testing.T) { rootMetadata := initialTestRootMetadata(t) newRootKey := NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, targets1PubKeyBytes)) err := rootMetadata.AddRootPrincipal(newRootKey) assert.Nil(t, err) assert.Equal(t, newRootKey, rootMetadata.Keys[newRootKey.KeyID]) assert.Equal(t, set.NewSetFromItems(key.KeyID, newRootKey.KeyID), rootMetadata.Roles[tuf.RootRoleName].KeyIDs) }) t.Run("without root role already in metadata", func(t *testing.T) { rootMetadata := NewRootMetadata() err := rootMetadata.AddRootPrincipal(key) assert.Nil(t, err) assert.Equal(t, key, rootMetadata.Keys[key.KeyID]) assert.Equal(t, set.NewSetFromItems(key.KeyID), rootMetadata.Roles[tuf.RootRoleName].KeyIDs) }) } func TestDeleteRootPrincipal(t *testing.T) { key := NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, rootPubKeyBytes)) rootMetadata := initialTestRootMetadata(t) newRootKey := NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, targets1PubKeyBytes)) err := rootMetadata.AddRootPrincipal(newRootKey) assert.Nil(t, err) err = rootMetadata.DeleteRootPrincipal(newRootKey.KeyID) assert.Nil(t, err) assert.Equal(t, key, rootMetadata.Keys[key.KeyID]) assert.Equal(t, newRootKey, rootMetadata.Keys[newRootKey.KeyID]) assert.Equal(t, set.NewSetFromItems(key.KeyID), rootMetadata.Roles[tuf.RootRoleName].KeyIDs) err = rootMetadata.DeleteRootPrincipal(key.KeyID) assert.ErrorIs(t, err, tuf.ErrCannotMeetThreshold) } func TestAddPrimaryRuleFilePrincipal(t *testing.T) { rootMetadata := initialTestRootMetadata(t) targetsKey := NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, targets1PubKeyBytes)) err := rootMetadata.AddPrimaryRuleFilePrincipal(nil) assert.ErrorIs(t, err, tuf.ErrInvalidPrincipalType) err = rootMetadata.AddPrimaryRuleFilePrincipal(targetsKey) assert.Nil(t, err) assert.Equal(t, targetsKey, rootMetadata.Keys[targetsKey.KeyID]) assert.Equal(t, set.NewSetFromItems(targetsKey.KeyID), rootMetadata.Roles[tuf.TargetsRoleName].KeyIDs) } func TestDeletePrimaryRuleFilePrincipal(t *testing.T) { rootMetadata := initialTestRootMetadata(t) targetsKey1 := NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, targets1PubKeyBytes)) targetsKey2 := NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, targets2PubKeyBytes)) err := rootMetadata.AddPrimaryRuleFilePrincipal(targetsKey1) assert.Nil(t, err) err = rootMetadata.AddPrimaryRuleFilePrincipal(targetsKey2) assert.Nil(t, err) err = rootMetadata.DeletePrimaryRuleFilePrincipal("") assert.ErrorIs(t, err, tuf.ErrInvalidPrincipalID) err = rootMetadata.DeletePrimaryRuleFilePrincipal(targetsKey1.KeyID) assert.Nil(t, err) assert.Equal(t, targetsKey1, rootMetadata.Keys[targetsKey1.KeyID]) assert.Equal(t, targetsKey2, rootMetadata.Keys[targetsKey2.KeyID]) targetsRole := rootMetadata.Roles[tuf.TargetsRoleName] assert.True(t, targetsRole.KeyIDs.Has(targetsKey2.KeyID)) err = rootMetadata.DeletePrimaryRuleFilePrincipal(targetsKey2.KeyID) assert.ErrorIs(t, err, tuf.ErrCannotMeetThreshold) } func 
TestAddGitHubAppPrincipal(t *testing.T) { rootMetadata := initialTestRootMetadata(t) appKey := NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, targets1PubKeyBytes)) err := rootMetadata.AddGitHubAppPrincipal(tuf.GitHubAppRoleName, nil) assert.ErrorIs(t, err, tuf.ErrInvalidPrincipalType) err = rootMetadata.AddGitHubAppPrincipal(tuf.GitHubAppRoleName, appKey) assert.Nil(t, err) assert.Equal(t, appKey, rootMetadata.Keys[appKey.KeyID]) assert.Equal(t, set.NewSetFromItems(appKey.KeyID), rootMetadata.Roles[tuf.GitHubAppRoleName].KeyIDs) } func TestDeleteGitHubAppPrincipal(t *testing.T) { rootMetadata := initialTestRootMetadata(t) appKey := NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, targets1PubKeyBytes)) err := rootMetadata.AddGitHubAppPrincipal(tuf.GitHubAppRoleName, appKey) assert.Nil(t, err) rootMetadata.DeleteGitHubAppPrincipal(tuf.GitHubAppRoleName) assert.Nil(t, rootMetadata.Roles[tuf.GitHubAppRoleName].KeyIDs) } func TestEnableGitHubAppApprovals(t *testing.T) { rootMetadata := initialTestRootMetadata(t) assert.False(t, rootMetadata.GitHubApprovalsTrusted) rootMetadata.EnableGitHubAppApprovals() assert.True(t, rootMetadata.GitHubApprovalsTrusted) } func TestDisableGitHubAppApprovals(t *testing.T) { rootMetadata := initialTestRootMetadata(t) assert.False(t, rootMetadata.GitHubApprovalsTrusted) rootMetadata.EnableGitHubAppApprovals() assert.True(t, rootMetadata.GitHubApprovalsTrusted) rootMetadata.DisableGitHubAppApprovals() assert.False(t, rootMetadata.GitHubApprovalsTrusted) } func TestUpdateAndGetRootThreshold(t *testing.T) { rootMetadata := NewRootMetadata() err := rootMetadata.UpdateRootThreshold(3) assert.ErrorIs(t, err, tuf.ErrInvalidRootMetadata) threshold, err := rootMetadata.GetRootThreshold() assert.ErrorIs(t, err, tuf.ErrInvalidRootMetadata) assert.Equal(t, -1, threshold) key1 := NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, rootPubKeyBytes)) key2 := NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, targets1PubKeyBytes)) if err := rootMetadata.AddRootPrincipal(key1); err != nil { t.Fatal(err) } if err := rootMetadata.AddRootPrincipal(key2); err != nil { t.Fatal(err) } err = rootMetadata.UpdateRootThreshold(2) assert.Nil(t, err) assert.Equal(t, 2, rootMetadata.Roles[tuf.RootRoleName].Threshold) threshold, err = rootMetadata.GetRootThreshold() assert.Nil(t, err) assert.Equal(t, 2, threshold) err = rootMetadata.UpdateRootThreshold(3) assert.ErrorIs(t, err, tuf.ErrCannotMeetThreshold) } func TestUpdateAndGetPrimaryRuleFileThreshold(t *testing.T) { rootMetadata := initialTestRootMetadata(t) err := rootMetadata.UpdatePrimaryRuleFileThreshold(3) assert.ErrorIs(t, err, tuf.ErrPrimaryRuleFileInformationNotFoundInRoot) threshold, err := rootMetadata.GetPrimaryRuleFileThreshold() assert.ErrorIs(t, err, tuf.ErrPrimaryRuleFileInformationNotFoundInRoot) assert.Equal(t, -1, threshold) key1 := NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, targets1PubKeyBytes)) key2 := NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, targets2PubKeyBytes)) if err := rootMetadata.AddPrimaryRuleFilePrincipal(key1); err != nil { t.Fatal(err) } if err := rootMetadata.AddPrimaryRuleFilePrincipal(key2); err != nil { t.Fatal(err) } err = rootMetadata.UpdatePrimaryRuleFileThreshold(2) assert.Nil(t, err) assert.Equal(t, 2, rootMetadata.Roles[tuf.TargetsRoleName].Threshold) threshold, err = rootMetadata.GetPrimaryRuleFileThreshold() assert.Nil(t, err) assert.Equal(t, 2, threshold) err = rootMetadata.UpdatePrimaryRuleFileThreshold(3) assert.ErrorIs(t, err, tuf.ErrCannotMeetThreshold) } func TestGetRootPrincipals(t *testing.T) { key := 
NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, rootPubKeyBytes)) t.Run("root role exists", func(t *testing.T) { rootMetadata := initialTestRootMetadata(t) expectedPrincipals := []tuf.Principal{key} rootPrincipals, err := rootMetadata.GetRootPrincipals() assert.Nil(t, err) assert.Equal(t, expectedPrincipals, rootPrincipals) }) t.Run("root role does not exist", func(t *testing.T) { rootMetadata := NewRootMetadata() rootPrincipals, err := rootMetadata.GetRootPrincipals() assert.ErrorIs(t, err, tuf.ErrInvalidRootMetadata) assert.Nil(t, rootPrincipals) }) } func TestGetPrimaryRuleFilePrincipals(t *testing.T) { key := NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, rootPubKeyBytes)) t.Run("targets role exists", func(t *testing.T) { rootMetadata := initialTestRootMetadata(t) err := rootMetadata.AddPrimaryRuleFilePrincipal(key) assert.Nil(t, err) expectedPrincipals := []tuf.Principal{key} principals, err := rootMetadata.GetPrimaryRuleFilePrincipals() assert.Nil(t, err) assert.Equal(t, expectedPrincipals, principals) }) t.Run("targets role does not exist", func(t *testing.T) { rootMetadata := NewRootMetadata() rootPrincipals, err := rootMetadata.GetPrimaryRuleFilePrincipals() assert.ErrorIs(t, err, tuf.ErrPrimaryRuleFileInformationNotFoundInRoot) assert.Nil(t, rootPrincipals) }) } func TestGetGitHubAppPrincipals(t *testing.T) { key := NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, rootPubKeyBytes)) t.Run("role exists", func(t *testing.T) { rootMetadata := initialTestRootMetadata(t) err := rootMetadata.AddGitHubAppPrincipal(tuf.GitHubAppRoleName, key) assert.Nil(t, err) expectedPrincipals := []tuf.Principal{key} principals, err := rootMetadata.GetGitHubAppPrincipals() assert.Nil(t, err) assert.Equal(t, expectedPrincipals, principals) }) t.Run("role does not exist", func(t *testing.T) { rootMetadata := NewRootMetadata() rootPrincipals, err := rootMetadata.GetGitHubAppPrincipals() assert.ErrorIs(t, err, tuf.ErrGitHubAppInformationNotFoundInRoot) assert.Nil(t, rootPrincipals) }) } func TestIsGitHubAppApprovalTrusted(t *testing.T) { rootMetadata := initialTestRootMetadata(t) trusted := rootMetadata.IsGitHubAppApprovalTrusted() assert.False(t, trusted) key := NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, rootPubKeyBytes)) err := rootMetadata.AddGitHubAppPrincipal(tuf.GitHubAppRoleName, key) assert.Nil(t, err) rootMetadata.EnableGitHubAppApprovals() trusted = rootMetadata.IsGitHubAppApprovalTrusted() assert.True(t, trusted) } func TestGlobalRules(t *testing.T) { rootMetadata := initialTestRootMetadata(t) assert.Nil(t, rootMetadata.GlobalRules) // no global rule yet err := rootMetadata.AddGlobalRule(NewGlobalRuleThreshold("threshold-2-main", []string{"git:refs/heads/main"}, 2)) assert.Nil(t, err) err = rootMetadata.AddGlobalRule(NewGlobalRuleThreshold("threshold-2-main", []string{"git:refs/heads/main"}, 2)) assert.ErrorIs(t, err, tuf.ErrGlobalRuleAlreadyExists) assert.Equal(t, 1, len(rootMetadata.GlobalRules)) assert.Equal(t, "threshold-2-main", rootMetadata.GlobalRules[0].GetName()) expectedGlobalRule := &GlobalRuleThreshold{ Name: "threshold-2-main", Paths: []string{"git:refs/heads/main"}, Threshold: 2, } globalRules := rootMetadata.GetGlobalRules() assert.Equal(t, expectedGlobalRule.GetName(), globalRules[0].GetName()) assert.Equal(t, expectedGlobalRule.GetProtectedNamespaces(), globalRules[0].(tuf.GlobalRuleThreshold).GetProtectedNamespaces()) assert.Equal(t, expectedGlobalRule.GetThreshold(), globalRules[0].(tuf.GlobalRuleThreshold).GetThreshold()) forcePushesGlobalRule, err := 
NewGlobalRuleBlockForcePushes("block-force-pushes", []string{"git:refs/heads/main"}) if err != nil { t.Fatal(err) } err = rootMetadata.AddGlobalRule(forcePushesGlobalRule) assert.Nil(t, err) err = rootMetadata.AddGlobalRule(forcePushesGlobalRule) assert.ErrorIs(t, err, tuf.ErrGlobalRuleAlreadyExists) assert.Equal(t, 2, len(rootMetadata.GlobalRules)) assert.Equal(t, "threshold-2-main", rootMetadata.GlobalRules[0].GetName()) assert.Equal(t, "block-force-pushes", rootMetadata.GlobalRules[1].GetName()) err = rootMetadata.DeleteGlobalRule("threshold-2-main") assert.Nil(t, err) err = rootMetadata.DeleteGlobalRule("block-force-pushes") assert.Nil(t, err) assert.Equal(t, 0, len(rootMetadata.GlobalRules)) err = rootMetadata.DeleteGlobalRule("") assert.ErrorIs(t, err, tuf.ErrGlobalRuleNotFound) } func TestNewGlobalRuleBlockForcePushes(t *testing.T) { tests := map[string]struct { patterns []string expectedError error }{ "no error, single git pattern": { patterns: []string{"git:refs/heads/main"}, }, "no error, multiple git patterns": { patterns: []string{"git:refs/heads/main", "git:refs/heads/feature"}, }, "no error, multiple git patterns including wildcards": { patterns: []string{"git:refs/heads/main", "git:refs/heads/release/*"}, }, "error, single non-git pattern": { patterns: []string{"file:foo"}, expectedError: tuf.ErrGlobalRuleBlockForcePushesOnlyAppliesToGitPaths, }, "error, multiple non-git patterns including wildcards": { patterns: []string{"file:foo", "file:bar", "file:baz/*"}, expectedError: tuf.ErrGlobalRuleBlockForcePushesOnlyAppliesToGitPaths, }, "error, mix of git and non-git patterns including wildcards": { patterns: []string{"git:refs/heads/main", "git:refs/heads/release/*", "file:foo", "file:bar", "file:baz/*"}, expectedError: tuf.ErrGlobalRuleBlockForcePushesOnlyAppliesToGitPaths, }, } for name, test := range tests { rule, err := NewGlobalRuleBlockForcePushes("test-block-force-pushes", test.patterns) if test.expectedError == nil { assert.Nil(t, err, fmt.Sprintf("unexpected error '%v' in test '%s'", err, name)) assert.Equal(t, test.patterns, rule.Paths) } else { assert.ErrorIs(t, err, test.expectedError, fmt.Sprintf("unexpected error '%v', expected '%v' in test '%s'", err, test.expectedError, name)) } } } func TestPropagationDirective(t *testing.T) { name := "test" upstreamRepository := "https://example.com/git/repository" refName := "refs/heads/main" localPath := "upstream/" directive := NewPropagationDirective(name, upstreamRepository, refName, refName, localPath) assert.Equal(t, name, directive.GetName()) assert.Equal(t, upstreamRepository, directive.GetUpstreamRepository()) assert.Equal(t, refName, directive.GetUpstreamReference()) assert.Equal(t, refName, directive.GetDownstreamReference()) assert.Equal(t, localPath, directive.GetDownstreamPath()) } gittuf-0.9.0/internal/tuf/v01/targets.go000066400000000000000000000227101475150141000200420ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package v01 import ( "encoding/json" "fmt" "strings" "github.com/danwakefield/fnmatch" "github.com/gittuf/gittuf/internal/common/set" "github.com/gittuf/gittuf/internal/tuf" ) const ( targetsVersion = "http://gittuf.dev/policy/rule-file/v0.1" ) // TargetsMetadata defines the schema of TUF's Targets role. type TargetsMetadata struct { Type string `json:"type"` Expires string `json:"expires"` Targets map[string]any `json:"targets"` Delegations *Delegations `json:"delegations"` } // NewTargetsMetadata returns a new instance of TargetsMetadata. 
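//
// A freshly created instance contains only the implicit, terminating
// allow rule. Illustrative sketch (not from the gittuf source), assuming
// key is a *Key that has already been constructed:
//
//	targetsMetadata := NewTargetsMetadata()
//	if err := targetsMetadata.AddPrincipal(key); err != nil {
//		// handle the error
//	}
//	if err := targetsMetadata.AddRule("protect-main", []string{key.KeyID}, []string{"git:refs/heads/main"}, 1); err != nil {
//		// handle the error
//	}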
func NewTargetsMetadata() *TargetsMetadata { return &TargetsMetadata{ Type: "targets", Delegations: &Delegations{Roles: []*Delegation{AllowRule()}}, } } // SetExpires sets the expiry date of the TargetsMetadata to the value passed // in. func (t *TargetsMetadata) SetExpires(expires string) { t.Expires = expires } // SchemaVersion returns the metadata schema version. func (t *TargetsMetadata) SchemaVersion() string { return targetsVersion } // Validate ensures the instance of TargetsMetadata matches gittuf expectations. func (t *TargetsMetadata) Validate() error { if len(t.Targets) != 0 { return ErrTargetsNotEmpty } return nil } // AddRule adds a new delegation to TargetsMetadata. func (t *TargetsMetadata) AddRule(ruleName string, authorizedPrincipalIDs, rulePatterns []string, threshold int) error { if strings.HasPrefix(ruleName, tuf.GittufPrefix) { return tuf.ErrCannotManipulateRulesWithGittufPrefix } for _, principalID := range authorizedPrincipalIDs { if _, has := t.Delegations.Keys[principalID]; !has { return tuf.ErrPrincipalNotFound } } if len(authorizedPrincipalIDs) < threshold { return tuf.ErrCannotMeetThreshold } allDelegations := t.Delegations.Roles if allDelegations == nil { allDelegations = []*Delegation{} } newDelegation := &Delegation{ Name: ruleName, Paths: rulePatterns, Terminating: false, Role: Role{ KeyIDs: set.NewSetFromItems(authorizedPrincipalIDs...), Threshold: threshold, }, } allDelegations = append(allDelegations[:len(allDelegations)-1], newDelegation, AllowRule()) t.Delegations.Roles = allDelegations return nil } // UpdateRule is used to amend a delegation in TargetsMetadata. func (t *TargetsMetadata) UpdateRule(ruleName string, authorizedPrincipalIDs, rulePatterns []string, threshold int) error { if strings.HasPrefix(ruleName, tuf.GittufPrefix) { return tuf.ErrCannotManipulateRulesWithGittufPrefix } for _, principalID := range authorizedPrincipalIDs { if _, has := t.Delegations.Keys[principalID]; !has { return tuf.ErrPrincipalNotFound } } if len(authorizedPrincipalIDs) < threshold { return tuf.ErrCannotMeetThreshold } allDelegations := []*Delegation{} for _, delegation := range t.Delegations.Roles { if delegation.ID() == tuf.AllowRuleName { break } if delegation.ID() != ruleName { allDelegations = append(allDelegations, delegation) continue } if delegation.Name == ruleName { delegation.Paths = rulePatterns delegation.Role = Role{ KeyIDs: set.NewSetFromItems(authorizedPrincipalIDs...), Threshold: threshold, } } allDelegations = append(allDelegations, delegation) } allDelegations = append(allDelegations, AllowRule()) t.Delegations.Roles = allDelegations return nil } // ReorderRules changes the order of delegations, and the new order is specified // in `ruleNames []string`. 
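//
// Every rule except the implicit allow rule must appear exactly once in
// the new order; the allow rule is re-appended automatically. For
// example, with rules "rule-1", "rule-2", and "rule-3" defined:
//
//	err := targetsMetadata.ReorderRules([]string{"rule-3", "rule-2", "rule-1"})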
func (t *TargetsMetadata) ReorderRules(ruleNames []string) error { // Create a map of all existing delegations for quick look up rolesMap := make(map[string]*Delegation) // Create a set of current rules in metadata, skipping the allow rule currentRules := set.NewSet[string]() for _, delegation := range t.Delegations.Roles { if delegation.Name == tuf.AllowRuleName { continue } rolesMap[delegation.Name] = delegation currentRules.Add(delegation.Name) } specifiedRules := set.NewSet[string]() for _, name := range ruleNames { if specifiedRules.Has(name) { return fmt.Errorf("%w: '%s'", tuf.ErrDuplicatedRuleName, name) } specifiedRules.Add(name) } if !currentRules.Equal(specifiedRules) { onlyInSpecifiedRules := specifiedRules.Minus(currentRules) if onlyInSpecifiedRules.Len() != 0 { if onlyInSpecifiedRules.Has(tuf.AllowRuleName) { return fmt.Errorf("%w: do not specify allow rule", tuf.ErrCannotManipulateRulesWithGittufPrefix) } contents := onlyInSpecifiedRules.Contents() return fmt.Errorf("%w: rules '%s' do not exist in current rule file", tuf.ErrRuleNotFound, strings.Join(contents, ", ")) } onlyInCurrentRules := currentRules.Minus(specifiedRules) if onlyInCurrentRules.Len() != 0 { contents := onlyInCurrentRules.Contents() return fmt.Errorf("%w: rules '%s' not specified", tuf.ErrMissingRules, strings.Join(contents, ", ")) } } // Create newDelegations and set it in the targetsMetadata after adding allow rule newDelegations := make([]*Delegation, 0, len(rolesMap)+1) for _, ruleName := range ruleNames { newDelegations = append(newDelegations, rolesMap[ruleName]) } newDelegations = append(newDelegations, AllowRule()) t.Delegations.Roles = newDelegations return nil } // RemoveRule deletes a delegation entry from TargetsMetadata. func (t *TargetsMetadata) RemoveRule(ruleName string) error { if strings.HasPrefix(ruleName, tuf.GittufPrefix) { return tuf.ErrCannotManipulateRulesWithGittufPrefix } allDelegations := t.Delegations.Roles updatedDelegations := []*Delegation{} for _, delegation := range allDelegations { if delegation.Name != ruleName { updatedDelegations = append(updatedDelegations, delegation) } } t.Delegations.Roles = updatedDelegations return nil } // GetPrincipals returns all the principals in the rule file. func (t *TargetsMetadata) GetPrincipals() map[string]tuf.Principal { principals := map[string]tuf.Principal{} for id, key := range t.Delegations.Keys { principals[id] = key } return principals } // GetRules returns all the rules in the metadata. func (t *TargetsMetadata) GetRules() []tuf.Rule { if t.Delegations == nil { return nil } rules := make([]tuf.Rule, 0, len(t.Delegations.Roles)) for _, delegation := range t.Delegations.Roles { rules = append(rules, delegation) } return rules } // AddPrincipal adds a principal to the metadata. // // TODO: this isn't associated with a specific rule; with the removal of // verify-commit and verify-tag, it may not make sense anymore func (t *TargetsMetadata) AddPrincipal(principal tuf.Principal) error { return t.Delegations.addKey(principal) } func (t *TargetsMetadata) RemovePrincipal(principalID string) error { return t.Delegations.removeKey(principalID) } // Delegations defines the schema for specifying delegations in TUF's Targets // metadata. type Delegations struct { Keys map[string]*Key `json:"keys"` Roles []*Delegation `json:"roles"` } // addKey adds a delegations key. 
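//
// Only *Key values are accepted; any other Principal implementation
// results in tuf.ErrInvalidPrincipalType. A key whose ID is already
// present is overwritten.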
func (d *Delegations) addKey(key tuf.Principal) error { if d.Keys == nil { d.Keys = map[string]*Key{} } keyT, isKnownType := key.(*Key) if !isKnownType { return tuf.ErrInvalidPrincipalType } d.Keys[key.ID()] = keyT return nil } func (d *Delegations) removeKey(keyID string) error { if d.Keys == nil { return tuf.ErrPrincipalNotFound } for _, curRole := range d.Roles { if curRole.GetPrincipalIDs() != nil && curRole.GetPrincipalIDs().Has(keyID) { return tuf.ErrPrincipalStillInUse } } delete(d.Keys, keyID) return nil } // AllowRule returns the default, last rule for all policy files. func AllowRule() *Delegation { return &Delegation{ Name: tuf.AllowRuleName, Paths: []string{"*"}, Terminating: true, Role: Role{ Threshold: 1, }, } } // Delegation defines the schema for a single delegation entry. It differs from // the standard TUF schema by allowing a `custom` field to record details // pertaining to the delegation. It implements the tuf.Rule interface. type Delegation struct { Name string `json:"name"` Paths []string `json:"paths"` Terminating bool `json:"terminating"` Custom *json.RawMessage `json:"custom,omitempty"` Role } // ID returns the identifier of the delegation, its name. func (d *Delegation) ID() string { return d.Name } // Matches checks if any of the delegation's patterns match the target. func (d *Delegation) Matches(target string) bool { for _, pattern := range d.Paths { // We validate pattern when it's added to / updated in the metadata if matches := fnmatch.Match(pattern, target, 0); matches { return true } } return false } // GetPrincipalIDs returns the identifiers of the principals that are listed as // trusted by the rule. func (d *Delegation) GetPrincipalIDs() *set.Set[string] { return d.Role.KeyIDs } // GetThreshold returns the threshold of principals that must approve to meet // the rule. func (d *Delegation) GetThreshold() int { return d.Role.Threshold } // IsLastTrustedInRuleFile indicates that subsequent rules in the rule file are // not to be trusted if the current rule matches the namespace under // verification (similar to TUF's terminating behavior). However, the current // rule's delegated rules as well as other rules already in the queue are // trusted. func (d *Delegation) IsLastTrustedInRuleFile() bool { return d.Terminating } // GetProtectedNamespaces returns the set of namespaces protected by the // delegation. 
func (d *Delegation) GetProtectedNamespaces() []string { return d.Paths } gittuf-0.9.0/internal/tuf/v01/targets_test.go000066400000000000000000000316531475150141000211070ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package v01 import ( "fmt" "testing" "time" "github.com/gittuf/gittuf/internal/common/set" "github.com/gittuf/gittuf/internal/signerverifier/ssh" "github.com/gittuf/gittuf/internal/tuf" "github.com/stretchr/testify/assert" ) func TestTargetsMetadataAndDelegations(t *testing.T) { targetsMetadata := NewTargetsMetadata() t.Run("test SetExpires", func(t *testing.T) { d := time.Date(1995, time.October, 26, 9, 0, 0, 0, time.UTC) targetsMetadata.SetExpires(d.Format(time.RFC3339)) assert.Equal(t, "1995-10-26T09:00:00Z", targetsMetadata.Expires) }) t.Run("test Validate", func(t *testing.T) { err := targetsMetadata.Validate() assert.Nil(t, err) targetsMetadata.Targets = map[string]any{"test": true} err = targetsMetadata.Validate() assert.ErrorIs(t, err, ErrTargetsNotEmpty) targetsMetadata.Targets = nil }) key := NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, rootPubKeyBytes)) t.Run("test addKey", func(t *testing.T) { delegations := &Delegations{} assert.Nil(t, delegations.Keys) err := delegations.addKey(key) assert.Nil(t, err) assert.Equal(t, key, delegations.Keys[key.KeyID]) }) t.Run("test removeKey", func(t *testing.T) { delegations := &Delegations{} err := delegations.addKey(key) assert.Nil(t, err) assert.Equal(t, key, delegations.Keys[key.KeyID]) err = delegations.removeKey(key.KeyID) assert.Nil(t, err) assert.Empty(t, delegations.Keys) }) } func TestDelegation(t *testing.T) { t.Run("matches", func(t *testing.T) { tests := map[string]struct { patterns []string target string expected bool }{ "full path, matches": { patterns: []string{"foo"}, target: "foo", expected: true, }, "artifact in directory, matches": { patterns: []string{"foo/*"}, target: "foo/bar", expected: true, }, "artifact in directory, does not match": { patterns: []string{"foo/*.txt"}, target: "foo/bar.tgz", expected: false, }, "artifact in directory, one pattern matches": { patterns: []string{"foo/*.txt", "foo/*.tgz"}, target: "foo/bar.tgz", expected: true, }, "artifact in subdirectory, matches": { patterns: []string{"foo/*"}, target: "foo/bar/foobar", expected: true, }, "artifact in subdirectory with specified extension, matches": { patterns: []string{"foo/*.tgz"}, target: "foo/bar/foobar.tgz", expected: true, }, "pattern with single character selector, matches": { patterns: []string{"foo/?.tgz"}, target: "foo/a.tgz", expected: true, }, "pattern with character sequence, matches": { patterns: []string{"foo/[abc].tgz"}, target: "foo/a.tgz", expected: true, }, "pattern with character sequence, does not match": { patterns: []string{"foo/[abc].tgz"}, target: "foo/x.tgz", expected: false, }, "pattern with negative character sequence, matches": { patterns: []string{"foo/[!abc].tgz"}, target: "foo/x.tgz", expected: true, }, "pattern with negative character sequence, does not match": { patterns: []string{"foo/[!abc].tgz"}, target: "foo/a.tgz", expected: false, }, "artifact in arbitrary directory, matches": { patterns: []string{"*/*.txt"}, target: "foo/bar/foobar.txt", expected: true, }, "artifact with specific name in arbitrary directory, matches": { patterns: []string{"*/foobar.txt"}, target: "foo/bar/foobar.txt", expected: true, }, "artifact with arbitrary subdirectories, matches": { patterns: []string{"foo/*/foobar.txt"}, target: "foo/bar/baz/foobar.txt", expected: true, }, 
"artifact in arbitrary directory, does not match": { patterns: []string{"*.txt"}, target: "foo/bar/foobar.txtfile", expected: false, }, "arbitrary directory, does not match": { patterns: []string{"*_test"}, target: "foo/bar_test/foobar", expected: false, }, "no patterns": { patterns: nil, target: "foo", expected: false, }, "pattern with multiple consecutive wildcards, matches": { patterns: []string{"foo/*/*/*.txt"}, target: "foo/bar/baz/qux.txt", expected: true, }, "pattern with multiple non-consecutive wildcards, matches": { patterns: []string{"foo/*/baz/*.txt"}, target: "foo/bar/baz/qux.txt", expected: true, }, "pattern with gittuf git prefix, matches": { patterns: []string{"git:refs/heads/*"}, target: "git:refs/heads/main", expected: true, }, "pattern with gittuf file prefix for all recursive contents, matches": { patterns: []string{"file:src/signatures/*"}, target: "file:src/signatures/rsa/rsa.go", expected: true, }, } for name, test := range tests { delegation := Delegation{Paths: test.patterns} got := delegation.Matches(test.target) assert.Equal(t, test.expected, got, fmt.Sprintf("unexpected result in test '%s'", name)) } }) t.Run("threshold", func(t *testing.T) { delegation := &Delegation{} threshold := delegation.GetThreshold() assert.Equal(t, 0, threshold) delegation.Threshold = 1 threshold = delegation.GetThreshold() assert.Equal(t, 1, threshold) }) t.Run("terminating", func(t *testing.T) { delegation := &Delegation{} isTerminating := delegation.IsLastTrustedInRuleFile() assert.False(t, isTerminating) delegation.Terminating = true isTerminating = delegation.IsLastTrustedInRuleFile() assert.True(t, isTerminating) }) t.Run("protected namespaces", func(t *testing.T) { delegation := &Delegation{ Paths: []string{"1", "2"}, } protected := delegation.GetProtectedNamespaces() assert.Equal(t, []string{"1", "2"}, protected) }) t.Run("principal IDs", func(t *testing.T) { keyIDs := set.NewSetFromItems("1", "2") delegation := &Delegation{ Role: Role{KeyIDs: keyIDs}, } principalIDs := delegation.GetPrincipalIDs() assert.Equal(t, keyIDs, principalIDs) }) } func TestAddRuleAndGetRules(t *testing.T) { targetsMetadata := initialTestTargetsMetadata(t) key1 := NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, targets1PubKeyBytes)) key2 := NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, targets2PubKeyBytes)) if err := targetsMetadata.AddPrincipal(key1); err != nil { t.Fatal(err) } if err := targetsMetadata.AddPrincipal(key2); err != nil { t.Fatal(err) } err := targetsMetadata.AddRule("test-rule", []string{key1.KeyID, key2.KeyID}, []string{"test/"}, 1) assert.Nil(t, err) assert.Contains(t, targetsMetadata.Delegations.Keys, key1.KeyID) assert.Equal(t, key1, targetsMetadata.Delegations.Keys[key1.KeyID]) assert.Contains(t, targetsMetadata.Delegations.Keys, key2.KeyID) assert.Equal(t, key2, targetsMetadata.Delegations.Keys[key2.KeyID]) assert.Contains(t, targetsMetadata.Delegations.Roles, AllowRule()) rule := &Delegation{ Name: "test-rule", Paths: []string{"test/"}, Terminating: false, Role: Role{KeyIDs: set.NewSetFromItems(key1.KeyID, key2.KeyID), Threshold: 1}, } assert.Equal(t, rule, targetsMetadata.Delegations.Roles[0]) rules := targetsMetadata.GetRules() assert.Equal(t, 2, len(rules)) assert.Equal(t, []tuf.Rule{rule, AllowRule()}, rules) } func TestUpdateDelegation(t *testing.T) { targetsMetadata := initialTestTargetsMetadata(t) key1 := NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, targets1PubKeyBytes)) key2 := NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, targets2PubKeyBytes)) if err := 
targetsMetadata.AddPrincipal(key1); err != nil { t.Fatal(err) } err := targetsMetadata.AddRule("test-rule", []string{key1.KeyID}, []string{"test/"}, 1) if err != nil { t.Fatal(err) } assert.Contains(t, targetsMetadata.Delegations.Keys, key1.KeyID) assert.Equal(t, key1, targetsMetadata.Delegations.Keys[key1.KeyID]) assert.Contains(t, targetsMetadata.Delegations.Roles, AllowRule()) assert.Equal(t, &Delegation{ Name: "test-rule", Paths: []string{"test/"}, Terminating: false, Role: Role{KeyIDs: set.NewSetFromItems(key1.KeyID), Threshold: 1}, }, targetsMetadata.Delegations.Roles[0]) if err := targetsMetadata.AddPrincipal(key2); err != nil { t.Fatal(err) } err = targetsMetadata.UpdateRule("test-rule", []string{key1.KeyID, key2.KeyID}, []string{"test/"}, 1) assert.Nil(t, err) assert.Contains(t, targetsMetadata.Delegations.Keys, key1.KeyID) assert.Equal(t, key1, targetsMetadata.Delegations.Keys[key1.KeyID]) assert.Contains(t, targetsMetadata.Delegations.Keys, key2.KeyID) assert.Equal(t, key2, targetsMetadata.Delegations.Keys[key2.KeyID]) assert.Contains(t, targetsMetadata.Delegations.Roles, AllowRule()) assert.Equal(t, &Delegation{ Name: "test-rule", Paths: []string{"test/"}, Terminating: false, Role: Role{KeyIDs: set.NewSetFromItems(key1.KeyID, key2.KeyID), Threshold: 1}, }, targetsMetadata.Delegations.Roles[0]) } func TestReorderRules(t *testing.T) { targetsMetadata := initialTestTargetsMetadata(t) key1 := NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, targets1PubKeyBytes)) key2 := NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, targets2PubKeyBytes)) if err := targetsMetadata.AddPrincipal(key1); err != nil { t.Fatal(err) } if err := targetsMetadata.AddPrincipal(key2); err != nil { t.Fatal(err) } err := targetsMetadata.AddRule("rule-1", []string{key1.KeyID}, []string{"path1/"}, 1) if err != nil { t.Fatal(err) } err = targetsMetadata.AddRule("rule-2", []string{key2.KeyID}, []string{"path2/"}, 1) if err != nil { t.Fatal(err) } err = targetsMetadata.AddRule("rule-3", []string{key1.KeyID, key2.KeyID}, []string{"path3/"}, 1) if err != nil { t.Fatal(err) } tests := map[string]struct { ruleNames []string expected []string expectedError error }{ "reverse order (valid input)": { ruleNames: []string{"rule-3", "rule-2", "rule-1"}, expected: []string{"rule-3", "rule-2", "rule-1", tuf.AllowRuleName}, expectedError: nil, }, "rule not specified in new order": { ruleNames: []string{"rule-3", "rule-2"}, expectedError: tuf.ErrMissingRules, }, "rule repeated in the new order": { ruleNames: []string{"rule-3", "rule-2", "rule-1", "rule-3"}, expectedError: tuf.ErrDuplicatedRuleName, }, "unknown rule in the new order": { ruleNames: []string{"rule-3", "rule-2", "rule-1", "rule-4"}, expectedError: tuf.ErrRuleNotFound, }, "unknown rule in the new order (with correct length)": { ruleNames: []string{"rule-3", "rule-2", "rule-4"}, expectedError: tuf.ErrRuleNotFound, }, "allow rule appears in the new order": { ruleNames: []string{"rule-2", "rule-3", "rule-1", tuf.AllowRuleName}, expectedError: tuf.ErrCannotManipulateRulesWithGittufPrefix, }, } for name, test := range tests { err = targetsMetadata.ReorderRules(test.ruleNames) if test.expectedError != nil { assert.ErrorIs(t, err, test.expectedError, fmt.Sprintf("unexpected error in test '%s'", name)) } else { assert.Nil(t, err, fmt.Sprintf("unexpected error in test '%s'", name)) assert.Equal(t, len(test.expected), len(targetsMetadata.Delegations.Roles), fmt.Sprintf("expected %d rules in test '%s', but got %d rules", len(test.expected), name, len(targetsMetadata.Delegations.Roles))) for i, 
ruleName := range test.expected { assert.Equal(t, ruleName, targetsMetadata.Delegations.Roles[i].Name, fmt.Sprintf("expected rule '%s' at index %d in test '%s', but got '%s'", ruleName, i, name, targetsMetadata.Delegations.Roles[i].Name)) } } } } func TestRemoveRule(t *testing.T) { targetsMetadata := initialTestTargetsMetadata(t) key := NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, targets1PubKeyBytes)) if err := targetsMetadata.AddPrincipal(key); err != nil { t.Fatal(err) } err := targetsMetadata.AddRule("test-rule", []string{key.KeyID}, []string{"test/"}, 1) if err != nil { t.Fatal(err) } assert.Equal(t, 2, len(targetsMetadata.Delegations.Roles)) err = targetsMetadata.RemoveRule("test-rule") assert.Nil(t, err) assert.Equal(t, 1, len(targetsMetadata.Delegations.Roles)) assert.Contains(t, targetsMetadata.Delegations.Roles, AllowRule()) assert.Contains(t, targetsMetadata.Delegations.Keys, key.KeyID) } func TestGetPrincipals(t *testing.T) { targetsMetadata := initialTestTargetsMetadata(t) key1 := NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, targets1PubKeyBytes)) if err := targetsMetadata.AddPrincipal(key1); err != nil { t.Fatal(err) } principals := targetsMetadata.GetPrincipals() assert.Equal(t, map[string]tuf.Principal{key1.KeyID: key1}, principals) key2 := NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, targets2PubKeyBytes)) if err := targetsMetadata.AddPrincipal(key2); err != nil { t.Fatal(err) } principals = targetsMetadata.GetPrincipals() assert.Equal(t, map[string]tuf.Principal{key1.KeyID: key1, key2.KeyID: key2}, principals) } func TestAllowRule(t *testing.T) { allowRule := AllowRule() assert.Equal(t, tuf.AllowRuleName, allowRule.Name) assert.Equal(t, []string{"*"}, allowRule.Paths) assert.True(t, allowRule.Terminating) assert.Empty(t, allowRule.KeyIDs) assert.Equal(t, 1, allowRule.Threshold) } gittuf-0.9.0/internal/tuf/v01/tuf.go000066400000000000000000000032221475150141000171640ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package v01 // This package defines gittuf's take on TUF metadata. There are some minor // changes, such as the addition of `custom` to delegation entries. Some of it, // however, is inspired by or cloned from the go-tuf implementation. import ( "errors" "github.com/gittuf/gittuf/internal/common/set" "github.com/secure-systems-lab/go-securesystemslib/signerverifier" ) var ErrTargetsNotEmpty = errors.New("`targets` field in gittuf Targets metadata must be empty") // Key defines the structure for how public keys are stored in TUF metadata. It // implements the tuf.Principal and is used for backwards compatibility where a // Principal is always represented directly by a signing key or identity. type Key signerverifier.SSLibKey // NewKeyFromSSLibKey converts the signerverifier.SSLibKey into a Key object. func NewKeyFromSSLibKey(key *signerverifier.SSLibKey) *Key { k := Key(*key) return &k } // ID implements the key's identifier. It implements the Principal interface. func (k *Key) ID() string { return k.KeyID } // Keys returns the set of keys (using the signerverifier.SSLibKey definition) // associated with the principal. func (k *Key) Keys() []*signerverifier.SSLibKey { key := signerverifier.SSLibKey(*k) return []*signerverifier.SSLibKey{&key} } func (k *Key) CustomMetadata() map[string]string { // Key does not support custom metadata return nil } // Role records common characteristics recorded in a role entry in Root metadata // and in a delegation entry. 
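//
// When serialized, a role entry looks roughly like the following (the key ID
// is hypothetical, and this assumes the KeyIDs set marshals as a JSON array):
//
//	{"keyids": ["abc123"], "threshold": 1}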
type Role struct { KeyIDs *set.Set[string] `json:"keyids"` Threshold int `json:"threshold"` } gittuf-0.9.0/internal/tuf/v02/000077500000000000000000000000001475150141000160415ustar00rootroot00000000000000gittuf-0.9.0/internal/tuf/v02/helpers_test.go000066400000000000000000000023621475150141000210740ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package v02 import ( "testing" "time" "github.com/gittuf/gittuf/internal/common/set" "github.com/gittuf/gittuf/internal/signerverifier/ssh" artifacts "github.com/gittuf/gittuf/internal/testartifacts" "github.com/gittuf/gittuf/internal/tuf" ) var ( rootPubKeyBytes = artifacts.SSHRSAPublicSSH targets1PubKeyBytes = artifacts.SSHECDSAPublicSSH targets2PubKeyBytes = artifacts.SSHED25519PublicSSH ) func initialTestRootMetadata(t *testing.T) *RootMetadata { t.Helper() rootKey := NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, rootPubKeyBytes)) rootMetadata := NewRootMetadata() rootMetadata.SetExpires(time.Now().AddDate(1, 0, 0).Format(time.RFC3339)) if err := rootMetadata.addPrincipal(rootKey); err != nil { t.Fatal(err) } rootMetadata.addRole(tuf.RootRoleName, Role{ PrincipalIDs: set.NewSetFromItems(rootKey.KeyID), Threshold: 1, }) return rootMetadata } func initialTestTargetsMetadata(t *testing.T) *TargetsMetadata { t.Helper() targetsMetadata := NewTargetsMetadata() targetsMetadata.SetExpires(time.Now().AddDate(1, 0, 0).Format(time.RFC3339)) targetsMetadata.Delegations = &Delegations{Roles: []*Delegation{AllowRule()}} return targetsMetadata } gittuf-0.9.0/internal/tuf/v02/root.go000066400000000000000000000561071475150141000173640ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package v02 import ( "encoding/json" "fmt" "path" "github.com/gittuf/gittuf/internal/common/set" "github.com/gittuf/gittuf/internal/tuf" tufv01 "github.com/gittuf/gittuf/internal/tuf/v01" ) const ( RootVersion = "https://gittuf.dev/policy/root/v0.2" ) // RootMetadata defines the schema of TUF's Root role. type RootMetadata struct { Type string `json:"type"` Version string `json:"schemaVersion"` Expires string `json:"expires"` RepositoryLocation string `json:"repositoryLocation,omitempty"` Principals map[string]tuf.Principal `json:"principals"` Roles map[string]Role `json:"roles"` GitHubApprovalsTrusted bool `json:"githubApprovalsTrusted"` GlobalRules []tuf.GlobalRule `json:"globalRules,omitempty"` Propagations []tuf.PropagationDirective `json:"propagations,omitempty"` MultiRepository *MultiRepository `json:"multiRepository,omitempty"` } // NewRootMetadata returns a new instance of RootMetadata. func NewRootMetadata() *RootMetadata { return &RootMetadata{ Type: "root", Version: RootVersion, } } // SetExpires sets the expiry date of the RootMetadata to the value passed in. func (r *RootMetadata) SetExpires(expires string) { r.Expires = expires } // SchemaVersion returns the metadata schema version. func (r *RootMetadata) SchemaVersion() string { return r.Version } // GetRepositoryLocation returns the canonical location of the Git repository. func (r *RootMetadata) GetRepositoryLocation() string { return r.RepositoryLocation } // SetRepositoryLocation sets the specified repository location in the root // metadata. func (r *RootMetadata) SetRepositoryLocation(location string) { r.RepositoryLocation = location } // AddRootPrincipal adds the specified principal to the root metadata and // authorizes the principal for the root role. 
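//
// An illustrative usage sketch (key loading and error handling elided; the
// variable names here are hypothetical, not part of this package's API):
//
//	rootMetadata := NewRootMetadata()
//	rootKey := NewKeyFromSSLibKey(sslibKey) // sslibKey assumed loaded elsewhere
//	if err := rootMetadata.AddRootPrincipal(rootKey); err != nil {
//		// handle error
//	}
//	// the root role now trusts rootKey.ID(), with a default threshold of 1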
func (r *RootMetadata) AddRootPrincipal(principal tuf.Principal) error { if principal == nil { return tuf.ErrInvalidPrincipalType } // Add principal to metadata if err := r.addPrincipal(principal); err != nil { return err } rootRole, ok := r.Roles[tuf.RootRoleName] if !ok { // Create a new root role entry with this principal r.addRole(tuf.RootRoleName, Role{ PrincipalIDs: set.NewSetFromItems(principal.ID()), Threshold: 1, }) return nil } // Add principal ID to the root role if it's not already in it rootRole.PrincipalIDs.Add(principal.ID()) r.Roles[tuf.RootRoleName] = rootRole return nil } // DeleteRootPrincipal removes principalID from the list of trusted Root // principals in rootMetadata. It does not remove the principal entry itself as // it does not check if other roles can be verified using the same principal. func (r *RootMetadata) DeleteRootPrincipal(principalID string) error { rootRole, has := r.Roles[tuf.RootRoleName] if !has { return tuf.ErrInvalidRootMetadata } if rootRole.PrincipalIDs.Len() <= rootRole.Threshold { return tuf.ErrCannotMeetThreshold } rootRole.PrincipalIDs.Remove(principalID) r.Roles[tuf.RootRoleName] = rootRole return nil } // AddPrimaryRuleFilePrincipal adds the 'principal' as a trusted signer in // 'rootMetadata' for the top level Targets role. func (r *RootMetadata) AddPrimaryRuleFilePrincipal(principal tuf.Principal) error { if principal == nil { return tuf.ErrInvalidPrincipalType } // Add principal to the metadata file if err := r.addPrincipal(principal); err != nil { return err } targetsRole, ok := r.Roles[tuf.TargetsRoleName] if !ok { // Create a new targets role entry with this principal r.addRole(tuf.TargetsRoleName, Role{ PrincipalIDs: set.NewSetFromItems(principal.ID()), Threshold: 1, }) return nil } targetsRole.PrincipalIDs.Add(principal.ID()) r.Roles[tuf.TargetsRoleName] = targetsRole return nil } // DeletePrimaryRuleFilePrincipal removes the principal matching 'principalID' // from trusted principals for top level Targets role in 'rootMetadata'. Note: // It doesn't remove the principal entry itself as it doesn't check if other // roles can use the same principal. func (r *RootMetadata) DeletePrimaryRuleFilePrincipal(principalID string) error { if principalID == "" { return tuf.ErrInvalidPrincipalID } targetsRole, ok := r.Roles[tuf.TargetsRoleName] if !ok { return tuf.ErrPrimaryRuleFileInformationNotFoundInRoot } if targetsRole.PrincipalIDs.Len() <= targetsRole.Threshold { return tuf.ErrCannotMeetThreshold } targetsRole.PrincipalIDs.Remove(principalID) r.Roles[tuf.TargetsRoleName] = targetsRole return nil } // AddGitHubAppPrincipal adds the 'principal' as a trusted principal in // 'rootMetadata' for the special GitHub app role. This key is used to verify // GitHub pull request approval attestation signatures. func (r *RootMetadata) AddGitHubAppPrincipal(name string, principal tuf.Principal) error { if principal == nil { return tuf.ErrInvalidPrincipalType } // TODO: support multiple principals / threshold for app if err := r.addPrincipal(principal); err != nil { return err } role := Role{ PrincipalIDs: set.NewSetFromItems(principal.ID()), Threshold: 1, } r.addRole(name, role) // AddRole replaces the specified role if it already exists return nil } // DeleteGitHubAppPrincipal removes the special GitHub app role from the root // metadata. 
func (r *RootMetadata) DeleteGitHubAppPrincipal(name string) { // TODO: support multiple principals / threshold for app delete(r.Roles, name) } // EnableGitHubAppApprovals sets GitHubApprovalsTrusted to true in the // root metadata. func (r *RootMetadata) EnableGitHubAppApprovals() { r.GitHubApprovalsTrusted = true } // DisableGitHubAppApprovals sets GitHubApprovalsTrusted to false in the root // metadata. func (r *RootMetadata) DisableGitHubAppApprovals() { r.GitHubApprovalsTrusted = false } // UpdateRootThreshold sets the threshold for the Root role. func (r *RootMetadata) UpdateRootThreshold(threshold int) error { rootRole, ok := r.Roles[tuf.RootRoleName] if !ok { return tuf.ErrInvalidRootMetadata } if rootRole.PrincipalIDs.Len() < threshold { return tuf.ErrCannotMeetThreshold } rootRole.Threshold = threshold r.Roles[tuf.RootRoleName] = rootRole return nil } // UpdatePrimaryRuleFileThreshold sets the threshold for the top level Targets // role. func (r *RootMetadata) UpdatePrimaryRuleFileThreshold(threshold int) error { targetsRole, ok := r.Roles[tuf.TargetsRoleName] if !ok { return tuf.ErrPrimaryRuleFileInformationNotFoundInRoot } if targetsRole.PrincipalIDs.Len() < threshold { return tuf.ErrCannotMeetThreshold } targetsRole.Threshold = threshold r.Roles[tuf.TargetsRoleName] = targetsRole return nil } // GetPrincipals returns all the principals in the root metadata. func (r *RootMetadata) GetPrincipals() map[string]tuf.Principal { return r.Principals } // GetRootThreshold returns the threshold of principals that must sign the root // of trust metadata. func (r *RootMetadata) GetRootThreshold() (int, error) { role, hasRole := r.Roles[tuf.RootRoleName] if !hasRole { return -1, tuf.ErrInvalidRootMetadata } return role.Threshold, nil } // GetRootPrincipals returns the principals trusted for the root of trust // metadata. func (r *RootMetadata) GetRootPrincipals() ([]tuf.Principal, error) { role, hasRole := r.Roles[tuf.RootRoleName] if !hasRole { return nil, tuf.ErrInvalidRootMetadata } principals := make([]tuf.Principal, 0, role.PrincipalIDs.Len()) for _, id := range role.PrincipalIDs.Contents() { principals = append(principals, r.Principals[id]) } return principals, nil } // GetPrimaryRuleFileThreshold returns the threshold of principals that must // sign the primary rule file. func (r *RootMetadata) GetPrimaryRuleFileThreshold() (int, error) { role, hasRole := r.Roles[tuf.TargetsRoleName] if !hasRole { return -1, tuf.ErrPrimaryRuleFileInformationNotFoundInRoot } return role.Threshold, nil } // GetPrimaryRuleFilePrincipals returns the principals trusted for the primary // rule file. func (r *RootMetadata) GetPrimaryRuleFilePrincipals() ([]tuf.Principal, error) { role, hasRole := r.Roles[tuf.TargetsRoleName] if !hasRole { return nil, tuf.ErrPrimaryRuleFileInformationNotFoundInRoot } principals := make([]tuf.Principal, 0, role.PrincipalIDs.Len()) for _, id := range role.PrincipalIDs.Contents() { principals = append(principals, r.Principals[id]) } return principals, nil } // IsGitHubAppApprovalTrusted indicates if the GitHub app is trusted. // // TODO: this needs to be generalized across tools func (r *RootMetadata) IsGitHubAppApprovalTrusted() bool { return r.GitHubApprovalsTrusted } // GetGitHubAppPrincipals returns the principals trusted for the GitHub app // attestations. 
// // TODO: this needs to be generalized across tools func (r *RootMetadata) GetGitHubAppPrincipals() ([]tuf.Principal, error) { role, hasRole := r.Roles[tuf.GitHubAppRoleName] if !hasRole { return nil, tuf.ErrGitHubAppInformationNotFoundInRoot } principals := make([]tuf.Principal, 0, role.PrincipalIDs.Len()) for _, id := range role.PrincipalIDs.Contents() { principals = append(principals, r.Principals[id]) } return principals, nil } func (r *RootMetadata) UnmarshalJSON(data []byte) error { // this type _has_ to be a copy of RootMetadata, minus the use of // json.RawMessage in place of tuf interfaces type tempType struct { Type string `json:"type"` Version string `json:"schemaVersion"` Expires string `json:"expires"` RepositoryLocation string `json:"repositoryLocation,omitempty"` Principals map[string]json.RawMessage `json:"principals"` Roles map[string]Role `json:"roles"` GitHubApprovalsTrusted bool `json:"githubApprovalsTrusted"` GlobalRules []json.RawMessage `json:"globalRules,omitempty"` Propagations []json.RawMessage `json:"propagations,omitempty"` MultiRepository *MultiRepository `json:"multiRepository,omitempty"` } temp := &tempType{} if err := json.Unmarshal(data, &temp); err != nil { return fmt.Errorf("unable to unmarshal json: %w", err) } r.Type = temp.Type r.Version = temp.Version r.Expires = temp.Expires r.RepositoryLocation = temp.RepositoryLocation r.Principals = make(map[string]tuf.Principal) for principalID, principalBytes := range temp.Principals { tempPrincipal := map[string]any{} if err := json.Unmarshal(principalBytes, &tempPrincipal); err != nil { return fmt.Errorf("unable to unmarshal json: %w", err) } if _, has := tempPrincipal["keyid"]; has { // this is *Key key := &Key{} if err := json.Unmarshal(principalBytes, key); err != nil { return fmt.Errorf("unable to unmarshal json: %w", err) } r.Principals[principalID] = key continue } if _, has := tempPrincipal["personID"]; has { // this is *Person person := &Person{} if err := json.Unmarshal(principalBytes, person); err != nil { return fmt.Errorf("unable to unmarshal json: %w", err) } r.Principals[principalID] = person continue } return fmt.Errorf("unrecognized principal type '%s'", string(principalBytes)) } r.Roles = temp.Roles r.GitHubApprovalsTrusted = temp.GitHubApprovalsTrusted r.GlobalRules = []tuf.GlobalRule{} for _, globalRuleBytes := range temp.GlobalRules { tempGlobalRule := map[string]any{} if err := json.Unmarshal(globalRuleBytes, &tempGlobalRule); err != nil { return fmt.Errorf("unable to unmarshal json: %w", err) } switch tempGlobalRule["type"] { case tuf.GlobalRuleThresholdType: globalRule := &GlobalRuleThreshold{} if err := json.Unmarshal(globalRuleBytes, globalRule); err != nil { return fmt.Errorf("unable to unmarshal json: %w", err) } r.GlobalRules = append(r.GlobalRules, globalRule) case tuf.GlobalRuleBlockForcePushesType: globalRule := &GlobalRuleBlockForcePushes{} if err := json.Unmarshal(globalRuleBytes, globalRule); err != nil { return fmt.Errorf("unable to unmarshal json: %w", err) } r.GlobalRules = append(r.GlobalRules, globalRule) default: return tuf.ErrUnknownGlobalRuleType } } r.Propagations = []tuf.PropagationDirective{} for _, propagationDirectiveBytes := range temp.Propagations { propagationDirective := &PropagationDirective{} if err := json.Unmarshal(propagationDirectiveBytes, propagationDirective); err != nil { return fmt.Errorf("unable to unmarshal json for propagation directive: %w", err) } r.Propagations = append(r.Propagations, propagationDirective) } r.MultiRepository = 
temp.MultiRepository return nil } // AddGlobalRule adds a new global rule to RootMetadata. func (r *RootMetadata) AddGlobalRule(globalRule tuf.GlobalRule) error { allGlobalRules := r.GlobalRules if allGlobalRules == nil { allGlobalRules = []tuf.GlobalRule{} } // check for duplicates for _, rule := range allGlobalRules { if rule.GetName() == globalRule.GetName() { return tuf.ErrGlobalRuleAlreadyExists } } allGlobalRules = append(allGlobalRules, globalRule) r.GlobalRules = allGlobalRules return nil } // DeleteGlobalRule removes the specified global rule from the RootMetadata. func (r *RootMetadata) DeleteGlobalRule(ruleName string) error { allGlobalRules := r.GlobalRules updatedGlobalRules := []tuf.GlobalRule{} if len(allGlobalRules) == 0 { return tuf.ErrGlobalRuleNotFound } for _, rule := range allGlobalRules { if rule.GetName() != ruleName { updatedGlobalRules = append(updatedGlobalRules, rule) } } r.GlobalRules = updatedGlobalRules return nil } // GetGlobalRules returns all the global rules in the root metadata. func (r *RootMetadata) GetGlobalRules() []tuf.GlobalRule { return r.GlobalRules } // AddPropagationDirective adds a propagation directive to the root metadata. func (r *RootMetadata) AddPropagationDirective(directive tuf.PropagationDirective) error { // TODO: handle duplicates / updates r.Propagations = append(r.Propagations, directive) return nil } // GetPropagationDirectives returns the propagation directives found in the root // metadata. func (r *RootMetadata) GetPropagationDirectives() []tuf.PropagationDirective { return r.Propagations } // DeletePropagationDirective removes a propagation directive from the root // metadata. func (r *RootMetadata) DeletePropagationDirective(name string) error { index := -1 for i, directive := range r.Propagations { if directive.GetName() == name { index = i break } } if index == -1 { return tuf.ErrPropagationDirectiveNotFound } r.Propagations = append(r.Propagations[:index], r.Propagations[index+1:]...) return nil } // IsController indicates if the repository serves as the controller for a // multi-repository gittuf network. func (r *RootMetadata) IsController() bool { if r.MultiRepository == nil { return false } return r.MultiRepository.IsController() } // EnableController marks the current repository as a controller repository. func (r *RootMetadata) EnableController() error { if r.MultiRepository == nil { r.MultiRepository = &MultiRepository{} } r.MultiRepository.Controller = true return nil // TODO: what if it's already a controller? noop? } // DisableController marks the current repository as not-a-controller. func (r *RootMetadata) DisableController() error { if r.MultiRepository == nil { // nothing to do return nil } r.MultiRepository.Controller = false // TODO: should we remove the network repository entries? return nil } // AddControllerRepository adds the specified repository as a controller for the // current repository. 
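//
// A hedged usage sketch (the repository name, location, and principals are
// hypothetical); note that a matching propagation directive is also added so
// that the controller's policy reference is propagated into this repository:
//
//	err := rootMetadata.AddControllerRepository(
//		"controller", "https://example.com/controller", initialRootPrincipals,
//	)
//	// GetPropagationDirectives() now includes an entry for the controller,
//	// named using the tuf.GittufControllerPrefix prefix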
func (r *RootMetadata) AddControllerRepository(name, location string, initialRootPrincipals []tuf.Principal) error { if r.MultiRepository == nil { r.MultiRepository = &MultiRepository{ControllerRepositories: []*OtherRepository{}} } // TODO: check for duplicates otherRepository := &OtherRepository{ Name: name, Location: location, InitialRootPrincipals: make([]tuf.Principal, 0, len(initialRootPrincipals)), } for _, principal := range initialRootPrincipals { switch principal := principal.(type) { case *Key: otherRepository.InitialRootPrincipals = append(otherRepository.InitialRootPrincipals, principal) case *Person: otherRepository.InitialRootPrincipals = append(otherRepository.InitialRootPrincipals, principal) default: return tuf.ErrInvalidPrincipalType } } r.MultiRepository.ControllerRepositories = append(r.MultiRepository.ControllerRepositories, otherRepository) // Add the controller as a repository whose policy contents must be // propagated into this repository propagationName := fmt.Sprintf("%s-%s", tuf.GittufControllerPrefix, name) propagationLocation := path.Join(tuf.GittufControllerPrefix, name) return r.AddPropagationDirective(NewPropagationDirective(propagationName, location, "refs/gittuf/policy", "refs/gittuf/policy", propagationLocation)) } // AddNetworkRepository adds the specified repository as part of the network for // which the current repository is a controller. The current repository must be // marked as a controller before this can be used. func (r *RootMetadata) AddNetworkRepository(name, location string, initialRootPrincipals []tuf.Principal) error { if r.MultiRepository == nil || !r.MultiRepository.Controller { // EnableController must be called first return tuf.ErrNotAControllerRepository } if r.MultiRepository.NetworkRepositories == nil { r.MultiRepository.NetworkRepositories = []*OtherRepository{} } // TODO: check for duplicates otherRepository := &OtherRepository{ Name: name, Location: location, InitialRootPrincipals: make([]tuf.Principal, 0, len(initialRootPrincipals)), } for _, principal := range initialRootPrincipals { switch principal := principal.(type) { case *Key: otherRepository.InitialRootPrincipals = append(otherRepository.InitialRootPrincipals, principal) case *Person: otherRepository.InitialRootPrincipals = append(otherRepository.InitialRootPrincipals, principal) default: return tuf.ErrInvalidPrincipalType } } r.MultiRepository.NetworkRepositories = append(r.MultiRepository.NetworkRepositories, otherRepository) return nil } // GetControllerRepositories returns the repositories that serve as the // controllers for the networks the current repository is a part of. func (r *RootMetadata) GetControllerRepositories() []tuf.OtherRepository { if r.MultiRepository == nil { return nil } return r.MultiRepository.GetControllerRepositories() } // GetNetworkRepositories returns the repositories that are part of the network // for which the current repository is a controller. IsController must return // true for this to be set. func (r *RootMetadata) GetNetworkRepositories() []tuf.OtherRepository { if r.MultiRepository == nil { return nil } return r.MultiRepository.GetNetworkRepositories() } // addPrincipal adds a principal to the RootMetadata instance. v02 of the // metadata supports Key and Person as supported principal types. 
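//
// Illustrative only (the variables are hypothetical): both supported principal
// types are stored the same way, keyed by their ID:
//
//	_ = r.addPrincipal(key)    // *Key, stored under key.ID()
//	_ = r.addPrincipal(person) // *Person, stored under person.ID()
//	// any other tuf.Principal implementation yields tuf.ErrInvalidPrincipalType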
func (r *RootMetadata) addPrincipal(principal tuf.Principal) error { if r.Principals == nil { r.Principals = map[string]tuf.Principal{} } switch principal := principal.(type) { case *Key, *Person: r.Principals[principal.ID()] = principal default: return tuf.ErrInvalidPrincipalType } return nil } // addRole adds a role object and associates it with roleName in the // RootMetadata instance. func (r *RootMetadata) addRole(roleName string, role Role) { if r.Roles == nil { r.Roles = map[string]Role{} } r.Roles[roleName] = role } type GlobalRuleThreshold = tufv01.GlobalRuleThreshold type GlobalRuleBlockForcePushes = tufv01.GlobalRuleBlockForcePushes var NewGlobalRuleThreshold = tufv01.NewGlobalRuleThreshold var NewGlobalRuleBlockForcePushes = tufv01.NewGlobalRuleBlockForcePushes type PropagationDirective = tufv01.PropagationDirective func NewPropagationDirective(name, upstreamRepository, upstreamReference, downstreamReference, downstreamPath string) tuf.PropagationDirective { return &PropagationDirective{ Name: name, UpstreamRepository: upstreamRepository, UpstreamReference: upstreamReference, DownstreamReference: downstreamReference, DownstreamPath: downstreamPath, } } type MultiRepository struct { Controller bool `json:"controller"` ControllerRepositories []*OtherRepository `json:"controllerRepositories,omitempty"` NetworkRepositories []*OtherRepository `json:"networkRepositories,omitempty"` } func (m *MultiRepository) IsController() bool { return m.Controller } func (m *MultiRepository) GetControllerRepositories() []tuf.OtherRepository { controllerRepositories := []tuf.OtherRepository{} for _, repository := range m.ControllerRepositories { controllerRepositories = append(controllerRepositories, repository) } return controllerRepositories } func (m *MultiRepository) GetNetworkRepositories() []tuf.OtherRepository { if !m.Controller { return nil } networkRepositories := []tuf.OtherRepository{} for _, repository := range m.NetworkRepositories { networkRepositories = append(networkRepositories, repository) } return networkRepositories } type OtherRepository struct { Name string `json:"name"` Location string `json:"location"` InitialRootPrincipals []tuf.Principal `json:"initialRootPrincipals"` } func (o *OtherRepository) GetName() string { return o.Name } func (o *OtherRepository) GetLocation() string { return o.Location } func (o *OtherRepository) GetInitialRootPrincipals() []tuf.Principal { return o.InitialRootPrincipals } func (o *OtherRepository) UnmarshalJSON(data []byte) error { type tempType struct { Name string `json:"name"` Location string `json:"location"` InitialRootPrincipals []json.RawMessage `json:"initialRootPrincipals"` } temp := &tempType{} if err := json.Unmarshal(data, &temp); err != nil { return fmt.Errorf("unable to unmarshal json: %w", err) } o.Name = temp.Name o.Location = temp.Location o.InitialRootPrincipals = make([]tuf.Principal, 0, len(temp.InitialRootPrincipals)) for _, principalBytes := range temp.InitialRootPrincipals { tempPrincipal := map[string]any{} if err := json.Unmarshal(principalBytes, &tempPrincipal); err != nil { return fmt.Errorf("unable to unmarshal json: %w", err) } if _, has := tempPrincipal["keyid"]; has { // this is *Key key := &Key{} if err := json.Unmarshal(principalBytes, key); err != nil { return fmt.Errorf("unable to unmarshal json: %w", err) } o.InitialRootPrincipals = append(o.InitialRootPrincipals, key) continue } if _, has := tempPrincipal["personID"]; has { // this is *Person person := &Person{} if err := json.Unmarshal(principalBytes, 
person); err != nil { return fmt.Errorf("unable to unmarshal json: %w", err) } o.InitialRootPrincipals = append(o.InitialRootPrincipals, person) continue } return fmt.Errorf("unrecognized principal type '%s'", string(principalBytes)) } return nil } gittuf-0.9.0/internal/tuf/v02/root_test.go000066400000000000000000000504351475150141000204210ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package v02 import ( "context" "encoding/json" "os" "path/filepath" "sort" "testing" "time" "github.com/gittuf/gittuf/internal/common/set" "github.com/gittuf/gittuf/internal/signerverifier/dsse" "github.com/gittuf/gittuf/internal/signerverifier/ssh" artifacts "github.com/gittuf/gittuf/internal/testartifacts" sslibdsse "github.com/gittuf/gittuf/internal/third_party/go-securesystemslib/dsse" "github.com/gittuf/gittuf/internal/tuf" "github.com/stretchr/testify/assert" ) func TestRootMetadata(t *testing.T) { rootMetadata := NewRootMetadata() key := NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, rootPubKeyBytes)) err := rootMetadata.addPrincipal(key) assert.Nil(t, err) assert.Equal(t, key, rootMetadata.Principals[key.KeyID]) person := &Person{ PersonID: "jane.doe@example.com", PublicKeys: map[string]*Key{key.KeyID: key}, } err = rootMetadata.addPrincipal(person) assert.Nil(t, err) assert.Equal(t, person, rootMetadata.Principals[person.PersonID]) t.Run("test SetExpires", func(t *testing.T) { d := time.Date(1995, time.October, 26, 9, 0, 0, 0, time.UTC) rootMetadata.SetExpires(d.Format(time.RFC3339)) assert.Equal(t, "1995-10-26T09:00:00Z", rootMetadata.Expires) }) t.Run("test addRole", func(t *testing.T) { rootMetadata.addRole("targets", Role{ PrincipalIDs: set.NewSetFromItems(key.KeyID), Threshold: 1, }) assert.True(t, rootMetadata.Roles["targets"].PrincipalIDs.Has(key.KeyID)) }) t.Run("test SchemaVersion", func(t *testing.T) { schemaVersion := rootMetadata.SchemaVersion() assert.Equal(t, RootVersion, schemaVersion) }) t.Run("test GetPrincipals", func(t *testing.T) { expectedPrincipals := map[string]tuf.Principal{ key.KeyID: key, person.PersonID: person, } principals := rootMetadata.GetPrincipals() assert.Equal(t, expectedPrincipals, principals) }) t.Run("test rootLocation", func(t *testing.T) { currentLocation := rootMetadata.GetRepositoryLocation() assert.Equal(t, "", currentLocation) location := "https://example.com/repository/location" rootMetadata.SetRepositoryLocation(location) currentLocation = rootMetadata.GetRepositoryLocation() assert.Equal(t, location, currentLocation) }) t.Run("test propagation directives", func(t *testing.T) { directives := rootMetadata.GetPropagationDirectives() assert.Empty(t, directives) directive := &PropagationDirective{ Name: "test", UpstreamRepository: "https://example.com/git/repository", UpstreamReference: "refs/heads/main", DownstreamReference: "refs/heads/main", DownstreamPath: "upstream/", } err = rootMetadata.AddPropagationDirective(directive) assert.Nil(t, err) directives = rootMetadata.GetPropagationDirectives() assert.Equal(t, 1, len(directives)) assert.Equal(t, directive, directives[0]) err = rootMetadata.DeletePropagationDirective("test") assert.Nil(t, err) directives = rootMetadata.GetPropagationDirectives() assert.Empty(t, directives) err = rootMetadata.DeletePropagationDirective("test") assert.ErrorIs(t, err, tuf.ErrPropagationDirectiveNotFound) }) t.Run("test multi-repository", func(t *testing.T) { isController := rootMetadata.IsController() assert.False(t, isController) name := "test" location := 
"http://git.example.com/repository" initialRootPrincipals := []tuf.Principal{key, person} err := rootMetadata.AddControllerRepository(name, location, initialRootPrincipals) assert.Nil(t, err) controllerRepositories := rootMetadata.GetControllerRepositories() assert.Equal(t, []tuf.OtherRepository{&OtherRepository{Name: name, Location: location, InitialRootPrincipals: initialRootPrincipals}}, controllerRepositories) propagations := rootMetadata.GetPropagationDirectives() found := false for _, propagation := range propagations { if propagation.GetName() == "gittuf-controller-test" { found = true break } } assert.True(t, found) err = rootMetadata.AddNetworkRepository(name, location, initialRootPrincipals) assert.ErrorIs(t, err, tuf.ErrNotAControllerRepository) err = rootMetadata.EnableController() assert.Nil(t, err) err = rootMetadata.AddNetworkRepository(name, location, initialRootPrincipals) assert.Nil(t, err) networkRepositories := rootMetadata.GetNetworkRepositories() assert.Equal(t, []tuf.OtherRepository{&OtherRepository{Name: name, Location: location, InitialRootPrincipals: initialRootPrincipals}}, networkRepositories) err = rootMetadata.DisableController() assert.Nil(t, err) networkRepositories = rootMetadata.GetNetworkRepositories() assert.Nil(t, networkRepositories) }) } func TestRootMetadataWithSSHKey(t *testing.T) { // Setup test key pair keys := []struct { name string data []byte }{ {"rsa", artifacts.SSHRSAPrivate}, {"rsa.pub", artifacts.SSHRSAPublicSSH}, } tmpDir := t.TempDir() for _, key := range keys { keyPath := filepath.Join(tmpDir, key.name) if err := os.WriteFile(keyPath, key.data, 0o600); err != nil { t.Fatal(err) } } keyPath := filepath.Join(tmpDir, "rsa") sslibKeyO, err := ssh.NewKeyFromFile(keyPath) if err != nil { t.Fatal(err) } sslibKey := NewKeyFromSSLibKey(sslibKeyO) // Create TUF root and add test key rootMetadata := NewRootMetadata() if err := rootMetadata.addPrincipal(sslibKey); err != nil { t.Fatal(err) } // Wrap and and sign ctx := context.Background() env, err := dsse.CreateEnvelope(rootMetadata) if err != nil { t.Fatal(err) } verifier, err := ssh.NewVerifierFromKey(sslibKeyO) if err != nil { t.Fatal() } signer := &ssh.Signer{ Verifier: verifier, Path: keyPath, } env, err = dsse.SignEnvelope(ctx, env, signer) if err != nil { t.Fatal(err) } // Unwrap and verify // NOTE: For the sake of testing the contained key, we unwrap before we // verify. Typically, in DSSE it should be the other way around. 
payload, err := env.DecodeB64Payload() if err != nil { t.Fatal(err) } rootMetadata2 := &RootMetadata{} if err := json.Unmarshal(payload, rootMetadata2); err != nil { t.Log(string(payload)) t.Fatal(err) } sslibKey2 := rootMetadata2.Principals[sslibKey.KeyID] // NOTE: Typically, a caller would choose this method, if KeyType==ssh.SSHKeyType verifier2, err := ssh.NewVerifierFromKey(sslibKey2.Keys()[0]) if err != nil { t.Fatal(err) } _, err = dsse.VerifyEnvelope(ctx, env, []sslibdsse.Verifier{verifier2}, 1) if err != nil { t.Fatal(err) } } func TestAddRootPrincipal(t *testing.T) { key := NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, rootPubKeyBytes)) t.Run("with root role already in metadata", func(t *testing.T) { rootMetadata := initialTestRootMetadata(t) newRootKey := NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, targets1PubKeyBytes)) err := rootMetadata.AddRootPrincipal(newRootKey) assert.Nil(t, err) assert.Equal(t, newRootKey, rootMetadata.Principals[newRootKey.KeyID]) assert.Equal(t, set.NewSetFromItems(key.KeyID, newRootKey.KeyID), rootMetadata.Roles[tuf.RootRoleName].PrincipalIDs) }) t.Run("without root role already in metadata", func(t *testing.T) { rootMetadata := NewRootMetadata() err := rootMetadata.AddRootPrincipal(key) assert.Nil(t, err) assert.Equal(t, key, rootMetadata.Principals[key.KeyID]) assert.Equal(t, set.NewSetFromItems(key.KeyID), rootMetadata.Roles[tuf.RootRoleName].PrincipalIDs) }) t.Run("with person", func(t *testing.T) { rootMetadata := initialTestRootMetadata(t) person := &Person{ PersonID: "jane.doe@example.com", PublicKeys: map[string]*Key{ key.KeyID: key, }, } err := rootMetadata.AddRootPrincipal(person) assert.Nil(t, err) assert.Equal(t, person, rootMetadata.Principals[person.PersonID]) assert.Equal(t, set.NewSetFromItems(person.PersonID, key.KeyID), rootMetadata.Roles[tuf.RootRoleName].PrincipalIDs) }) } func TestDeleteRootPrincipal(t *testing.T) { key := NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, rootPubKeyBytes)) rootMetadata := initialTestRootMetadata(t) newRootKey := NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, targets1PubKeyBytes)) person := &Person{ PersonID: "jane.doe@example.com", PublicKeys: map[string]*Key{ key.KeyID: key, }, } err := rootMetadata.AddRootPrincipal(newRootKey) assert.Nil(t, err) err = rootMetadata.AddRootPrincipal(person) assert.Nil(t, err) err = rootMetadata.DeleteRootPrincipal(newRootKey.KeyID) assert.Nil(t, err) assert.Equal(t, newRootKey, rootMetadata.Principals[newRootKey.KeyID]) assert.Equal(t, set.NewSetFromItems(key.KeyID, person.PersonID), rootMetadata.Roles[tuf.RootRoleName].PrincipalIDs) err = rootMetadata.DeleteRootPrincipal(person.PersonID) assert.Nil(t, err) assert.Equal(t, person, rootMetadata.Principals[person.PersonID]) assert.Equal(t, set.NewSetFromItems(key.KeyID), rootMetadata.Roles[tuf.RootRoleName].PrincipalIDs) err = rootMetadata.DeleteRootPrincipal(key.KeyID) assert.ErrorIs(t, err, tuf.ErrCannotMeetThreshold) } func TestAddPrimaryRuleFilePrincipal(t *testing.T) { rootMetadata := initialTestRootMetadata(t) targetsKey := NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, targets1PubKeyBytes)) err := rootMetadata.AddPrimaryRuleFilePrincipal(nil) assert.ErrorIs(t, err, tuf.ErrInvalidPrincipalType) err = rootMetadata.AddPrimaryRuleFilePrincipal(targetsKey) assert.Nil(t, err) assert.Equal(t, targetsKey, rootMetadata.Principals[targetsKey.KeyID]) assert.Equal(t, set.NewSetFromItems(targetsKey.KeyID), rootMetadata.Roles[tuf.TargetsRoleName].PrincipalIDs) person := &Person{ PersonID: "jane.doe@example.com", PublicKeys: map[string]*Key{ 
targetsKey.KeyID: targetsKey, }, } err = rootMetadata.AddPrimaryRuleFilePrincipal(person) assert.Nil(t, err) assert.Equal(t, person, rootMetadata.Principals[person.PersonID]) assert.Equal(t, set.NewSetFromItems(targetsKey.KeyID, person.PersonID), rootMetadata.Roles[tuf.TargetsRoleName].PrincipalIDs) } func TestDeletePrimaryRuleFilePrincipal(t *testing.T) { rootMetadata := initialTestRootMetadata(t) targetsKey1 := NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, targets1PubKeyBytes)) targetsKey2 := NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, targets2PubKeyBytes)) err := rootMetadata.AddPrimaryRuleFilePrincipal(targetsKey1) assert.Nil(t, err) err = rootMetadata.AddPrimaryRuleFilePrincipal(targetsKey2) assert.Nil(t, err) err = rootMetadata.DeletePrimaryRuleFilePrincipal("") assert.ErrorIs(t, err, tuf.ErrInvalidPrincipalID) err = rootMetadata.DeletePrimaryRuleFilePrincipal(targetsKey1.KeyID) assert.Nil(t, err) assert.Equal(t, targetsKey1, rootMetadata.Principals[targetsKey1.KeyID]) assert.Equal(t, targetsKey2, rootMetadata.Principals[targetsKey2.KeyID]) targetsRole := rootMetadata.Roles[tuf.TargetsRoleName] assert.True(t, targetsRole.PrincipalIDs.Has(targetsKey2.KeyID)) person := &Person{ PersonID: "jane.doe@example.com", PublicKeys: map[string]*Key{ targetsKey1.KeyID: targetsKey1, }, } err = rootMetadata.AddPrimaryRuleFilePrincipal(person) assert.Nil(t, err) assert.True(t, rootMetadata.Roles[tuf.TargetsRoleName].PrincipalIDs.Has(person.PersonID)) err = rootMetadata.DeletePrimaryRuleFilePrincipal(person.PersonID) assert.Nil(t, err) assert.False(t, rootMetadata.Roles[tuf.TargetsRoleName].PrincipalIDs.Has(person.PersonID)) err = rootMetadata.DeletePrimaryRuleFilePrincipal(targetsKey2.KeyID) assert.ErrorIs(t, err, tuf.ErrCannotMeetThreshold) } func TestAddGitHubAppPrincipal(t *testing.T) { rootMetadata := initialTestRootMetadata(t) appKey := NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, targets1PubKeyBytes)) err := rootMetadata.AddGitHubAppPrincipal(tuf.GitHubAppRoleName, nil) assert.ErrorIs(t, err, tuf.ErrInvalidPrincipalType) err = rootMetadata.AddGitHubAppPrincipal(tuf.GitHubAppRoleName, appKey) assert.Nil(t, err) assert.Equal(t, appKey, rootMetadata.Principals[appKey.KeyID]) assert.Equal(t, set.NewSetFromItems(appKey.KeyID), rootMetadata.Roles[tuf.GitHubAppRoleName].PrincipalIDs) } func TestDeleteGitHubAppPrincipal(t *testing.T) { rootMetadata := initialTestRootMetadata(t) appKey := NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, targets1PubKeyBytes)) err := rootMetadata.AddGitHubAppPrincipal(tuf.GitHubAppRoleName, appKey) assert.Nil(t, err) rootMetadata.DeleteGitHubAppPrincipal(tuf.GitHubAppRoleName) assert.Nil(t, rootMetadata.Roles[tuf.GitHubAppRoleName].PrincipalIDs) } func TestEnableGitHubAppApprovals(t *testing.T) { rootMetadata := initialTestRootMetadata(t) assert.False(t, rootMetadata.GitHubApprovalsTrusted) rootMetadata.EnableGitHubAppApprovals() assert.True(t, rootMetadata.GitHubApprovalsTrusted) } func TestDisableGitHubAppApprovals(t *testing.T) { rootMetadata := initialTestRootMetadata(t) assert.False(t, rootMetadata.GitHubApprovalsTrusted) rootMetadata.EnableGitHubAppApprovals() assert.True(t, rootMetadata.GitHubApprovalsTrusted) rootMetadata.DisableGitHubAppApprovals() assert.False(t, rootMetadata.GitHubApprovalsTrusted) } func TestUpdateAndGetRootThreshold(t *testing.T) { rootMetadata := NewRootMetadata() err := rootMetadata.UpdateRootThreshold(3) assert.ErrorIs(t, err, tuf.ErrInvalidRootMetadata) threshold, err := rootMetadata.GetRootThreshold() assert.ErrorIs(t, err, tuf.ErrInvalidRootMetadata) 
assert.Equal(t, -1, threshold) key1 := NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, rootPubKeyBytes)) key2 := NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, targets1PubKeyBytes)) if err := rootMetadata.AddRootPrincipal(key1); err != nil { t.Fatal(err) } if err := rootMetadata.AddRootPrincipal(key2); err != nil { t.Fatal(err) } err = rootMetadata.UpdateRootThreshold(2) assert.Nil(t, err) assert.Equal(t, 2, rootMetadata.Roles[tuf.RootRoleName].Threshold) threshold, err = rootMetadata.GetRootThreshold() assert.Nil(t, err) assert.Equal(t, 2, threshold) err = rootMetadata.UpdateRootThreshold(3) assert.ErrorIs(t, err, tuf.ErrCannotMeetThreshold) } func TestUpdateAndGetPrimaryRuleFileThreshold(t *testing.T) { rootMetadata := initialTestRootMetadata(t) err := rootMetadata.UpdatePrimaryRuleFileThreshold(3) assert.ErrorIs(t, err, tuf.ErrPrimaryRuleFileInformationNotFoundInRoot) threshold, err := rootMetadata.GetPrimaryRuleFileThreshold() assert.ErrorIs(t, err, tuf.ErrPrimaryRuleFileInformationNotFoundInRoot) assert.Equal(t, -1, threshold) key1 := NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, targets1PubKeyBytes)) key2 := NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, targets2PubKeyBytes)) if err := rootMetadata.AddPrimaryRuleFilePrincipal(key1); err != nil { t.Fatal(err) } if err := rootMetadata.AddPrimaryRuleFilePrincipal(key2); err != nil { t.Fatal(err) } err = rootMetadata.UpdatePrimaryRuleFileThreshold(2) assert.Nil(t, err) assert.Equal(t, 2, rootMetadata.Roles[tuf.TargetsRoleName].Threshold) threshold, err = rootMetadata.GetPrimaryRuleFileThreshold() assert.Nil(t, err) assert.Equal(t, 2, threshold) err = rootMetadata.UpdatePrimaryRuleFileThreshold(3) assert.ErrorIs(t, err, tuf.ErrCannotMeetThreshold) } func TestGetRootPrincipals(t *testing.T) { key := NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, rootPubKeyBytes)) person := &Person{ PersonID: "jane.doe@example.com", PublicKeys: map[string]*Key{key.KeyID: key}, } t.Run("root role exists", func(t *testing.T) { rootMetadata := initialTestRootMetadata(t) expectedPrincipals := []tuf.Principal{key} rootPrincipals, err := rootMetadata.GetRootPrincipals() assert.Nil(t, err) assert.Equal(t, expectedPrincipals, rootPrincipals) }) t.Run("root role does not exist", func(t *testing.T) { rootMetadata := NewRootMetadata() rootPrincipals, err := rootMetadata.GetRootPrincipals() assert.ErrorIs(t, err, tuf.ErrInvalidRootMetadata) assert.Nil(t, rootPrincipals) }) t.Run("with person", func(t *testing.T) { rootMetadata := initialTestRootMetadata(t) err := rootMetadata.AddRootPrincipal(person) assert.Nil(t, err) expectedPrincipals := []tuf.Principal{key, person} sort.Slice(expectedPrincipals, func(i, j int) bool { return expectedPrincipals[i].ID() < expectedPrincipals[j].ID() }) rootPrincipals, err := rootMetadata.GetRootPrincipals() assert.Nil(t, err) sort.Slice(rootPrincipals, func(i, j int) bool { return rootPrincipals[i].ID() < rootPrincipals[j].ID() }) assert.Equal(t, expectedPrincipals, rootPrincipals) }) } func TestGetPrimaryRuleFilePrincipals(t *testing.T) { key := NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, rootPubKeyBytes)) person := &Person{ PersonID: "jane.doe@example.com", PublicKeys: map[string]*Key{key.KeyID: key}, } t.Run("targets role exists", func(t *testing.T) { rootMetadata := initialTestRootMetadata(t) err := rootMetadata.AddPrimaryRuleFilePrincipal(key) assert.Nil(t, err) expectedPrincipals := []tuf.Principal{key} principals, err := rootMetadata.GetPrimaryRuleFilePrincipals() assert.Nil(t, err) assert.Equal(t, expectedPrincipals, principals) }) t.Run("targets role 
does not exist", func(t *testing.T) { rootMetadata := NewRootMetadata() rootPrincipals, err := rootMetadata.GetPrimaryRuleFilePrincipals() assert.ErrorIs(t, err, tuf.ErrPrimaryRuleFileInformationNotFoundInRoot) assert.Nil(t, rootPrincipals) }) t.Run("with person", func(t *testing.T) { rootMetadata := initialTestRootMetadata(t) err := rootMetadata.AddPrimaryRuleFilePrincipal(person) assert.Nil(t, err) expectedPrincipals := []tuf.Principal{person} principals, err := rootMetadata.GetPrimaryRuleFilePrincipals() assert.Nil(t, err) assert.Equal(t, expectedPrincipals, principals) }) } func TestGetGitHubAppPrincipals(t *testing.T) { key := NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, rootPubKeyBytes)) t.Run("role exists", func(t *testing.T) { rootMetadata := initialTestRootMetadata(t) err := rootMetadata.AddGitHubAppPrincipal(tuf.GitHubAppRoleName, key) assert.Nil(t, err) expectedPrincipals := []tuf.Principal{key} principals, err := rootMetadata.GetGitHubAppPrincipals() assert.Nil(t, err) assert.Equal(t, expectedPrincipals, principals) }) t.Run("role does not exist", func(t *testing.T) { rootMetadata := NewRootMetadata() rootPrincipals, err := rootMetadata.GetGitHubAppPrincipals() assert.ErrorIs(t, err, tuf.ErrGitHubAppInformationNotFoundInRoot) assert.Nil(t, rootPrincipals) }) } func TestIsGitHubAppApprovalTrusted(t *testing.T) { rootMetadata := initialTestRootMetadata(t) trusted := rootMetadata.IsGitHubAppApprovalTrusted() assert.False(t, trusted) key := NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, rootPubKeyBytes)) err := rootMetadata.AddGitHubAppPrincipal(tuf.GitHubAppRoleName, key) assert.Nil(t, err) rootMetadata.EnableGitHubAppApprovals() trusted = rootMetadata.IsGitHubAppApprovalTrusted() assert.True(t, trusted) } func TestGlobalRules(t *testing.T) { rootMetadata := initialTestRootMetadata(t) assert.Nil(t, rootMetadata.GlobalRules) // no global rule yet err := rootMetadata.AddGlobalRule(NewGlobalRuleThreshold("threshold-2-main", []string{"git:refs/heads/main"}, 2)) assert.Nil(t, err) err = rootMetadata.AddGlobalRule(NewGlobalRuleThreshold("threshold-2-main", []string{"git:refs/heads/main"}, 2)) assert.ErrorIs(t, err, tuf.ErrGlobalRuleAlreadyExists) assert.Equal(t, 1, len(rootMetadata.GlobalRules)) assert.Equal(t, "threshold-2-main", rootMetadata.GlobalRules[0].GetName()) expectedGlobalRule := &GlobalRuleThreshold{ Name: "threshold-2-main", Paths: []string{"git:refs/heads/main"}, Threshold: 2, } globalRules := rootMetadata.GetGlobalRules() assert.Equal(t, expectedGlobalRule.GetName(), globalRules[0].GetName()) assert.Equal(t, expectedGlobalRule.GetProtectedNamespaces(), globalRules[0].(tuf.GlobalRuleThreshold).GetProtectedNamespaces()) assert.Equal(t, expectedGlobalRule.GetThreshold(), globalRules[0].(tuf.GlobalRuleThreshold).GetThreshold()) forcePushesGlobalRule, err := NewGlobalRuleBlockForcePushes("block-force-pushes", []string{"git:refs/heads/main"}) if err != nil { t.Fatal(err) } err = rootMetadata.AddGlobalRule(forcePushesGlobalRule) assert.Nil(t, err) err = rootMetadata.AddGlobalRule(forcePushesGlobalRule) assert.ErrorIs(t, err, tuf.ErrGlobalRuleAlreadyExists) assert.Equal(t, 2, len(rootMetadata.GlobalRules)) assert.Equal(t, "threshold-2-main", rootMetadata.GlobalRules[0].GetName()) assert.Equal(t, "block-force-pushes", rootMetadata.GlobalRules[1].GetName()) err = rootMetadata.DeleteGlobalRule("threshold-2-main") assert.Nil(t, err) err = rootMetadata.DeleteGlobalRule("block-force-pushes") assert.Nil(t, err) assert.Equal(t, 0, len(rootMetadata.GlobalRules)) err = 
rootMetadata.DeleteGlobalRule("") assert.ErrorIs(t, err, tuf.ErrGlobalRuleNotFound) } gittuf-0.9.0/internal/tuf/v02/targets.go000066400000000000000000000267331475150141000200540ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package v02 import ( "encoding/json" "errors" "fmt" "strings" "github.com/danwakefield/fnmatch" "github.com/gittuf/gittuf/internal/common/set" "github.com/gittuf/gittuf/internal/tuf" ) const ( TargetsVersion = "http://gittuf.dev/policy/rule-file/v0.2" ) var ErrTargetsNotEmpty = errors.New("`targets` field in gittuf Targets metadata must be empty") // TargetsMetadata defines the schema of TUF's Targets role. type TargetsMetadata struct { Type string `json:"type"` Version string `json:"schemaVersion"` Expires string `json:"expires"` Targets map[string]any `json:"targets"` Delegations *Delegations `json:"delegations"` } // NewTargetsMetadata returns a new instance of TargetsMetadata. func NewTargetsMetadata() *TargetsMetadata { return &TargetsMetadata{ Type: "targets", Version: TargetsVersion, Delegations: &Delegations{Roles: []*Delegation{AllowRule()}}, } } // SetExpires sets the expiry date of the TargetsMetadata to the value passed // in. func (t *TargetsMetadata) SetExpires(expires string) { t.Expires = expires } // SchemaVersion returns the metadata schema version. func (t *TargetsMetadata) SchemaVersion() string { return t.Version } // Validate ensures the instance of TargetsMetadata matches gittuf expectations. func (t *TargetsMetadata) Validate() error { if len(t.Targets) != 0 { return ErrTargetsNotEmpty } return nil } // AddRule adds a new delegation to TargetsMetadata. func (t *TargetsMetadata) AddRule(ruleName string, authorizedPrincipalIDs, rulePatterns []string, threshold int) error { if strings.HasPrefix(ruleName, tuf.GittufPrefix) { return tuf.ErrCannotManipulateRulesWithGittufPrefix } for _, principalID := range authorizedPrincipalIDs { if _, has := t.Delegations.Principals[principalID]; !has { return tuf.ErrPrincipalNotFound } } if len(authorizedPrincipalIDs) < threshold { return tuf.ErrCannotMeetThreshold } allDelegations := t.Delegations.Roles if allDelegations == nil { allDelegations = []*Delegation{} } newDelegation := &Delegation{ Name: ruleName, Paths: rulePatterns, Terminating: false, Role: Role{ PrincipalIDs: set.NewSetFromItems(authorizedPrincipalIDs...), Threshold: threshold, }, } allDelegations = append(allDelegations[:len(allDelegations)-1], newDelegation, AllowRule()) t.Delegations.Roles = allDelegations return nil } // UpdateRule is used to amend a delegation in TargetsMetadata. 
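//
// A minimal sketch of amending an existing rule (the rule name, pattern, and
// key IDs are hypothetical; the principals must already exist in the metadata):
//
//	err := t.UpdateRule("protect-main", []string{keyID1, keyID2}, []string{"git:refs/heads/main"}, 2)
//	// the rule keeps its position in the rule file; only its patterns,
//	// authorized principals, and threshold are replaced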
func (t *TargetsMetadata) UpdateRule(ruleName string, authorizedPrincipalIDs, rulePatterns []string, threshold int) error { if strings.HasPrefix(ruleName, tuf.GittufPrefix) { return tuf.ErrCannotManipulateRulesWithGittufPrefix } for _, principalID := range authorizedPrincipalIDs { if _, has := t.Delegations.Principals[principalID]; !has { return tuf.ErrPrincipalNotFound } } if len(authorizedPrincipalIDs) < threshold { return tuf.ErrCannotMeetThreshold } allDelegations := []*Delegation{} for _, delegation := range t.Delegations.Roles { if delegation.ID() == tuf.AllowRuleName { break } if delegation.ID() != ruleName { allDelegations = append(allDelegations, delegation) continue } if delegation.Name == ruleName { delegation.Paths = rulePatterns delegation.Role = Role{ PrincipalIDs: set.NewSetFromItems(authorizedPrincipalIDs...), Threshold: threshold, } } allDelegations = append(allDelegations, delegation) } allDelegations = append(allDelegations, AllowRule()) t.Delegations.Roles = allDelegations return nil } // ReorderRules changes the order of delegations, and the new order is specified // in `ruleNames []string`. func (t *TargetsMetadata) ReorderRules(ruleNames []string) error { // Create a map of all existing delegations for quick look up rolesMap := make(map[string]*Delegation) // Create a set of current rules in metadata, skipping the allow rule currentRules := set.NewSet[string]() for _, delegation := range t.Delegations.Roles { if delegation.Name == tuf.AllowRuleName { continue } rolesMap[delegation.Name] = delegation currentRules.Add(delegation.Name) } specifiedRules := set.NewSet[string]() for _, name := range ruleNames { if specifiedRules.Has(name) { return fmt.Errorf("%w: '%s'", tuf.ErrDuplicatedRuleName, name) } specifiedRules.Add(name) } if !currentRules.Equal(specifiedRules) { onlyInSpecifiedRules := specifiedRules.Minus(currentRules) if onlyInSpecifiedRules.Len() != 0 { if onlyInSpecifiedRules.Has(tuf.AllowRuleName) { return fmt.Errorf("%w: do not specify allow rule", tuf.ErrCannotManipulateRulesWithGittufPrefix) } contents := onlyInSpecifiedRules.Contents() return fmt.Errorf("%w: rules '%s' do not exist in current rule file", tuf.ErrRuleNotFound, strings.Join(contents, ", ")) } onlyInCurrentRules := currentRules.Minus(specifiedRules) if onlyInCurrentRules.Len() != 0 { contents := onlyInCurrentRules.Contents() return fmt.Errorf("%w: rules '%s' not specified", tuf.ErrMissingRules, strings.Join(contents, ", ")) } } // Create newDelegations and set it in the targetsMetadata after adding allow rule newDelegations := make([]*Delegation, 0, len(rolesMap)+1) for _, ruleName := range ruleNames { newDelegations = append(newDelegations, rolesMap[ruleName]) } newDelegations = append(newDelegations, AllowRule()) t.Delegations.Roles = newDelegations return nil } // RemoveRule deletes a delegation entry from TargetsMetadata. func (t *TargetsMetadata) RemoveRule(ruleName string) error { if strings.HasPrefix(ruleName, tuf.GittufPrefix) { return tuf.ErrCannotManipulateRulesWithGittufPrefix } allDelegations := t.Delegations.Roles updatedDelegations := []*Delegation{} for _, delegation := range allDelegations { if delegation.Name != ruleName { updatedDelegations = append(updatedDelegations, delegation) } } t.Delegations.Roles = updatedDelegations return nil } // GetPrincipals returns all the principals in the rule file. 
func (t *TargetsMetadata) GetPrincipals() map[string]tuf.Principal { principals := map[string]tuf.Principal{} for id, principal := range t.Delegations.Principals { principals[id] = principal } return principals } // GetRules returns all the rules in the metadata. func (t *TargetsMetadata) GetRules() []tuf.Rule { if t.Delegations == nil { return nil } rules := make([]tuf.Rule, 0, len(t.Delegations.Roles)) for _, delegation := range t.Delegations.Roles { rules = append(rules, delegation) } return rules } // AddPrincipal adds a principal to the metadata. // // TODO: this isn't associated with a specific rule; with the removal of // verify-commit and verify-tag, it may not make sense anymore func (t *TargetsMetadata) AddPrincipal(principal tuf.Principal) error { return t.Delegations.addPrincipal(principal) } // RemovePrincipal removes a principal from the metadata. func (t *TargetsMetadata) RemovePrincipal(principalID string) error { return t.Delegations.removePrincipal(principalID) } // Delegations defines the schema for specifying delegations in TUF's Targets // metadata. type Delegations struct { Principals map[string]tuf.Principal `json:"principals"` Roles []*Delegation `json:"roles"` } func (d *Delegations) UnmarshalJSON(data []byte) error { // this type _has_ to be a copy of Delegations, minus the use of // json.RawMessage in place of tuf.Principal type tempType struct { Principals map[string]json.RawMessage `json:"principals"` Roles []*Delegation `json:"roles"` } temp := &tempType{} if err := json.Unmarshal(data, temp); err != nil { return fmt.Errorf("unable to unmarshal json: %w", err) } d.Principals = make(map[string]tuf.Principal) for principalID, principalBytes := range temp.Principals { tempPrincipal := map[string]any{} if err := json.Unmarshal(principalBytes, &tempPrincipal); err != nil { return fmt.Errorf("unable to unmarshal json: %w", err) } if _, has := tempPrincipal["keyid"]; has { // this is *Key key := &Key{} if err := json.Unmarshal(principalBytes, key); err != nil { return fmt.Errorf("unable to unmarshal json: %w", err) } d.Principals[principalID] = key continue } if _, has := tempPrincipal["personID"]; has { // this is *Person person := &Person{} if err := json.Unmarshal(principalBytes, person); err != nil { return fmt.Errorf("unable to unmarshal json: %w", err) } d.Principals[principalID] = person continue } return fmt.Errorf("unrecognized principal type '%s'", string(principalBytes)) } d.Roles = temp.Roles return nil } // addPrincipal adds a delegations key or person. v02 supports Key and Person // as principal types. func (d *Delegations) addPrincipal(principal tuf.Principal) error { if d.Principals == nil { d.Principals = map[string]tuf.Principal{} } switch principal := principal.(type) { case *Key, *Person: d.Principals[principal.ID()] = principal default: return tuf.ErrInvalidPrincipalType } return nil } // removePrincipal removes a delegations key or person. v02 supports Key and // Person as principal types. func (d *Delegations) removePrincipal(principalID string) error { if d.Principals == nil { return tuf.ErrPrincipalNotFound } if principalID == "" { return tuf.ErrInvalidPrincipalID } for _, curRole := range d.Roles { if curRole.GetPrincipalIDs() != nil && curRole.GetPrincipalIDs().Has(principalID) { return tuf.ErrPrincipalStillInUse } } delete(d.Principals, principalID) return nil } // AllowRule returns the default, last rule for all policy files. 
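// Illustrative note: AddRule and ReorderRules in this file re-append this rule
// so that it always stays last; a rule list built by hand (someRule is a
// hypothetical delegation) would likewise end with it:
//
//	roles := []*Delegation{someRule, AllowRule()} // AllowRule matches "*" with threshold 1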
func AllowRule() *Delegation { return &Delegation{ Name: tuf.AllowRuleName, Paths: []string{"*"}, Terminating: true, Role: Role{ Threshold: 1, }, } } // Delegation defines the schema for a single delegation entry. It differs from // the standard TUF schema by allowing a `custom` field to record details // pertaining to the delegation. It implements the tuf.Rule interface. type Delegation struct { Name string `json:"name"` Paths []string `json:"paths"` Terminating bool `json:"terminating"` Custom *json.RawMessage `json:"custom,omitempty"` Role } // ID returns the identifier of the delegation, its name. func (d *Delegation) ID() string { return d.Name } // Matches checks if any of the delegation's patterns match the target. func (d *Delegation) Matches(target string) bool { for _, pattern := range d.Paths { // We validate pattern when it's added to / updated in the metadata if matches := fnmatch.Match(pattern, target, 0); matches { return true } } return false } // GetPrincipalIDs returns the identifiers of the principals that are listed as // trusted by the rule. func (d *Delegation) GetPrincipalIDs() *set.Set[string] { return d.Role.PrincipalIDs } // GetThreshold returns the threshold of principals that must approve to meet // the rule. func (d *Delegation) GetThreshold() int { return d.Role.Threshold } // IsLastTrustedInRuleFile indicates that subsequent rules in the rule file are // not to be trusted if the current rule matches the namespace under // verification (similar to TUF's terminating behavior). However, the current // rule's delegated rules as well as other rules already in the queue are // trusted. func (d *Delegation) IsLastTrustedInRuleFile() bool { return d.Terminating } // GetProtectedNamespaces returns the set of namespaces protected by the // delegation. 
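// An illustrative sketch (the delegation literal is hypothetical; the pattern
// follows the file: prefix convention used elsewhere in gittuf policy):
//
//	d := &Delegation{Name: "protect-docs", Paths: []string{"file:docs/*"}}
//	_ = d.GetProtectedNamespaces() // []string{"file:docs/*"}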
func (d *Delegation) GetProtectedNamespaces() []string { return d.Paths } gittuf-0.9.0/internal/tuf/v02/targets_test.go000066400000000000000000000340551475150141000211070ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package v02 import ( "fmt" "testing" "time" "github.com/gittuf/gittuf/internal/common/set" "github.com/gittuf/gittuf/internal/signerverifier/ssh" "github.com/gittuf/gittuf/internal/tuf" "github.com/stretchr/testify/assert" ) func TestTargetsMetadataAndDelegations(t *testing.T) { targetsMetadata := NewTargetsMetadata() t.Run("test SetExpires", func(t *testing.T) { d := time.Date(1995, time.October, 26, 9, 0, 0, 0, time.UTC) targetsMetadata.SetExpires(d.Format(time.RFC3339)) assert.Equal(t, "1995-10-26T09:00:00Z", targetsMetadata.Expires) }) t.Run("test Validate", func(t *testing.T) { err := targetsMetadata.Validate() assert.Nil(t, err) targetsMetadata.Targets = map[string]any{"test": true} err = targetsMetadata.Validate() assert.ErrorIs(t, err, ErrTargetsNotEmpty) targetsMetadata.Targets = nil }) key := NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, rootPubKeyBytes)) person := &Person{ PersonID: "jane.doe", PublicKeys: map[string]*Key{key.KeyID: key}, } t.Run("test addPrincipal", func(t *testing.T) { delegations := &Delegations{} assert.Nil(t, delegations.Principals) err := delegations.addPrincipal(key) assert.Nil(t, err) assert.Equal(t, key, delegations.Principals[key.KeyID]) err = delegations.addPrincipal(person) assert.Nil(t, err) assert.Equal(t, person, delegations.Principals[person.PersonID]) }) t.Run("test removePrincipal", func(t *testing.T) { delegations := &Delegations{} err := delegations.addPrincipal(key) assert.Nil(t, err) assert.Equal(t, key, delegations.Principals[key.KeyID]) err = delegations.addPrincipal(person) assert.Nil(t, err) assert.Equal(t, person, delegations.Principals[person.PersonID]) assert.NotEmpty(t, delegations.Principals) err = delegations.removePrincipal(key.KeyID) assert.Nil(t, err) _, exists := delegations.Principals[key.KeyID] assert.False(t, exists) err = delegations.removePrincipal(person.PersonID) assert.Nil(t, err) _, exists = delegations.Principals[person.PersonID] assert.False(t, exists) assert.Empty(t, delegations.Principals) }) } func TestDelegation(t *testing.T) { t.Run("matches", func(t *testing.T) { tests := map[string]struct { patterns []string target string expected bool }{ "full path, matches": { patterns: []string{"foo"}, target: "foo", expected: true, }, "artifact in directory, matches": { patterns: []string{"foo/*"}, target: "foo/bar", expected: true, }, "artifact in directory, does not match": { patterns: []string{"foo/*.txt"}, target: "foo/bar.tgz", expected: false, }, "artifact in directory, one pattern matches": { patterns: []string{"foo/*.txt", "foo/*.tgz"}, target: "foo/bar.tgz", expected: true, }, "artifact in subdirectory, matches": { patterns: []string{"foo/*"}, target: "foo/bar/foobar", expected: true, }, "artifact in subdirectory with specified extension, matches": { patterns: []string{"foo/*.tgz"}, target: "foo/bar/foobar.tgz", expected: true, }, "pattern with single character selector, matches": { patterns: []string{"foo/?.tgz"}, target: "foo/a.tgz", expected: true, }, "pattern with character sequence, matches": { patterns: []string{"foo/[abc].tgz"}, target: "foo/a.tgz", expected: true, }, "pattern with character sequence, does not match": { patterns: []string{"foo/[abc].tgz"}, target: "foo/x.tgz", expected: false, }, "pattern with negative character sequence, 
matches": { patterns: []string{"foo/[!abc].tgz"}, target: "foo/x.tgz", expected: true, }, "pattern with negative character sequence, does not match": { patterns: []string{"foo/[!abc].tgz"}, target: "foo/a.tgz", expected: false, }, "artifact in arbitrary directory, matches": { patterns: []string{"*/*.txt"}, target: "foo/bar/foobar.txt", expected: true, }, "artifact with specific name in arbitrary directory, matches": { patterns: []string{"*/foobar.txt"}, target: "foo/bar/foobar.txt", expected: true, }, "artifact with arbitrary subdirectories, matches": { patterns: []string{"foo/*/foobar.txt"}, target: "foo/bar/baz/foobar.txt", expected: true, }, "artifact in arbitrary directory, does not match": { patterns: []string{"*.txt"}, target: "foo/bar/foobar.txtfile", expected: false, }, "arbitrary directory, does not match": { patterns: []string{"*_test"}, target: "foo/bar_test/foobar", expected: false, }, "no patterns": { patterns: nil, target: "foo", expected: false, }, "pattern with multiple consecutive wildcards, matches": { patterns: []string{"foo/*/*/*.txt"}, target: "foo/bar/baz/qux.txt", expected: true, }, "pattern with multiple non-consecutive wildcards, matches": { patterns: []string{"foo/*/baz/*.txt"}, target: "foo/bar/baz/qux.txt", expected: true, }, "pattern with gittuf git prefix, matches": { patterns: []string{"git:refs/heads/*"}, target: "git:refs/heads/main", expected: true, }, "pattern with gittuf file prefix for all recursive contents, matches": { patterns: []string{"file:src/signatures/*"}, target: "file:src/signatures/rsa/rsa.go", expected: true, }, } for name, test := range tests { delegation := Delegation{Paths: test.patterns} got := delegation.Matches(test.target) assert.Equal(t, test.expected, got, fmt.Sprintf("unexpected result in test '%s'", name)) } }) t.Run("threshold", func(t *testing.T) { delegation := &Delegation{} threshold := delegation.GetThreshold() assert.Equal(t, 0, threshold) delegation.Threshold = 1 threshold = delegation.GetThreshold() assert.Equal(t, 1, threshold) }) t.Run("terminating", func(t *testing.T) { delegation := &Delegation{} isTerminating := delegation.IsLastTrustedInRuleFile() assert.False(t, isTerminating) delegation.Terminating = true isTerminating = delegation.IsLastTrustedInRuleFile() assert.True(t, isTerminating) }) t.Run("protected namespaces", func(t *testing.T) { delegation := &Delegation{ Paths: []string{"1", "2"}, } protected := delegation.GetProtectedNamespaces() assert.Equal(t, []string{"1", "2"}, protected) }) t.Run("principal IDs", func(t *testing.T) { keyIDs := set.NewSetFromItems("1", "2") delegation := &Delegation{ Role: Role{PrincipalIDs: keyIDs}, } principalIDs := delegation.GetPrincipalIDs() assert.Equal(t, keyIDs, principalIDs) }) } func TestAddRuleAndGetRules(t *testing.T) { targetsMetadata := initialTestTargetsMetadata(t) key1 := NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, targets1PubKeyBytes)) key2 := NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, targets2PubKeyBytes)) person := &Person{ PersonID: "jane.doe", PublicKeys: map[string]*Key{key1.KeyID: key1}, } if err := targetsMetadata.AddPrincipal(key1); err != nil { t.Fatal(err) } if err := targetsMetadata.AddPrincipal(key2); err != nil { t.Fatal(err) } if err := targetsMetadata.AddPrincipal(person); err != nil { t.Fatal(err) } err := targetsMetadata.AddRule("test-rule", []string{key1.KeyID, key2.KeyID, person.PersonID}, []string{"test/"}, 1) assert.Nil(t, err) assert.Contains(t, targetsMetadata.Delegations.Principals, key1.KeyID) assert.Equal(t, key1, 
targetsMetadata.Delegations.Principals[key1.KeyID]) assert.Contains(t, targetsMetadata.Delegations.Principals, key2.KeyID) assert.Equal(t, key2, targetsMetadata.Delegations.Principals[key2.KeyID]) assert.Contains(t, targetsMetadata.Delegations.Principals, person.PersonID) assert.Equal(t, person, targetsMetadata.Delegations.Principals[person.PersonID]) assert.Contains(t, targetsMetadata.Delegations.Roles, AllowRule()) rule := &Delegation{ Name: "test-rule", Paths: []string{"test/"}, Terminating: false, Role: Role{PrincipalIDs: set.NewSetFromItems(key1.KeyID, key2.KeyID, person.PersonID), Threshold: 1}, } assert.Equal(t, rule, targetsMetadata.Delegations.Roles[0]) rules := targetsMetadata.GetRules() assert.Equal(t, 2, len(rules)) assert.Equal(t, []tuf.Rule{rule, AllowRule()}, rules) } func TestUpdateDelegation(t *testing.T) { targetsMetadata := initialTestTargetsMetadata(t) key1 := NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, targets1PubKeyBytes)) key2 := NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, targets2PubKeyBytes)) if err := targetsMetadata.AddPrincipal(key1); err != nil { t.Fatal(err) } err := targetsMetadata.AddRule("test-rule", []string{key1.KeyID}, []string{"test/"}, 1) if err != nil { t.Fatal(err) } assert.Contains(t, targetsMetadata.Delegations.Principals, key1.KeyID) assert.Equal(t, key1, targetsMetadata.Delegations.Principals[key1.KeyID]) assert.Contains(t, targetsMetadata.Delegations.Roles, AllowRule()) assert.Equal(t, &Delegation{ Name: "test-rule", Paths: []string{"test/"}, Terminating: false, Role: Role{PrincipalIDs: set.NewSetFromItems(key1.KeyID), Threshold: 1}, }, targetsMetadata.Delegations.Roles[0]) if err := targetsMetadata.AddPrincipal(key2); err != nil { t.Fatal(err) } err = targetsMetadata.UpdateRule("test-rule", []string{key1.KeyID, key2.KeyID}, []string{"test/"}, 1) assert.Nil(t, err) assert.Contains(t, targetsMetadata.Delegations.Principals, key1.KeyID) assert.Equal(t, key1, targetsMetadata.Delegations.Principals[key1.KeyID]) assert.Contains(t, targetsMetadata.Delegations.Principals, key2.KeyID) assert.Equal(t, key2, targetsMetadata.Delegations.Principals[key2.KeyID]) assert.Contains(t, targetsMetadata.Delegations.Roles, AllowRule()) assert.Equal(t, &Delegation{ Name: "test-rule", Paths: []string{"test/"}, Terminating: false, Role: Role{PrincipalIDs: set.NewSetFromItems(key1.KeyID, key2.KeyID), Threshold: 1}, }, targetsMetadata.Delegations.Roles[0]) } func TestReorderRules(t *testing.T) { targetsMetadata := initialTestTargetsMetadata(t) key1 := NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, targets1PubKeyBytes)) key2 := NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, targets2PubKeyBytes)) if err := targetsMetadata.AddPrincipal(key1); err != nil { t.Fatal(err) } if err := targetsMetadata.AddPrincipal(key2); err != nil { t.Fatal(err) } err := targetsMetadata.AddRule("rule-1", []string{key1.KeyID}, []string{"path1/"}, 1) if err != nil { t.Fatal(err) } err = targetsMetadata.AddRule("rule-2", []string{key2.KeyID}, []string{"path2/"}, 1) if err != nil { t.Fatal(err) } err = targetsMetadata.AddRule("rule-3", []string{key1.KeyID, key2.KeyID}, []string{"path3/"}, 1) if err != nil { t.Fatal(err) } tests := map[string]struct { ruleNames []string expected []string expectedError error }{ "reverse order (valid input)": { ruleNames: []string{"rule-3", "rule-2", "rule-1"}, expected: []string{"rule-3", "rule-2", "rule-1", tuf.AllowRuleName}, expectedError: nil, }, "rule not specified in new order": { ruleNames: []string{"rule-3", "rule-2"}, expectedError: tuf.ErrMissingRules, }, "rule repeated 
in the new order": { ruleNames: []string{"rule-3", "rule-2", "rule-1", "rule-3"}, expectedError: tuf.ErrDuplicatedRuleName, }, "unknown rule in the new order": { ruleNames: []string{"rule-3", "rule-2", "rule-1", "rule-4"}, expectedError: tuf.ErrRuleNotFound, }, "unknown rule in the new order (with correct length)": { ruleNames: []string{"rule-3", "rule-2", "rule-4"}, expectedError: tuf.ErrRuleNotFound, }, "allow rule appears in the new order": { ruleNames: []string{"rule-2", "rule-3", "rule-1", tuf.AllowRuleName}, expectedError: tuf.ErrCannotManipulateRulesWithGittufPrefix, }, } for name, test := range tests { err = targetsMetadata.ReorderRules(test.ruleNames) if test.expectedError != nil { assert.ErrorIs(t, err, test.expectedError, fmt.Sprintf("unexpected error in test '%s'", name)) } else { assert.Nil(t, err, fmt.Sprintf("unexpected error in test '%s'", name)) assert.Equal(t, len(test.expected), len(targetsMetadata.Delegations.Roles), fmt.Sprintf("expected %d rules in test '%s', but got %d rules", len(test.expected), name, len(targetsMetadata.Delegations.Roles))) for i, ruleName := range test.expected { assert.Equal(t, ruleName, targetsMetadata.Delegations.Roles[i].Name, fmt.Sprintf("expected rule '%s' at index %d in test '%s', but got '%s'", ruleName, i, name, targetsMetadata.Delegations.Roles[i].Name)) } } } } func TestRemoveRule(t *testing.T) { targetsMetadata := initialTestTargetsMetadata(t) key := NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, targets1PubKeyBytes)) if err := targetsMetadata.AddPrincipal(key); err != nil { t.Fatal(err) } err := targetsMetadata.AddRule("test-rule", []string{key.KeyID}, []string{"test/"}, 1) if err != nil { t.Fatal(err) } assert.Equal(t, 2, len(targetsMetadata.Delegations.Roles)) err = targetsMetadata.RemoveRule("test-rule") assert.Nil(t, err) assert.Equal(t, 1, len(targetsMetadata.Delegations.Roles)) assert.Contains(t, targetsMetadata.Delegations.Roles, AllowRule()) assert.Contains(t, targetsMetadata.Delegations.Principals, key.KeyID) } func TestGetPrincipals(t *testing.T) { targetsMetadata := initialTestTargetsMetadata(t) key1 := NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, targets1PubKeyBytes)) if err := targetsMetadata.AddPrincipal(key1); err != nil { t.Fatal(err) } principals := targetsMetadata.GetPrincipals() assert.Equal(t, map[string]tuf.Principal{key1.KeyID: key1}, principals) key2 := NewKeyFromSSLibKey(ssh.NewKeyFromBytes(t, targets2PubKeyBytes)) if err := targetsMetadata.AddPrincipal(key2); err != nil { t.Fatal(err) } principals = targetsMetadata.GetPrincipals() assert.Equal(t, map[string]tuf.Principal{key1.KeyID: key1, key2.KeyID: key2}, principals) } func TestAllowRule(t *testing.T) { allowRule := AllowRule() assert.Equal(t, tuf.AllowRuleName, allowRule.Name) assert.Equal(t, []string{"*"}, allowRule.Paths) assert.True(t, allowRule.Terminating) assert.Empty(t, allowRule.PrincipalIDs) assert.Equal(t, 1, allowRule.Threshold) } gittuf-0.9.0/internal/tuf/v02/tuf.go000066400000000000000000000046771475150141000172040ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package v02 // This package defines gittuf's take on TUF metadata. There are some minor // changes, such as the addition of `custom` to delegation entries. Some of it, // however, is inspired by or cloned from the go-tuf implementation. 
import ( "fmt" "os" "github.com/gittuf/gittuf/internal/common/set" "github.com/gittuf/gittuf/internal/dev" v01 "github.com/gittuf/gittuf/internal/tuf/v01" "github.com/secure-systems-lab/go-securesystemslib/signerverifier" ) const ( AllowV02MetadataKey = "GITTUF_ALLOW_V02_POLICY" associatedIdentityKey = "(associated identity)" ) // AllowV02Metadata returns true if gittuf is in developer mode and // GITTUF_ALLOW_V02_POLICY=1. func AllowV02Metadata() bool { return dev.InDevMode() && os.Getenv(AllowV02MetadataKey) == "1" } // Key defines the structure for how public keys are stored in TUF metadata. It // implements the tuf.Principal and is used for backwards compatibility where a // Principal is always represented directly by a signing key or identity. type Key = v01.Key // NewKeyFromSSLibKey converts the signerverifier.SSLibKey into a Key object. func NewKeyFromSSLibKey(key *signerverifier.SSLibKey) *Key { k := Key(*key) return &k } type Person struct { PersonID string `json:"personID"` PublicKeys map[string]*Key `json:"keys"` AssociatedIdentities map[string]string `json:"associatedIdentities"` Custom map[string]string `json:"custom"` } func (p *Person) ID() string { return p.PersonID } func (p *Person) Keys() []*signerverifier.SSLibKey { keys := make([]*signerverifier.SSLibKey, 0, len(p.PublicKeys)) for _, key := range p.PublicKeys { key := signerverifier.SSLibKey(*key) keys = append(keys, &key) } return keys } func (p *Person) CustomMetadata() map[string]string { var metadata map[string]string for provider, identity := range p.AssociatedIdentities { if metadata == nil { metadata = map[string]string{} } metadata[fmt.Sprintf("%s %s", associatedIdentityKey, provider)] = identity } for key, value := range p.Custom { if metadata == nil { metadata = map[string]string{} } metadata[key] = value } return metadata } // Role records common characteristics recorded in a role entry in Root metadata // and in a delegation entry. 
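// A small construction sketch (the key IDs are hypothetical placeholders):
//
//	role := Role{
//		PrincipalIDs: set.NewSetFromItems("key-id-1", "key-id-2"),
//		Threshold:    2,
//	}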
type Role struct { PrincipalIDs *set.Set[string] `json:"principalIDs"` Threshold int `json:"threshold"` } gittuf-0.9.0/internal/tuf/v02/tuf_test.go000066400000000000000000000055441475150141000202350ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package v02 import ( "fmt" "testing" "github.com/gittuf/gittuf/internal/signerverifier/ssh" "github.com/secure-systems-lab/go-securesystemslib/signerverifier" "github.com/stretchr/testify/assert" ) func TestPerson(t *testing.T) { keyR := ssh.NewKeyFromBytes(t, rootPubKeyBytes) key := NewKeyFromSSLibKey(keyR) tests := map[string]struct { person *Person expectedID string expectedKeys []*signerverifier.SSLibKey expectedCustomMetadata map[string]string }{ "no custom metadata": { person: &Person{ PersonID: "jane.doe", PublicKeys: map[string]*Key{ key.KeyID: key, }, }, expectedID: "jane.doe", expectedKeys: []*signerverifier.SSLibKey{keyR}, expectedCustomMetadata: nil, }, "only associated identities": { person: &Person{ PersonID: "jane.doe", PublicKeys: map[string]*Key{ key.KeyID: key, }, AssociatedIdentities: map[string]string{ "https://github.com": "jane.doe", "https://gitlab.com": "jane.doe", }, }, expectedID: "jane.doe", expectedKeys: []*signerverifier.SSLibKey{keyR}, expectedCustomMetadata: map[string]string{ fmt.Sprintf("%s https://github.com", associatedIdentityKey): "jane.doe", fmt.Sprintf("%s https://gitlab.com", associatedIdentityKey): "jane.doe", }, }, "only custom metadata": { person: &Person{ PersonID: "jane.doe", PublicKeys: map[string]*Key{ key.KeyID: key, }, Custom: map[string]string{ "key": "value", }, }, expectedID: "jane.doe", expectedKeys: []*signerverifier.SSLibKey{keyR}, expectedCustomMetadata: map[string]string{ "key": "value", }, }, "both associated identities and custom metadata": { person: &Person{ PersonID: "jane.doe", PublicKeys: map[string]*Key{ key.KeyID: key, }, AssociatedIdentities: map[string]string{ "https://github.com": "jane.doe", "https://gitlab.com": "jane.doe", }, Custom: map[string]string{ "key": "value", }, }, expectedID: "jane.doe", expectedKeys: []*signerverifier.SSLibKey{keyR}, expectedCustomMetadata: map[string]string{ fmt.Sprintf("%s https://github.com", associatedIdentityKey): "jane.doe", fmt.Sprintf("%s https://gitlab.com", associatedIdentityKey): "jane.doe", "key": "value", }, }, } for name, test := range tests { id := test.person.ID() assert.Equal(t, test.expectedID, id, fmt.Sprintf("unexpected person ID in test '%s'", name)) keys := test.person.Keys() assert.Equal(t, test.expectedKeys, keys, fmt.Sprintf("unexpected keys in test '%s'", name)) customMetadata := test.person.CustomMetadata() assert.Equal(t, test.expectedCustomMetadata, customMetadata, fmt.Sprintf("unexpected custom metadata in test '%s'", name)) } } gittuf-0.9.0/internal/version/000077500000000000000000000000001475150141000163215ustar00rootroot00000000000000gittuf-0.9.0/internal/version/version.go000066400000000000000000000007421475150141000203400ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package version import "runtime/debug" // gitVersion records the basic version information from Git. It is typically // overwritten during a go build. 
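// For illustration only: such an override is usually injected via Go's
// -ldflags "-X" mechanism; the exact invocation used by this project's build
// is not shown here, so the command below is an assumption:
//
//	go build -ldflags "-X github.com/gittuf/gittuf/internal/version.gitVersion=v0.9.0" ./...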
var gitVersion = "devel" func GetVersion() string { buildInfo, ok := debug.ReadBuildInfo() if !ok { return "unknown" } if buildInfo.Main.Version == "(devel)" || buildInfo.Main.Version == "" { return gitVersion } return buildInfo.Main.Version } gittuf-0.9.0/main.go000066400000000000000000000020551475150141000142750ustar00rootroot00000000000000// Copyright The gittuf Authors // SPDX-License-Identifier: Apache-2.0 package main import ( "fmt" "os" "runtime/debug" "github.com/gittuf/gittuf/internal/cmd/profile" "github.com/gittuf/gittuf/internal/cmd/root" ) func main() { defer func() { if err := profile.StopProfiling(); err != nil { fmt.Fprintf(os.Stderr, "unexpected profiling error: %s\n", err.Error()) } if r := recover(); r != nil { fmt.Fprintf(os.Stderr, "unexpected error: %s\n\n", fmt.Sprint(r)) debug.PrintStack() fmt.Fprintln(os.Stderr, "\nPlease consider filing a bug on https:/github.com/gittuf/gittuf/issues with the stack trace and steps to reproduce this state. Thanks!") os.Exit(1) // this is the last possible deferred function to run } }() rootCmd := root.New() if err := rootCmd.Execute(); err != nil { // We can ignore the linter here (deferred functions are not executed // when os.Exit is invoked) because if we do have an error, we don't // have a panic, which is what the deferred function is looking for. os.Exit(1) //nolint:gocritic } }