pax_global_header00006660000000000000000000000064147735275730014536gustar00rootroot0000000000000052 comment=95810107164a2b4c76bcdad51439f61031663078 protobuf-specs-0.4.1/000077500000000000000000000000001477352757300145135ustar00rootroot00000000000000protobuf-specs-0.4.1/.gitattributes000066400000000000000000000000661477352757300174100ustar00rootroot00000000000000*.pb.go linguist-generated /gen/** linguist-generated protobuf-specs-0.4.1/.github/000077500000000000000000000000001477352757300160535ustar00rootroot00000000000000protobuf-specs-0.4.1/.github/ISSUE_TEMPLATE/000077500000000000000000000000001477352757300202365ustar00rootroot00000000000000protobuf-specs-0.4.1/.github/ISSUE_TEMPLATE/release-checklist.md000066400000000000000000000035001477352757300241450ustar00rootroot00000000000000--- name: Release Checklist about: All the tasks required to complete a release across languages title: Release v labels: '' assignees: '' --- Full release instructions are at: [RELEASE.md](/sigstore/protobuf-specs/blob/main/RELEASE.md) ## Pre Release - [ ] Check mediatype version of [Bundle](/sigstore/protobuf-specs/blob/main/protos/sigstore_bundle.proto), updating for major/minor releases - [ ] Check mediatype version of [TrustedRoot](/sigstore/protobuf-specs/blob/main/protos/sigstore_trustroot.proto), updating for major/minor releases - [ ] Update [CHANGELOG](/sigstore/protobuf-specs/blob/main/CHANGELOG.md) - [ ] Update [pyproject.toml](/sigstore/protobuf-specs/blob/main/gen/pb-python/pyproject.toml) so the `version` matches the targeted release - [ ] Update [package.json](/sigstore/protobuf-specs/blob/main/gen/pb-typescript/package.json) so the `version` matches the targeted release - [ ] Update [version.rb](/sigstore/protobuf-specs/blob/main/gen/pb-ruby/lib/sigstore_protobuf_specs/version.rb) so the `version` matches the targeted release - [ ] Update [Cargo.toml](/sigstore/protobuf-specs/blob/main/gen/pb-rust/sigstore-protobuf-specs/Cargo.toml) so the `version` matches the targeted release ## Tag Release - [ ] `v` - [ ] `release/java/v` - [ ] `release/python/v` - [ ] `release/ruby/v` - [ ] `release/rust/v` - [ ] `release/typescript/v` ## Publish Release - [ ] Java to Maven Central ## Verify Releases Published - [ ] [Java](https://central.sonatype.com/artifact/dev.sigstore/protobuf-specs/) - [ ] [Python](https://pypi.org/project/sigstore-protobuf-specs/) - [ ] [Ruby](https://rubygems.org/gems/sigstore_protobuf_specs) - [ ] [Rust](https://crates.io/crates/sigstore_protobuf_specs) - [ ] [Typescript](https://www.npmjs.com/package/@sigstore/protobuf-specs) protobuf-specs-0.4.1/.github/dependabot.yml000066400000000000000000000035431477352757300207100ustar00rootroot00000000000000# # Copyright 2023 The Sigstore Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
version: 2 updates: - package-ecosystem: "gomod" directory: "/" schedule: interval: "daily" - package-ecosystem: "github-actions" directory: "/" schedule: interval: "daily" - package-ecosystem: "gradle" directory: "/java" schedule: interval: "daily" groups: protobuf: patterns: - "*protobuf*" - package-ecosystem: "docker" directory: "/protoc-builder" schedule: interval: "monthly" - package-ecosystem: "gomod" directory: "/protoc-builder/hack/go" schedule: interval: "monthly" - package-ecosystem: "pip" directory: "/protoc-builder/hack" schedule: interval: "monthly" - package-ecosystem: "cargo" directory: "/gen/pb-rust" schedule: interval: "monthly" - package-ecosystem: "npm" directory: "/protoc-builder/hack" schedule: interval: "monthly" # this monitors Homebrew builds of protobuf compiler to monitor protobuf releases; # but still downloads the release asset from GitHub (since it is statically linked) # the "protobuf" label triggers a workflow to update versions.mk - package-ecosystem: "docker" directory: "/protoc-builder/hack" schedule: interval: "monthly" labels: - "dependencies" - "protobuf" protobuf-specs-0.4.1/.github/workflows/000077500000000000000000000000001477352757300201105ustar00rootroot00000000000000protobuf-specs-0.4.1/.github/workflows/generate.yml000066400000000000000000000027401477352757300224300ustar00rootroot00000000000000# # Copyright 2022 The Sigstore Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
name: Check generated code for changes permissions: contents: read on: push: paths: - '**.proto' - 'gen/**' - 'protoc-builder/**' pull_request: paths: - '**.proto' - 'gen/**' - 'protoc-builder/**' jobs: check_generated_protos: name: Check generated protobufs runs-on: ubuntu-latest steps: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false # clear out the generated files directory so we know we are actually # generating all the files again (instead of a subset) - name: Clear out protobuf directory run: | make clean - name: Compile protobufs run: | make all - name: Ensure no files were modified as a result of the codegen run: git update-index --refresh && git diff-index --quiet HEAD -- || git diff --exit-code protobuf-specs-0.4.1/.github/workflows/googleapis-update.yml000066400000000000000000000032751477352757300242530ustar00rootroot00000000000000name: Update Google APIs Commit Hash on: schedule: - cron: '0 0 1 * *' workflow_dispatch: permissions: {} jobs: update_protobuf_version: runs-on: ubuntu-latest permissions: contents: write pull-requests: write steps: - name: Checkout code uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: fetch-depth: 0 persist-credentials: true # zizmor: ignore[artipacked] - name: Extract latest commit hash from googleapis/googleapis and create PR env: RUN_ID: ${{ github.run_id }} GH_TOKEN: ${{ secrets.GOOGLEAPIS_SIGSTOREBOT_TOKEN }} run: | (cd /tmp && git clone --depth=1 https://github.com/googleapis/googleapis) export LATEST_COMMIT_HASH=$(cd /tmp/googleapis && git log -n 1 --format=%H) sed -i "s/^\(DEFAULT_GOOGLEAPIS_COMMIT\s*=\s*\).*/\1${LATEST_COMMIT_HASH}/" protoc-builder/versions.mk make all git config user.name "Sigstore Bot" git config user.email "86837369+sigstore-bot@users.noreply.github.com" git config --global --type bool push.autoSetupRemote true git add -A git checkout -b googleapis-${RUN_ID} git commit -sam "Update GOOGLEAPIS_COMMIT in versions.mk" git push gh pr create --title "build(deps): bump github.com/googleapis/googleapis to latest commit in protoc-builder/versions.mk" \ --body "This pull request updates the DEFAULT_GOOGLEAPIS_COMMIT variable in protoc-builder/versions.mk with the latest commit hash from the googleapis/googleapis repository." \ --base main \ --head googleapis-${RUN_ID} protobuf-specs-0.4.1/.github/workflows/gradle-wrapper-validation.yml000066400000000000000000000006511477352757300257010ustar00rootroot00000000000000name: "Validate Gradle Wrapper" on: [push, pull_request] permissions: {} jobs: validation: name: "Validation" runs-on: ubuntu-latest permissions: contents: read steps: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false - uses: gradle/actions/wrapper-validation@06832c7b30a0129d7fb559bcc6e43d26f6374244 # v4.3.1 protobuf-specs-0.4.1/.github/workflows/java-build.yml000066400000000000000000000030441477352757300226520ustar00rootroot00000000000000# # Copyright 2022 The Sigstore Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
# See the License for the specific language governing permissions and # limitations under the License. name: Check java build permissions: contents: read on: workflow_call: # allow this workflow to be called by other workflows push: paths: - '**.proto' - 'java/**' pull_request: paths: - '**.proto' - 'java/**' jobs: build: strategy: matrix: java-version: [11, 17] fail-fast: false runs-on: ubuntu-latest steps: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false - name: Set up JDK ${{ matrix.java-version }} uses: actions/setup-java@3a4f6e1af504cf6a31855fa899c6aa5355ba6c12 # v4.7.0 with: java-version: ${{ matrix.java-version }} distribution: 'temurin' - name: Setup Gradle uses: gradle/actions/setup-gradle@06832c7b30a0129d7fb559bcc6e43d26f6374244 # v4.3.1 # make sure jar generation works and tests/checks pass - name: Test run: | cd java ./gradlew build protobuf-specs-0.4.1/.github/workflows/java-release.yml000066400000000000000000000052161477352757300231760ustar00rootroot00000000000000name: Build Java Release on: push: tags: # if you change this pattern, make sure jobs.strip-tag still works - 'release/java/v[0-9]+.[0-9]+.[0-9]+' permissions: {} jobs: ci: permissions: contents: read uses: ./.github/workflows/java-build.yml strip-tag: runs-on: ubuntu-latest outputs: version: ${{ steps.version.outputs.version }} steps: - name: process tag id: version env: TAG: ${{ github.ref_name }} run: | echo "version=${TAG#"release/java/v"}" >> $GITHUB_OUTPUT build: runs-on: ubuntu-latest needs: [ci, strip-tag] permissions: contents: read # to checkout code id-token: write # to sign with sigstore steps: - name: checkout tag uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false - name: Set up JDK 11 uses: actions/setup-java@3a4f6e1af504cf6a31855fa899c6aa5355ba6c12 # v4.7.0 with: java-version: 11 distribution: 'temurin' - name: Authenticate to Google Cloud uses: google-github-actions/auth@71f986410dfbc7added4569d411d040a91dc6935 # v2.1.8 with: workload_identity_provider: projects/306323169285/locations/global/workloadIdentityPools/github-actions-pool/providers/github-actions-provider service_account: protobuf-specs-releaser@sigstore-secrets.iam.gserviceaccount.com - uses: google-github-actions/get-secretmanager-secrets@a8440875e1c2892062aef9061228d4f1af8f919b # v2.2.3 id: secrets with: secrets: |- signing_key:sigstore-secrets/sigstore-java-pgp-priv-key signing_password:sigstore-secrets/sigstore-java-pgp-priv-key-password sonatype_username:sigstore-secrets/sigstore-java-sonatype-username sonatype_password:sigstore-secrets/sigstore-java-sonatype-password - name: Build, Sign and Push to Maven Central # TODO: someone still needs to close and release this, but that can be automated next working-directory: ./java env: VERSION: ${{ needs.strip-tag.outputs.version }} ORG_GRADLE_PROJECT_signingKey: ${{ steps.secrets.outputs.signing_key }} ORG_GRADLE_PROJECT_signingPassword: ${{ steps.secrets.outputs.signing_password }} ORG_GRADLE_PROJECT_sonatypeUsername: ${{ steps.secrets.outputs.sonatype_username }} ORG_GRADLE_PROJECT_sonatypePassword: ${{ steps.secrets.outputs.sonatype_password }} run: | ./gradlew clean :publishProtoPublicationToSonatypeRepository -Pversion=${VERSION} -Prelease protobuf-specs-0.4.1/.github/workflows/protobuf-update.yml000066400000000000000000000032571477352757300237620ustar00rootroot00000000000000name: Protobuf update on: pull_request: types: [labeled] permissions: {} jobs: my_job: runs-on: 
ubuntu-latest if: github.event.label.name == 'protobuf' permissions: contents: write steps: - name: Checkout code uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: fetch-depth: 0 persist-credentials: true # zizmor: ignore[artipacked] - name: Update versions.mk with latest release version run: | export PROTOC_VERSION="$(awk -F'[:@]' '/FROM ghcr.io\/homebrew\/core\/protobuf/{print $2; exit}' protoc-builder/hack/Dockerfile.protobuf)" echo "Detected protobuf v${PROTOC_VERSION}... computing digest of artifact" export PROTOC_ZIP=$(mktemp) curl -fsSL --retry 3 -o ${PROTOC_ZIP} https://github.com/protocolbuffers/protobuf/releases/download/v${PROTOC_VERSION}/protoc-${PROTOC_VERSION}-linux-x86_64.zip export PROTOC_CHECKSUM="$(sha256sum ${PROTOC_ZIP} | awk '{print "sha256:"$1}')" sed -i 's/^\(DEFAULT_PROTOC_VERSION\s*=\s*\).*/\1'v${PROTOC_VERSION}'/' protoc-builder/versions.mk sed -i 's/^\(DEFAULT_PROTOC_CHECKSUM\s*=\s*\).*/\1'${PROTOC_CHECKSUM}'/' protoc-builder/versions.mk - name: Amend Dependabot PR env: PULL_REQUEST_HEAD_REF: ${{ github.event.pull_request.head.ref }} run: | git config user.name "github-actions[bot]" git config user.email "41898282+github-actions[bot]@users.noreply.github.com" git add -A git commit -sam "Bumping default protoc version and checksum in versions.mk" git push origin HEAD:${PULL_REQUEST_HEAD_REF} protobuf-specs-0.4.1/.github/workflows/python-build.yml000066400000000000000000000034011477352757300232470ustar00rootroot00000000000000# # Copyright 2022 The Sigstore Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
name: Check Python build permissions: contents: read on: push: paths: - '**.proto' - 'gen/pb-python/**' - 'protoc-builder/Dockerfile.python' - 'protoc-builder/versions.mk' - 'protoc-builder/hack/dev-requirements.txt' pull_request: paths: - '**.proto' - 'gen/pb-python/**' - 'protoc-builder/Dockerfile.python' - 'protoc-builder/versions.mk' - 'protoc-builder/hack/dev-requirements.txt' jobs: build: strategy: matrix: python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] fail-fast: false runs-on: ubuntu-latest steps: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0 with: python-version: ${{ matrix.python-version }} - name: Build run: | cd gen/pb-python python -m venv env && source env/bin/activate python -m pip install --upgrade pip python -m pip install .[dev] python -m build protobuf-specs-0.4.1/.github/workflows/python-release.yml000066400000000000000000000014171477352757300235750ustar00rootroot00000000000000on: push: tags: - 'release/python/v*' permissions: {} name: release Python package jobs: pypi: name: upload release to PyPI runs-on: ubuntu-latest permissions: contents: read id-token: write # required for trusted publishing to PyPI steps: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false - uses: actions/setup-python@8d9ed9ac5c53483de85588cdf95a591a75ab9f55 # v5.5.0 with: python-version: "3.x" - name: build run: | cd gen/pb-python/ make package - name: publish uses: pypa/gh-action-pypi-publish@76f52bc884231f62b9a034ebfe128415bbaabdfc # v1.12.4 with: packages-dir: gen/pb-python/dist/ protobuf-specs-0.4.1/.github/workflows/ruby-build.yml000066400000000000000000000031161477352757300227120ustar00rootroot00000000000000# # Copyright 2023 The Sigstore Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. name: Check Ruby build permissions: contents: read on: push: paths: - '**.proto' - 'gen/pb-ruby/**' - 'protoc-builder/Dockerfile.ruby' - 'protoc-builder/versions.mk' pull_request: paths: - '**.proto' - 'gen/pb-ruby/**' - 'protoc-builder/Dockerfile.ruby' - 'protoc-builder/versions.mk' jobs: build: strategy: matrix: ruby-version: - '2.7' - '3.0' - '3.1' - '3.2' fail-fast: false runs-on: ubuntu-latest defaults: run: working-directory: gen/pb-ruby steps: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false - name: Set up Ruby ${{ matrix.ruby-version }} uses: ruby/setup-ruby@354a1ad156761f5ee2b7b13fa8e09943a5e8d252 # v1.229.0 with: ruby-version: ${{ matrix.ruby-version }} - name: Build run: | gem build sigstore_protobuf_specs.gemspec protobuf-specs-0.4.1/.github/workflows/ruby-release.yml000066400000000000000000000041271477352757300232360ustar00rootroot00000000000000# # Copyright 2023 The Sigstore Authors. 
# # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. on: push: tags: - 'release/ruby/v*' permissions: {} name: release Ruby Gem jobs: publish: name: Publish to RubyGems runs-on: ubuntu-latest permissions: contents: read defaults: run: working-directory: gen/pb-ruby steps: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false - uses: ruby/setup-ruby@354a1ad156761f5ee2b7b13fa8e09943a5e8d252 # v1.229.0 with: ruby-version: '3.2' - name: Authenticate to Google Cloud uses: google-github-actions/auth@71f986410dfbc7added4569d411d040a91dc6935 # v2.1.8 with: workload_identity_provider: projects/306323169285/locations/global/workloadIdentityPools/github-actions-pool/providers/github-actions-provider service_account: protobuf-specs-releaser@sigstore-secrets.iam.gserviceaccount.com - uses: google-github-actions/get-secretmanager-secrets@a8440875e1c2892062aef9061228d4f1af8f919b # v2.2.3 id: secrets with: secrets: |- rubygems_auth_token:sigstore-secrets/protobuf-specs-rubygems-auth-token - name: Build run: | gem build sigstore_protobuf_specs.gemspec - name: Publish run: | mkdir -p $HOME/.gem printf -- "---\n:rubygems_api_key: ${RUBYGEMS_AUTH_TOKEN}\n" > $HOME/.gem/credentials chmod 0600 $HOME/.gem/credentials gem push *.gem env: RUBYGEMS_AUTH_TOKEN: "${{ steps.secrets.outputs.rubygems_auth_token }}" protobuf-specs-0.4.1/.github/workflows/rust-build.yml000066400000000000000000000035211477352757300227260ustar00rootroot00000000000000# # Copyright 2023 The Sigstore Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
name: Check Rust build permissions: contents: read on: push: paths: - "**.proto" - "gen/pb-rust/**" - "protoc-builder/Dockerfile.rust" - "protoc-builder/versions.mk" pull_request: paths: - "**.proto" - "gen/pb-rust/**" - "protoc-builder/Dockerfile.rust" - "protoc-builder/versions.mk" env: CARGO_TERM_COLOR: always jobs: build: runs-on: ubuntu-latest strategy: matrix: toolchain: - stable - beta - nightly steps: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false - run: rustup update ${{ matrix.toolchain }} && rustup default ${{ matrix.toolchain }} - run: | make rust - run: | RUST_ACTION="publish -p sigstore_protobuf_specs --dry-run" make rust test: runs-on: ubuntu-latest strategy: matrix: toolchain: - stable steps: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false - run: rustup update ${{ matrix.toolchain }} && rustup default ${{ matrix.toolchain }} - run: | make rust RUST_ACTION=test protobuf-specs-0.4.1/.github/workflows/rust-release.yml000066400000000000000000000032741477352757300232540ustar00rootroot00000000000000# # Copyright 2023 The Sigstore Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. on: push: tags: - 'release/rust/v*' permissions: {} name: release Rust crate jobs: publish: name: Publish to crates.io runs-on: ubuntu-latest permissions: contents: read steps: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false - name: Authenticate to Google Cloud uses: google-github-actions/auth@71f986410dfbc7added4569d411d040a91dc6935 # v2.1.8 with: workload_identity_provider: projects/306323169285/locations/global/workloadIdentityPools/github-actions-pool/providers/github-actions-provider service_account: protobuf-specs-releaser@sigstore-secrets.iam.gserviceaccount.com - uses: google-github-actions/get-secretmanager-secrets@a8440875e1c2892062aef9061228d4f1af8f919b # v2.2.3 id: secrets with: secrets: |- cargo_registry_token:sigstore-secrets/protobuf-specs-cargo-registry-token - run: RUST_ACTION='publish -p sigstore_protobuf_specs' make rust env: CARGO_REGISTRY_TOKEN: "${{ steps.secrets.outputs.cargo_registry_token }}" protobuf-specs-0.4.1/.github/workflows/typescript-build.yml000066400000000000000000000034411477352757300241400ustar00rootroot00000000000000# # Copyright 2023 The Sigstore Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
name: Check Typescript build permissions: contents: read on: push: paths: - '**.proto' - 'gen/pb-typescript/**' - 'protoc-builder/Dockerfile.typescript' - 'protoc-builder/hack/package*.json' - 'protoc-builder/versions.mk' pull_request: paths: - '**.proto' - 'gen/pb-typescript/**' - 'protoc-builder/Dockerfile.typescript' - 'protoc-builder/hack/package*.json' - 'protoc-builder/versions.mk' jobs: build: strategy: matrix: node-version: - 18.17.0 - 18.x - 20.5.0 - 20.x fail-fast: false runs-on: ubuntu-latest defaults: run: working-directory: gen/pb-typescript steps: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false - name: Setup node ${{ matrix.node-version }} uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4.3.0 with: node-version: ${{ matrix.node-version }} cache: npm cache-dependency-path: gen/pb-typescript/package-lock.json - name: Build run: | npm ci npm run build protobuf-specs-0.4.1/.github/workflows/typescript-publish.yml000066400000000000000000000041241477352757300245060ustar00rootroot00000000000000# # Copyright 2023 The Sigstore Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. on: push: tags: - 'release/typescript/v*' name: Release TypeScript package permissions: {} jobs: publish: name: Publish package to npmjs runs-on: ubuntu-latest permissions: contents: read id-token: write defaults: run: working-directory: gen/pb-typescript steps: - name: Checkout source uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: persist-credentials: false - name: Setup node uses: actions/setup-node@cdca7365b2dadb8aad0a33bc7601856ffabcc48e # v4.3.0 with: node-version: 18 registry-url: 'https://registry.npmjs.org' - name: Authenticate to Google Cloud uses: google-github-actions/auth@71f986410dfbc7added4569d411d040a91dc6935 # v2.1.8 with: workload_identity_provider: projects/306323169285/locations/global/workloadIdentityPools/github-actions-pool/providers/github-actions-provider service_account: protobuf-specs-releaser@sigstore-secrets.iam.gserviceaccount.com - uses: google-github-actions/get-secretmanager-secrets@a8440875e1c2892062aef9061228d4f1af8f919b # v2.2.3 id: secrets with: secrets: |- npm_publish_token:sigstore-secrets/protobuf-specs-npm-publish-token - name: Build package run: | npm ci npm run build - name: Publish package run: | npm publish --provenance --access public env: NODE_AUTH_TOKEN: ${{ steps.secrets.outputs.npm_publish_token }} protobuf-specs-0.4.1/.gitignore000066400000000000000000000000031477352757300164740ustar00rootroot00000000000000*~ protobuf-specs-0.4.1/CHANGELOG.md000066400000000000000000000160551477352757300163330ustar00rootroot00000000000000# Changelog All notable changes to `protobuf-specs` will be documented in this file. The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). All versions prior to 0.2.0 are untracked. 
## [Unreleased]

### Added

### Changed

## 0.4.0

* Announced deprecation of JSONSchema outputs from this project ([#493](https://github.com/sigstore/protobuf-specs/pull/493))

### Fixed

* Fix toolchain to write generated code as the user running the build instead of root ([#473](https://github.com/sigstore/protobuf-specs/pull/473))

### Changed

* Recreated toolchain for code generation instead of depending on third-party container image ([#469](https://github.com/sigstore/protobuf-specs/pull/469)) ([#475](https://github.com/sigstore/protobuf-specs/pull/475))
* Updated code generation tools for Go library to latest stable releases ([#476](https://github.com/sigstore/protobuf-specs/pull/476))
* Updated code generation tools for JSONSchema files to latest stable releases ([#478](https://github.com/sigstore/protobuf-specs/pull/478))
* Updated code generation tools for Python library to latest stable releases ([#479](https://github.com/sigstore/protobuf-specs/pull/479))
* Updated code generation tools for Ruby library to latest stable releases ([#481](https://github.com/sigstore/protobuf-specs/pull/481))
* Updated code generation tools for Rust library to latest stable releases ([#486](https://github.com/sigstore/protobuf-specs/pull/486))
* Updated code generation tools for Typescript library to latest stable releases ([#488](https://github.com/sigstore/protobuf-specs/pull/488))

## 0.3.3

* Allowed specifying artifact digest for verification ([#406](https://github.com/sigstore/protobuf-specs/pull/406))
* Added version to `SigningConfig` message ([#383](https://github.com/sigstore/protobuf-specs/pull/383))

### Changed

* Docs: Clarify that integration time is only trustworthy with a Signed Entry Timestamp ([#442](https://github.com/sigstore/protobuf-specs/pull/442))
* Docs: Clarify inclusion promise requirement ([#380](https://github.com/sigstore/protobuf-specs/pull/380))
* Docs: Clarify that artifact digest verification should not be used with in-toto attestations ([#461](https://github.com/sigstore/protobuf-specs/pull/461))

## 0.3.2

* Added `TransparencyLogInstance.checkpoint_key_id` as an optional key identifier for logs that generate checkpoints ([#284](https://github.com/sigstore/protobuf-specs/pull/284))

### Changed

* Docs: Clarified DSSE envelope signature cardinality ([#318](https://github.com/sigstore/protobuf-specs/pull/318))
* Docs: Clarified behavior of key identifiers ([#284](https://github.com/sigstore/protobuf-specs/pull/284))

## 0.3.1

* Added client configuration message for signing ([#277](https://github.com/sigstore/protobuf-specs/pull/277))
* Added a new format for the media type that is compatible with OCI registries ([#279](https://github.com/sigstore/protobuf-specs/pull/279))
* Added events.proto for Ruby package ([#264](https://github.com/sigstore/protobuf-specs/pull/264))
* Targeted Node16 for Typescript package ([#230](https://github.com/sigstore/protobuf-specs/pull/230))

### Changed

* Docs: Removed timestamp from checkpoint ([#247](https://github.com/sigstore/protobuf-specs/pull/247))
* Remove EXPERIMENTAL prefix from LMS schemes ([#214](https://github.com/sigstore/protobuf-specs/pull/214))

### Fixed

* Docs: Clarified trust anchor in chain ([#245](https://github.com/sigstore/protobuf-specs/pull/245))

## 0.3.0

* Options for more generic observer time ([#179](https://github.com/sigstore/protobuf-specs/pull/179))
* **BREAKING**: `VerificationMaterials.contents` now has an additional `certificate` variant, which is preferred in `0.3` bundles with the Sigstore PGI ([#191](https://github.com/sigstore/protobuf-specs/pull/191))
* Added algorithm registry documentation and updated `PublicKeyDetails` message ([#194](https://github.com/sigstore/protobuf-specs/pull/194), [#212](https://github.com/sigstore/protobuf-specs/pull/212))
  * Deterministic ECDSA is **deprecated**
  * NIST-P384 and NIST-P521 curves **added**
  * Existing (and underspecified) RSA key types are **deprecated**. New RSA key types are defined that specify the size of the public modulus and the hash algorithm. RSA now only supports the [PKCS#1](https://datatracker.ietf.org/doc/html/rfc8017#section-8.2) signature scheme and PKIX ([SubjectPublicKeyInfo](https://datatracker.ietf.org/doc/html/rfc5280#section-4.1)) encoding.
  * Experimental support for [LMS](https://datatracker.ietf.org/doc/html/rfc8554) key types.

### Changed

* Deprecated support for detached SCTs ([#188](https://github.com/sigstore/protobuf-specs/pull/188))

### Fixed

* Docs: Clarified rotation of verification materials in the trust root ([#210](https://github.com/sigstore/protobuf-specs/pull/210))

## 0.2.1

### Added

* CloudEvents proto for Rekor pub/sub messages ([#86](https://github.com/sigstore/protobuf-specs/pull/86))
* Generate jsonschema ([#112](https://github.com/sigstore/protobuf-specs/pull/112))
* Rust bindings for jsonschema ([#118](https://github.com/sigstore/protobuf-specs/pull/118))
* Dependabot to update dependencies ([#99](https://github.com/sigstore/protobuf-specs/pull/99))

### Changed

There were no changes in this release.

### Fixed

* Docs: Fixed spelling error ([#97](https://github.com/sigstore/protobuf-specs/pull/97))
* Docs: Clarified log index vs global log index ([#101](https://github.com/sigstore/protobuf-specs/pull/101))
* Docs: Clarified purpose of SET as a signed timestamp ([#100](https://github.com/sigstore/protobuf-specs/pull/100))
* Docs: Clarify message digest purpose ([#114](https://github.com/sigstore/protobuf-specs/pull/114))

### Removed

There were no removals in this release.

## 0.2.0

### Added

* Rust bindings have been added ([#88](https://github.com/sigstore/protobuf-specs/pull/88))

### Changed

* `TransparencyLogEntry.inclusion_proof` is now marked as required (was previously optional), while `TransparencyLogEntry.inclusion_promise` is now marked as optional (was previously required) ([#84](https://github.com/sigstore/protobuf-specs/pull/84))
* More Rekor messages and message fields have been marked as required ([#79](https://github.com/sigstore/protobuf-specs/pull/79))
* Ruby bindings: class names have been updated and now live in the `Sigstore::` namespace ([#87](https://github.com/sigstore/protobuf-specs/pull/87))

### Fixed

* Docs: Clarify that `TransparencyLogEntry.canonicalized_body` is optional ([#74](https://github.com/sigstore/protobuf-specs/pull/74))
* Docs: Clarify that key IDs are digests over SPKI encodings ([#73](https://github.com/sigstore/protobuf-specs/pull/73))
* Docs: Clarify that bundled certificate chains must not contain root or intermediate certificates that should be trusted out-of-band ([#77](https://github.com/sigstore/protobuf-specs/pull/77))
* Docs: Clarify `TimeRange` validity periods ([#78](https://github.com/sigstore/protobuf-specs/pull/78))

### Removed

There were no removals in this release.
protobuf-specs-0.4.1/CODE_OF_CONDUCT.md000066400000000000000000000062161477352757300173170ustar00rootroot00000000000000# Code of Conduct ## Our Pledge In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation. ## Our Standards Examples of behavior that contributes to creating a positive environment include: * Using welcoming and inclusive language * Being respectful of differing viewpoints and experiences * Gracefully accepting constructive criticism * Focusing on what is best for the community * Showing empathy towards other community members Examples of unacceptable behavior by participants include: * The use of sexualized language or imagery and unwelcome sexual attention or advances * Trolling, insulting/derogatory comments, and personal or political attacks * Public or private harassment * Publishing others' private information, such as a physical or electronic address, without explicit permission * Other conduct which could reasonably be considered inappropriate in a professional setting ## Our Responsibilities Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior. Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful. ## Scope This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers. ## Enforcement Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at . All complaints will be reviewed and investigated and will result in a response that is deemed necessary and appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately. Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership. ## Attribution This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at [http://contributor-covenant.org/version/1/4][version] [homepage]: http://contributor-covenant.org [version]: http://contributor-covenant.org/version/1/4/protobuf-specs-0.4.1/COPYRIGHT.txt000066400000000000000000000010631477352757300166240ustar00rootroot00000000000000 Copyright 2022 The Sigstore Authors. 
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. protobuf-specs-0.4.1/LICENSE000066400000000000000000000261361477352757300155300ustar00rootroot00000000000000 Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. 
For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. 
You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) 
The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. protobuf-specs-0.4.1/Makefile000066400000000000000000000163631477352757300161640ustar00rootroot00000000000000# # Copyright 2022 The Sigstore Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. PROTOC_IMAGE = protoc-base PROTOC_GO_IMAGE = protoc-go PROTOC_JSONSCHEMA_IMAGE = protoc-jsonschema PROTOC_PYTHON_IMAGE = protoc-python PROTOC_RUBY_IMAGE = protoc-ruby PROTOC_RUST_IMAGE = protoc-rust PROTOC_TYPESCRIPT_IMAGE = protoc-typescript RUST_ACTION ?= run -p sigstore-protobuf-specs-codegen PLATFORM ?= linux/amd64 UID ?= $(shell id -u) GID ?= $(shell id -g) DOCKER_BUILD = docker build --platform ${PLATFORM} --build-arg UID=${UID} DOCKER_RUN = docker run --platform ${PLATFORM} --user ${UID}:${GID} PROTOS = $(shell find protos/ -iname "*.proto" | sed 's|^|/defs/|') include protoc-builder/versions.mk # generate all language protobuf code all: go python typescript ruby jsonschema rust # generate Go protobuf code go: base-image-go @echo "Generating go proto Docker image" cd protoc-builder && ${DOCKER_BUILD} -t ${PROTOC_GO_IMAGE} -f Dockerfile.go . @echo "Generating go protobuf files" ${DOCKER_RUN} -v ${PWD}:/defs ${PROTOC_GO_IMAGE} \ -I/opt/include -I/googleapis -I/defs/protos \ --go_opt=module=github.com/sigstore/protobuf-specs/gen/pb-go --go_out=/defs/gen/pb-go ${PROTOS} python: base-image-python @echo "Generating python proto Docker image" cd protoc-builder && ${DOCKER_BUILD} -t ${PROTOC_PYTHON_IMAGE} -f Dockerfile.python . @echo "Generating python protobuf files" ${DOCKER_RUN} -v ${PWD}:/defs ${PROTOC_PYTHON_IMAGE} \ -I/opt/include -I/googleapis -I/defs/protos \ --python_betterproto_opt=pydantic_dataclasses --python_betterproto_out=/defs/gen/pb-python/sigstore_protobuf_specs ${PROTOS} typescript: base-image-typescript @echo "Generating typescript proto Docker image" cd protoc-builder && ${DOCKER_BUILD} -t ${PROTOC_TYPESCRIPT_IMAGE} -f Dockerfile.typescript . 
@echo "Generating javascript protobuf files" ${DOCKER_RUN} -v ${PWD}:/defs ${PROTOC_TYPESCRIPT_IMAGE} \ -I/opt/include -I/googleapis -I/defs/protos \ --ts_proto_out=/defs/gen/pb-typescript/src/__generated__ --ts_proto_opt=oneof=unions,forceLong=string,env=node,exportCommonSymbols=false,outputPartialMethods=false,outputEncodeMethods=false,unrecognizedEnum=false ${PROTOS} ruby: base-image-ruby @echo "Generating ruby proto Docker image" cd protoc-builder && ${DOCKER_BUILD} -t ${PROTOC_RUBY_IMAGE} -f Dockerfile.ruby . @echo "Generating ruby protobuf files" ${DOCKER_RUN} -v ${PWD}:/defs ${PROTOC_RUBY_IMAGE} \ -I/opt/include -I/googleapis -I/defs/protos --ruby_out=/defs/gen/pb-ruby/lib ${PROTOS} jsonschema: base-image-jsonschema @echo "Generating jsonschema proto Docker image" cd protoc-builder && ${DOCKER_BUILD} -t ${PROTOC_JSONSCHEMA_IMAGE} -f Dockerfile.jsonschema . @echo "Generating JSON schema files" mkdir -p gen/jsonschema/schemas ${DOCKER_RUN} -v ${PWD}:/defs ${PROTOC_JSONSCHEMA_IMAGE} \ -I/opt/include -I/googleapis -I/defs/protos \ --jsonschema_out=/defs/gen/jsonschema/schemas --jsonschema_opt=disallow_additional_properties --jsonschema_opt=enforce_oneof --jsonschema_opt=enums_as_strings_only --jsonschema_opt=file_extension=schema.json --jsonschema_opt=json_fieldnames ${PROTOS} rust: base-image-rust @echo "Generating rust proto Docker image" cd protoc-builder && ${DOCKER_BUILD} -t ${PROTOC_RUST_IMAGE} -f Dockerfile.rust . ${DOCKER_RUN} -v ${PWD}:/defs \ -e "RUST_BACKTRACE=1" -e "CARGO_REGISTRY_TOKEN" ${PROTOC_RUST_IMAGE} \ -c "cd /defs/gen/pb-rust && cargo ${RUST_ACTION}" .PHONY: base-image-go base-image-go: @echo "Building base docker image for Go" cd protoc-builder && ${DOCKER_BUILD} ${DOCKER_CACHE} -t ${PROTOC_IMAGE}:go -f Dockerfile.protoc \ --build-arg PROTOC_VERSION=${GO_PROTOC_VERSION} \ --build-arg PROTOC_CHECKSUM=${GO_PROTOC_CHECKSUM} \ --build-arg GOOGLEAPIS_COMMIT=${GO_GOOGLEAPIS_COMMIT} . .PHONY: base-image-jsonschema base-image-jsonschema: @echo "Building base docker image for jsonschema" cd protoc-builder && ${DOCKER_BUILD} ${DOCKER_CACHE} -t ${PROTOC_IMAGE}:jsonschema -f Dockerfile.protoc \ --build-arg PROTOC_VERSION=${JSONSCHEMA_PROTOC_VERSION} \ --build-arg PROTOC_CHECKSUM=${JSONSCHEMA_PROTOC_CHECKSUM} \ --build-arg GOOGLEAPIS_COMMIT=${JSONSCHEMA_GOOGLEAPIS_COMMIT} . .PHONY: base-image-python base-image-python: @echo "Building base docker image for Python" cd protoc-builder && ${DOCKER_BUILD} ${DOCKER_CACHE} -t ${PROTOC_IMAGE}:python -f Dockerfile.protoc \ --build-arg PROTOC_VERSION=${PYTHON_PROTOC_VERSION} \ --build-arg PROTOC_CHECKSUM=${PYTHON_PROTOC_CHECKSUM} \ --build-arg GOOGLEAPIS_COMMIT=${PYTHON_GOOGLEAPIS_COMMIT} . .PHONY: base-image-ruby base-image-ruby: @echo "Building base docker image for Ruby" cd protoc-builder && ${DOCKER_BUILD} ${DOCKER_CACHE} -t ${PROTOC_IMAGE}:ruby -f Dockerfile.protoc \ --build-arg PROTOC_VERSION=${RUBY_PROTOC_VERSION} \ --build-arg PROTOC_CHECKSUM=${RUBY_PROTOC_CHECKSUM} \ --build-arg GOOGLEAPIS_COMMIT=${RUBY_GOOGLEAPIS_COMMIT} . .PHONY: base-image-rust base-image-rust: @echo "Building base docker image for Rust" cd protoc-builder && ${DOCKER_BUILD} ${DOCKER_CACHE} -t ${PROTOC_IMAGE}:rust -f Dockerfile.protoc \ --build-arg PROTOC_VERSION=${RUST_PROTOC_VERSION} \ --build-arg PROTOC_CHECKSUM=${RUST_PROTOC_CHECKSUM} \ --build-arg GOOGLEAPIS_COMMIT=${RUST_GOOGLEAPIS_COMMIT} . 
.PHONY: base-image-typescript
base-image-typescript:
	@echo "Building base docker image for Typescript"
	cd protoc-builder && ${DOCKER_BUILD} ${DOCKER_CACHE} -t ${PROTOC_IMAGE}:typescript -f Dockerfile.protoc \
		--build-arg PROTOC_VERSION=${TYPESCRIPT_PROTOC_VERSION} \
		--build-arg PROTOC_CHECKSUM=${TYPESCRIPT_PROTOC_CHECKSUM} \
		--build-arg GOOGLEAPIS_COMMIT=${TYPESCRIPT_GOOGLEAPIS_COMMIT} .

# to recover from a situation where a stale layer exists, just purging the
# docker image via `make clean` is not enough. Re-building without layer
# cache is the only solution.
.PHONY: base-image-no-cache
base-image-no-cache:
	@echo "Building development docker images with disabled cache"
	@DOCKER_CACHE="--no-cache" make base-image-go
	@DOCKER_CACHE="--no-cache" make base-image-jsonschema
	@DOCKER_CACHE="--no-cache" make base-image-python
	@DOCKER_CACHE="--no-cache" make base-image-ruby
	@DOCKER_CACHE="--no-cache" make base-image-rust
	@DOCKER_CACHE="--no-cache" make base-image-typescript

# clean up generated files (not working? try sudo make clean)
clean:
	rm -rf gen/pb-go/* \
		gen/pb-typescript/src/__generated__/* \
		gen/pb-python/sigstore_protobuf_specs/dev \
		gen/pb-python/sigstore_protobuf_specs/io \
		gen/pb-rust/target \
		gen/jsonschema/schemas
	docker rmi -f ${PROTOC_IMAGE}:go ${PROTOC_GO_IMAGE} \
		${PROTOC_IMAGE}:jsonschema ${PROTOC_JSONSCHEMA_IMAGE} \
		${PROTOC_IMAGE}:python ${PROTOC_PYTHON_IMAGE} \
		${PROTOC_IMAGE}:ruby ${PROTOC_RUBY_IMAGE} \
		${PROTOC_IMAGE}:rust ${PROTOC_RUST_IMAGE} \
		${PROTOC_IMAGE}:typescript ${PROTOC_TYPESCRIPT_IMAGE}

protobuf-specs-0.4.1/README.md

# protobuf-specs

This repository holds protobuf specifications for Sigstore messages.

## Protobuf

If you change protobuf definitions, you will need to regenerate the code by running the protocol buffer compiler on the changed `.proto` files. You will need [Docker](https://docs.docker.com/get-docker/) installed to generate the protobuf stubs. Then run

```
$ make all
```

to generate the language bindings under `gen/`.

## Deprecation Notice

- Effective January 17th, 2025: the jsonschema generated files in gen/jsonschema/schemas/ are formally deprecated. They will be removed from this repository in 6 months and will no longer be built going forward. If you are using them, please open an issue on this repository and let us know.

protobuf-specs-0.4.1/RELEASE.md

# Release management for protocol buffer specifications

This repository primarily provides two features:

* Protobuf specifications for messages used within Sigstore.
* Language bindings for different ecosystems.

During a release, a few steps have to be synchronized to release the messages and the language clients.

## Notes on semantic versioning

General information on evolving protocol buffers is described [here](https://developers.google.com/protocol-buffers/docs/proto3#updating).

### Major version change

As expected, this indicates a breaking change. Any major update MUST update the package name of the generated code. Examples of breaking changes are (non-exhaustive list):

* Deletion or rename of a field.
* Changing the type of a field.
* Altering the field number (**NEVER DO THIS!**).

### Minor version change

An update which does not break the functionality of existing (older) clients. For more information on forward-compatible changes in protobuf, see the [Language Guide](https://developers.google.com/protocol-buffers/docs/proto3#updating).
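To make the distinction between a major and a minor change concrete, here is a minimal illustrative sketch; the message and field names are invented for this example and are not taken from the Sigstore protos:

```proto
syntax = "proto3";

package example.v1;

// Hypothetical message used only to illustrate compatible vs. breaking edits.
message Artifact {
  // Existing field: its number and type must never change.
  string uri = 1;

  // Minor (forward-compatible) change: a new field added with a previously
  // unused field number. Older clients simply ignore it.
  string media_type = 2;

  // Major (breaking) changes would include deleting or renaming `uri`,
  // changing its type (e.g. string -> bytes), or reusing field number 1
  // for a different field.
}
```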
### Patch version change Any update which does not change the behaviour. For the protocol buffer messages this is limited to _only_ capture changes in the comments, not the messages themselves. For language bindings patch versions MAY be used for bug-fixes. ## Releasing new versions of the messages Checklist prior to releasing: 1. Gather consensus among the community and maintainers of this repository that the messages are ready to be released. Create an issue to inform the community. The issue should describe the intended release, and any changes it introduces. The issue must be open for comments *at least* for a complete week (7 days). 1. Decide the new version of this release. The releases are versioned via [semver](https://semver.org/). 1. Two of the messages, [Bundle](https://github.com/sigstore/protobuf-specs/blob/main/protos/sigstore_bundle.proto) and [TrustedRoot](https://github.com/sigstore/protobuf-specs/blob/main/protos/sigstore_trustroot.proto), are expected to be persisted and serialized to disk, and exchanged via other mechanisms such as the [Sigstore TUF root](https://github.com/sigstore/root-signing). Therefore they contain a `media_type`. The media types are versioned, and so they must be updated appropriately according to semver. Each message SHOULD be versioned independently and so MAY differ from the targeted release. The media type represents the version of the message, not the release. Note that the media type does NOT capture the patch version, only major/minor. 1. Update [pyproject.toml](gen/pb-python/pyproject.toml) so the `version` matches the targeted release. 1. Update [package.json](gen/pb-typescript/package.json) so the `version` matches the targeted release. 1. Update [version.rb](gen/pb-ruby/lib/sigstore_protobuf_specs/version.rb) so the `version` matches the targeted release. 1. Update [Cargo.toml](gen/pb-rust/Cargo.toml) so the `version` matches the targeted release. 1. Update the [CHANGELOG](https://github.com/sigstore/protobuf-specs/blob/main/CHANGELOG.md). When all of the above are set, prepare for release by creating a tag with the following pattern: `vX.Y.Z` and push to the repository. Bonus point if the tag is signed :champagne:. ## Releasing new language bindings ### Go Prepare a tag with the pattern `vX.Y.Z` and push it. No workflow is needed. **WARNING**: Tags should not be updated to a new ref or deleted/recreated after creation. Go provides a checksum database that persists an immutable mapping between version and ref, and updating the tag will break clients that have already downloaded the release. ### Java Prepare a tag with the following pattern `release/java/vX.Y.Z` and push it. The [workflow](.github/workflows/java-build-for-release.yml) will automatically start. After the job is finished, complete the release following [java release instructions](https://github.com/sigstore/protobuf-specs/blob/main/java/README.md#releasing). ### Python Prepare a tag with the following pattern `release/python/vX.Y.Z` and push it. The [workflow](.github/workflows/python-release.yml) will automatically start. ### Ruby Prepare a tag with the following pattern `release/ruby/vX.Y.Z` and push it. The [workflow](.github/workflows/ruby-release.yml) will automatically start. ### Rust Prepare a tag with the following pattern `release/rust/vX.Y.Z` and push it. The [workflow](.github/workflows/rust-release.yml) will automatically start. ### TypeScript Prepare a tag with the following pattern `release/typescript/vX.Y.Z` and push it. 
The [workflow](.github/workflows/typescript-release.yml) will automatically start. protobuf-specs-0.4.1/docs/000077500000000000000000000000001477352757300154435ustar00rootroot00000000000000protobuf-specs-0.4.1/docs/algorithm-registry.md000066400000000000000000000076351477352757300216340ustar00rootroot00000000000000# Algorithm Registry This file is designed to act as a source of truth regarding what signing algorithms are recommended across the Sigstore ecosystem. Any changes to this file **must** be reflected in the `PublicKeyDetails` enumeration in [sigstore_common.proto](../protos/sigstore_common.proto). Note that Sigstore clients and services aren't required support all algorithms in this registry and may support algorithms that aren't in the registry. The algorithm registry is more of a guideline than a rule and is meant to serve as a secure set of defaults that the community can follow. Refer to the [Sigstore: Configurable Crypto Algorithms](https://docs.google.com/document/d/18vTKFvTQdRt3OGz6Qd1xf04o-hugRYSup-1EAOWn7MQ/) specification for the design rationale for this registry. ## Signature Algorithms | Algorithm | Name | Usage | Notes | |-----------|----------------------------|-------------| -------------------------------------------------------------------------------- | | RSA | rsa-sign-pkcs1-2048-sha256 | verify only | Not recommended. | | | rsa-sign-pkcs1-3072-sha256 | sign/verify | | | | rsa-sign-pkcs1-4096-sha256 | sign/verify | | | | rsa-sign-pss-2048-sha256 | verify only | Not recommended. | | | rsa-sign-pss-3072-sha256 | sign/verify | | | | rsa-sign-pss-4096-sha256 | sign/verify | | | ECDSA | ecdsa-sha2-256-nistp256 | sign/verify | | | | ecdsa-sha2-384-nistp384 | sign/verify | | | | ecdsa-sha2-512-nistp521 | sign/verify | | | EdDSA | ed25519 | sign/verify | | | | ed25519-ph | sign/verify | Recommended only for `hashedrekord`. | | LMS | lms-sha256 | sign/verify | Stateful; signer selects the `H` parameter. Not recommended for keyless signing. | | LM-OTS | lmots-sha256 | sign/verify | One-time use only; signer selects `n` and `w`. | ### Parameter configuration for LMS and LM-OTS LMS and LM-OTS are both hash-based signature schemes. Both require the signing party to make parameter choices during key generation. In both cases, the selected parameters are encoded in the public key representation. See [RFC 8554 S5.3](https://www.rfc-editor.org/rfc/rfc8554.html#section-5.3) for LMS and [RFC 8554 S4.3](https://www.rfc-editor.org/rfc/rfc8554.html#section-4.3) for LM-OTS public key formats. Additionally, see [RFC 8708 S4](https://www.rfc-editor.org/rfc/rfc8708.html) for `SubjectPublicKeyInfo` and `AlgorithmIdentifier` encodings for both LMS and LM-OTS public keys. ## Hash Algorithms Generally speaking, these hash algorithms are implied by the above signing suites. However, clients *may* need to list or configure them explicitly, e.g. for custom signing schemes or as part of a `hashedrekord` entry. 
| Algorithm | Name | |-----------|--------------| | SHA2 | sha2-256 | | | sha2-384 | | | sha2-512 | | SHA3 | sha3-256 | | | sha3-384 | protobuf-specs-0.4.1/gen/000077500000000000000000000000001477352757300152645ustar00rootroot00000000000000protobuf-specs-0.4.1/gen/jsonschema/000077500000000000000000000000001477352757300174165ustar00rootroot00000000000000protobuf-specs-0.4.1/gen/jsonschema/schemas/000077500000000000000000000000001477352757300210415ustar00rootroot00000000000000protobuf-specs-0.4.1/gen/jsonschema/schemas/Artifact.schema.json000066400000000000000000000062311477352757300247320ustar00rootroot00000000000000{ "$schema": "http://json-schema.org/draft-04/schema#", "$ref": "#/definitions/Artifact", "definitions": { "Artifact": { "properties": { "artifactUri": { "type": "string", "description": "Location of the artifact" }, "artifact": { "type": "string", "description": "The raw bytes of the artifact", "format": "binary", "binaryEncoding": "base64" }, "artifactDigest": { "$ref": "#/definitions/dev.sigstore.common.v1.HashOutput", "additionalProperties": false, "description": "Digest of the artifact. SHOULD NOT be used when verifying an in-toto attestation as the subject digest cannot be reconstructed. This option will not work with Ed25519 signatures, use Ed25519Ph or another algorithm instead." } }, "additionalProperties": false, "type": "object", "oneOf": [ { "required": [ "artifact_uri" ] }, { "required": [ "artifact" ] }, { "required": [ "artifact_digest" ] } ], "title": "Artifact" }, "dev.sigstore.common.v1.HashOutput": { "properties": { "algorithm": { "enum": [ "HASH_ALGORITHM_UNSPECIFIED", "SHA2_256", "SHA2_384", "SHA2_512", "SHA3_256", "SHA3_384" ], "type": "string", "title": "This package defines commonly used message types within the Sigstore\n community.", "description": "This package defines commonly used message types within the Sigstore community. Only a subset of the secure hash standard algorithms are supported. See \u003chttps://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.180-4.pdf\u003e for more details. UNSPECIFIED SHOULD not be used, primary reason for inclusion is to force any proto JSON serialization to emit the used hash algorithm, as default option is to *omit* the default value of an enum (which is the first value, represented by '0'." }, "digest": { "type": "string", "description": "This is the raw octets of the message digest as computed by the hash algorithm.", "format": "binary", "binaryEncoding": "base64" } }, "additionalProperties": false, "type": "object", "title": "Hash Output", "description": "HashOutput captures a digest of a 'message' (generic octet sequence) and the corresponding hash algorithm used." 
} } }protobuf-specs-0.4.1/gen/jsonschema/schemas/ArtifactVerificationOptions.schema.json000066400000000000000000000376151477352757300306630ustar00rootroot00000000000000{ "$schema": "http://json-schema.org/draft-04/schema#", "$ref": "#/definitions/ArtifactVerificationOptions", "definitions": { "ArtifactVerificationOptions": { "properties": { "certificateIdentities": { "$ref": "#/definitions/dev.sigstore.verification.v1.CertificateIdentities", "additionalProperties": false }, "publicKeys": { "$ref": "#/definitions/dev.sigstore.verification.v1.PublicKeyIdentities", "additionalProperties": false, "description": "To simplify verification implementation, the logic for bundle verification should be implemented as a higher-order function, where one of argument should be an interface over the set of trusted public keys, like this: `Verify(bytes artifact, bytes signature, string key_id)`. This way the caller is in full control of mapping the identified (or hinted) key in the bundle to one of the trusted keys, as this process is inherently application specific." }, "tlogOptions": { "$ref": "#/definitions/dev.sigstore.verification.v1.ArtifactVerificationOptions.TlogOptions", "additionalProperties": false, "description": "Optional options for artifact transparency log verification. If none is provided, the default verification options are: Threshold: 1 Online verification: false Disable: false" }, "ctlogOptions": { "$ref": "#/definitions/dev.sigstore.verification.v1.ArtifactVerificationOptions.CtlogOptions", "additionalProperties": false, "description": "Optional options for certificate transparency log verification. If none is provided, the default verification options are: Threshold: 1 Disable: false" }, "tsaOptions": { "$ref": "#/definitions/dev.sigstore.verification.v1.ArtifactVerificationOptions.TimestampAuthorityOptions", "additionalProperties": false, "description": "Optional options for certificate signed timestamp verification. If none is provided, the default verification options are: Threshold: 0 Disable: true" }, "integratedTsOptions": { "$ref": "#/definitions/dev.sigstore.verification.v1.ArtifactVerificationOptions.TlogIntegratedTimestampOptions", "additionalProperties": false, "description": "Optional options for integrated timestamp verification. If none is provided, the default verification options are: Threshold: 0 Disable: true" }, "observerOptions": { "$ref": "#/definitions/dev.sigstore.verification.v1.ArtifactVerificationOptions.ObserverTimestampOptions", "additionalProperties": false, "description": "Optional options for observed timestamp verification. If none is provided, the default verification options are: Threshold 1 Disable: false" } }, "additionalProperties": false, "type": "object", "oneOf": [ { "required": [ "certificate_identities" ] }, { "required": [ "public_keys" ] }, { "required": [ "tlog_options" ] }, { "required": [ "ctlog_options" ] }, { "required": [ "tsa_options" ] }, { "required": [ "integrated_ts_options" ] }, { "required": [ "observer_options" ] } ], "title": "Artifact Verification Options", "description": "A light-weight set of options/policies for identifying trusted signers, used during verification of a single artifact." 
}, "dev.sigstore.common.v1.ObjectIdentifier": { "properties": { "id": { "items": { "type": "integer" }, "type": "array" } }, "additionalProperties": false, "type": "object", "title": "Object Identifier", "description": "An ASN.1 OBJECT IDENTIFIER" }, "dev.sigstore.common.v1.ObjectIdentifierValuePair": { "properties": { "oid": { "$ref": "#/definitions/dev.sigstore.common.v1.ObjectIdentifier", "additionalProperties": false }, "value": { "type": "string", "format": "binary", "binaryEncoding": "base64" } }, "additionalProperties": false, "type": "object", "title": "Object Identifier Value Pair", "description": "An OID and the corresponding (byte) value." }, "dev.sigstore.common.v1.PublicKey": { "properties": { "rawBytes": { "type": "string", "description": "DER-encoded public key, encoding method is specified by the key_details attribute.", "format": "binary", "binaryEncoding": "base64" }, "keyDetails": { "enum": [ "PUBLIC_KEY_DETAILS_UNSPECIFIED", "PKCS1_RSA_PKCS1V5", "PKCS1_RSA_PSS", "PKIX_RSA_PKCS1V5", "PKIX_RSA_PSS", "PKIX_RSA_PKCS1V15_2048_SHA256", "PKIX_RSA_PKCS1V15_3072_SHA256", "PKIX_RSA_PKCS1V15_4096_SHA256", "PKIX_RSA_PSS_2048_SHA256", "PKIX_RSA_PSS_3072_SHA256", "PKIX_RSA_PSS_4096_SHA256", "PKIX_ECDSA_P256_HMAC_SHA_256", "PKIX_ECDSA_P256_SHA_256", "PKIX_ECDSA_P384_SHA_384", "PKIX_ECDSA_P521_SHA_512", "PKIX_ED25519", "PKIX_ED25519_PH", "PKIX_ECDSA_P384_SHA_256", "PKIX_ECDSA_P521_SHA_256", "LMS_SHA256", "LMOTS_SHA256" ], "type": "string", "title": "Public Key Details", "description": "Details of a specific public key, capturing the the key encoding method, and signature algorithm. PublicKeyDetails captures the public key/hash algorithm combinations recommended in the Sigstore ecosystem. This is modelled as a linear set as we want to provide a small number of opinionated options instead of allowing every possible permutation. Any changes to this enum MUST be reflected in the algorithm registry. See: docs/algorithm-registry.md To avoid the possibility of contradicting formats such as PKCS1 with ED25519 the valid permutations are listed as a linear set instead of a cartesian set (i.e one combined variable instead of two, one for encoding and one for the signature algorithm)." }, "validFor": { "$ref": "#/definitions/dev.sigstore.common.v1.TimeRange", "additionalProperties": false, "description": "Optional validity period for this key, *inclusive* of the endpoints." } }, "additionalProperties": false, "type": "object", "oneOf": [ { "required": [ "raw_bytes" ] }, { "required": [ "valid_for" ] } ], "title": "Public Key" }, "dev.sigstore.common.v1.SubjectAlternativeName": { "properties": { "type": { "enum": [ "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED", "EMAIL", "URI", "OTHER_NAME" ], "type": "string", "title": "Subject Alternative Name Type" }, "regexp": { "type": "string", "description": "A regular expression describing the expected value for the SAN." }, "value": { "type": "string", "description": "The exact value to match against." } }, "additionalProperties": false, "type": "object", "oneOf": [ { "required": [ "regexp" ] }, { "required": [ "value" ] } ], "title": "Subject Alternative Name" }, "dev.sigstore.common.v1.TimeRange": { "properties": { "start": { "type": "string", "format": "date-time" }, "end": { "type": "string", "format": "date-time" } }, "additionalProperties": false, "type": "object", "oneOf": [ { "required": [ "end" ] } ], "title": "Time Range", "description": "The time range is closed and includes both the start and end times, (i.e., [start, end]). 
End is optional to be able to capture a period that has started but has no known end." }, "dev.sigstore.verification.v1.ArtifactVerificationOptions.CtlogOptions": { "properties": { "threshold": { "type": "integer", "description": "The number of ct transparency logs the certificate must appear on." }, "disable": { "type": "boolean", "description": "Disable ct transparency log verification" } }, "additionalProperties": false, "type": "object", "title": "Ctlog Options" }, "dev.sigstore.verification.v1.ArtifactVerificationOptions.ObserverTimestampOptions": { "properties": { "threshold": { "type": "integer", "description": "The number of external observers of the timestamp. This is a union of RFC3161 signed timestamps, and integrated timestamps from a transparency log, that could include additional timestamp sources in the future." }, "disable": { "type": "boolean", "description": "Disable observer timestamp verification." } }, "additionalProperties": false, "type": "object", "title": "Observer Timestamp Options" }, "dev.sigstore.verification.v1.ArtifactVerificationOptions.TimestampAuthorityOptions": { "properties": { "threshold": { "type": "integer", "description": "The number of signed timestamps that are expected." }, "disable": { "type": "boolean", "description": "Disable signed timestamp verification." } }, "additionalProperties": false, "type": "object", "title": "Timestamp Authority Options" }, "dev.sigstore.verification.v1.ArtifactVerificationOptions.TlogIntegratedTimestampOptions": { "properties": { "threshold": { "type": "integer", "description": "The number of integrated timestamps that are expected." }, "disable": { "type": "boolean", "description": "Disable integrated timestamp verification." } }, "additionalProperties": false, "type": "object", "title": "Tlog Integrated Timestamp Options" }, "dev.sigstore.verification.v1.ArtifactVerificationOptions.TlogOptions": { "properties": { "threshold": { "type": "integer", "description": "Number of transparency logs the entry must appear on." }, "performOnlineVerification": { "type": "boolean", "description": "Perform an online inclusion proof." }, "disable": { "type": "boolean", "description": "Disable verification for transparency logs." } }, "additionalProperties": false, "type": "object", "title": "Tlog Options" }, "dev.sigstore.verification.v1.CertificateIdentities": { "properties": { "identities": { "items": { "$ref": "#/definitions/dev.sigstore.verification.v1.CertificateIdentity" }, "additionalProperties": false, "type": "array" } }, "additionalProperties": false, "type": "object", "title": "Certificate Identities" }, "dev.sigstore.verification.v1.CertificateIdentity": { "properties": { "issuer": { "type": "string", "description": "The X.509v3 issuer extension (OID 1.3.6.1.4.1.57264.1.1)" }, "san": { "$ref": "#/definitions/dev.sigstore.common.v1.SubjectAlternativeName", "additionalProperties": false }, "oids": { "items": { "$ref": "#/definitions/dev.sigstore.common.v1.ObjectIdentifierValuePair" }, "additionalProperties": false, "type": "array", "description": "An unordered list of OIDs that must be verified. All OID/values provided in this list MUST exactly match against the values in the certificate for verification to be successful." } }, "additionalProperties": false, "type": "object", "title": "Certificate Identity", "description": "The identity of a X.509 Certificate signer." 
}, "dev.sigstore.verification.v1.PublicKeyIdentities": { "properties": { "publicKeys": { "items": { "$ref": "#/definitions/dev.sigstore.common.v1.PublicKey" }, "additionalProperties": false, "type": "array" } }, "additionalProperties": false, "type": "object", "title": "Public Key Identities" } } }protobuf-specs-0.4.1/gen/jsonschema/schemas/Bundle.schema.json000066400000000000000000000606021477352757300244100ustar00rootroot00000000000000{ "$schema": "http://json-schema.org/draft-04/schema#", "$ref": "#/definitions/Bundle", "definitions": { "Bundle": { "properties": { "mediaType": { "type": "string", "description": "MUST be application/vnd.dev.sigstore.bundle.v0.3+json when when encoded as JSON. Clients must to be able to accept media type using the previously defined formats: * application/vnd.dev.sigstore.bundle+json;version=0.1 * application/vnd.dev.sigstore.bundle+json;version=0.2 * application/vnd.dev.sigstore.bundle+json;version=0.3" }, "verificationMaterial": { "$ref": "#/definitions/dev.sigstore.bundle.v1.VerificationMaterial", "additionalProperties": false, "description": "When a signer is identified by a X.509 certificate, a verifier MUST verify that the signature was computed at the time the certificate was valid as described in the Sigstore client spec: \"Verification using a Bundle\". \u003chttps://docs.google.com/document/d/1kbhK2qyPPk8SLavHzYSDM8-Ueul9_oxIMVFuWMWKz0E/edit#heading=h.x8bduppe89ln\u003e If the verification material contains a public key identifier (key hint) and the `content` is a DSSE envelope, the key hints MUST be exactly the same in the verification material and in the DSSE envelope." }, "messageSignature": { "$ref": "#/definitions/dev.sigstore.common.v1.MessageSignature", "additionalProperties": false }, "dsseEnvelope": { "$ref": "#/definitions/io.intoto.Envelope", "additionalProperties": false, "description": "A DSSE envelope can contain arbitrary payloads. Verifiers must verify that the payload type is a supported and expected type. This is part of the DSSE protocol which is defined here: \u003chttps://github.com/secure-systems-lab/dsse/blob/master/protocol.md\u003e DSSE envelopes in a bundle MUST have exactly one signature. This is a limitation from the DSSE spec, as it can contain multiple signatures. There are two primary reasons: 1. It simplifies the verification logic and policy 2. The bundle (currently) can only contain a single instance of the required verification materials During verification a client MUST reject an envelope if the number of signatures is not equal to one." } }, "additionalProperties": false, "type": "object", "oneOf": [ { "required": [ "message_signature" ] }, { "required": [ "dsse_envelope" ] } ], "title": "Bundle" }, "dev.sigstore.bundle.v1.TimestampVerificationData": { "properties": { "rfc3161Timestamps": { "items": { "$ref": "#/definitions/dev.sigstore.common.v1.RFC3161SignedTimestamp" }, "additionalProperties": false, "type": "array", "description": "A list of RFC3161 signed timestamps provided by the user. This can be used when the entry has not been stored on a transparency log, or in conjunction for a stronger trust model. Clients MUST verify the hashed message in the message imprint against the signature in the bundle." } }, "additionalProperties": false, "type": "object", "title": "Notes on versioning.\n The primary message ('Bundle') MUST be versioned, by populating the\n 'media_type' field. Semver-ish (only major/minor versions) scheme MUST\n be used. 
The current version as specified by this file is:\n application/vnd.dev.sigstore.bundle.v0.3+json\n The semantic version is thus '0.3'.", "description": "Notes on versioning. The primary message ('Bundle') MUST be versioned, by populating the 'media_type' field. Semver-ish (only major/minor versions) scheme MUST be used. The current version as specified by this file is: application/vnd.dev.sigstore.bundle.v0.3+json The semantic version is thus '0.3'. Various timestamped counter signatures over the artifacts signature. Currently only RFC3161 signatures are provided. More formats may be added in the future." }, "dev.sigstore.bundle.v1.VerificationMaterial": { "properties": { "publicKey": { "$ref": "#/definitions/dev.sigstore.common.v1.PublicKeyIdentifier", "additionalProperties": false }, "x509CertificateChain": { "$ref": "#/definitions/dev.sigstore.common.v1.X509CertificateChain", "additionalProperties": false }, "certificate": { "$ref": "#/definitions/dev.sigstore.common.v1.X509Certificate", "additionalProperties": false }, "tlogEntries": { "items": { "$ref": "#/definitions/dev.sigstore.rekor.v1.TransparencyLogEntry" }, "additionalProperties": false, "type": "array", "description": "An inclusion proof and an optional signed timestamp from the log. Client verification libraries MAY provide an option to support v0.1 bundles for backwards compatibility, which may contain an inclusion promise and not an inclusion proof. In this case, the client MUST validate the promise. Verifiers SHOULD NOT allow v0.1 bundles if they're used in an ecosystem which never produced them." }, "timestampVerificationData": { "$ref": "#/definitions/dev.sigstore.bundle.v1.TimestampVerificationData", "additionalProperties": false, "description": "Timestamp may also come from tlog_entries.inclusion_promise.signed_entry_timestamp." } }, "additionalProperties": false, "type": "object", "oneOf": [ { "required": [ "public_key" ] }, { "required": [ "x509_certificate_chain" ] }, { "required": [ "certificate" ] } ], "title": "Verification Material", "description": "VerificationMaterial captures details on the materials used to verify signatures. This message may be embedded in a DSSE envelope as a signature extension. Specifically, the `ext` field of the extension will expect this message when the signature extension is for Sigstore. This is identified by the `kind` field in the extension, which must be set to application/vnd.dev.sigstore.verificationmaterial;version=0.1 for Sigstore. When used as a DSSE extension, if the `public_key` field is used to indicate the key identifier, it MUST match the `keyid` field of the signature the extension is attached to." }, "dev.sigstore.common.v1.HashOutput": { "properties": { "algorithm": { "enum": [ "HASH_ALGORITHM_UNSPECIFIED", "SHA2_256", "SHA2_384", "SHA2_512", "SHA3_256", "SHA3_384" ], "type": "string", "title": "This package defines commonly used message types within the Sigstore\n community.", "description": "This package defines commonly used message types within the Sigstore community. Only a subset of the secure hash standard algorithms are supported. See \u003chttps://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.180-4.pdf\u003e for more details. UNSPECIFIED SHOULD not be used, primary reason for inclusion is to force any proto JSON serialization to emit the used hash algorithm, as default option is to *omit* the default value of an enum (which is the first value, represented by '0'." 
}, "digest": { "type": "string", "description": "This is the raw octets of the message digest as computed by the hash algorithm.", "format": "binary", "binaryEncoding": "base64" } }, "additionalProperties": false, "type": "object", "title": "Hash Output", "description": "HashOutput captures a digest of a 'message' (generic octet sequence) and the corresponding hash algorithm used." }, "dev.sigstore.common.v1.LogId": { "properties": { "keyId": { "type": "string", "description": "The unique identity of the log, represented by its public key.", "format": "binary", "binaryEncoding": "base64" } }, "additionalProperties": false, "type": "object", "title": "Log Id", "description": "LogId captures the identity of a transparency log." }, "dev.sigstore.common.v1.MessageSignature": { "properties": { "messageDigest": { "$ref": "#/definitions/dev.sigstore.common.v1.HashOutput", "additionalProperties": false, "description": "Message digest can be used to identify the artifact. Clients MUST NOT attempt to use this digest to verify the associated signature; it is intended solely for identification." }, "signature": { "type": "string", "description": "The raw bytes as returned from the signature algorithm. The signature algorithm (and so the format of the signature bytes) are determined by the contents of the 'verification_material', either a key-pair or a certificate. If using a certificate, the certificate contains the required information on the signature algorithm. When using a key pair, the algorithm MUST be part of the public key, which MUST be communicated out-of-band.", "format": "binary", "binaryEncoding": "base64" } }, "additionalProperties": false, "type": "object", "title": "Message Signature", "description": "MessageSignature stores the computed signature over a message." }, "dev.sigstore.common.v1.PublicKeyIdentifier": { "properties": { "hint": { "type": "string", "description": "Optional unauthenticated hint on which key to use. The format of the hint must be agreed upon out of band by the signer and the verifiers, and so is not subject to this specification. Example use-case is to specify the public key to use, from a trusted key-ring. Implementors are RECOMMENDED to derive the value from the public key as described in RFC 6962. See: \u003chttps://www.rfc-editor.org/rfc/rfc6962#section-3.2\u003e" } }, "additionalProperties": false, "type": "object", "title": "Public Key Identifier", "description": "PublicKeyIdentifier can be used to identify an (out of band) delivered key, to verify a signature." }, "dev.sigstore.common.v1.RFC3161SignedTimestamp": { "properties": { "signedTimestamp": { "type": "string", "description": "Signed timestamp is the DER encoded TimeStampResponse. See https://www.rfc-editor.org/rfc/rfc3161.html#section-2.4.2", "format": "binary", "binaryEncoding": "base64" } }, "additionalProperties": false, "type": "object", "title": "RFC 3161 Signed Timestamp", "description": "This message holds a RFC 3161 timestamp." }, "dev.sigstore.common.v1.X509Certificate": { "properties": { "rawBytes": { "type": "string", "description": "DER-encoded X.509 certificate.", "format": "binary", "binaryEncoding": "base64" } }, "additionalProperties": false, "type": "object", "title": "X 509 Certificate" }, "dev.sigstore.common.v1.X509CertificateChain": { "properties": { "certificates": { "items": { "$ref": "#/definitions/dev.sigstore.common.v1.X509Certificate" }, "additionalProperties": false, "type": "array", "description": "One or more DER-encoded certificates. 
In some contexts (such as `VerificationMaterial.x509_certificate_chain`), this sequence has an imposed order. Unless explicitly specified, there is otherwise no guaranteed order." } }, "additionalProperties": false, "type": "object", "title": "X 509 Certificate Chain", "description": "A collection of X.509 certificates. This \"chain\" can be used in multiple contexts, such as providing a root CA certificate within a TUF root of trust or multiple untrusted certificates for the purpose of chain building." }, "dev.sigstore.rekor.v1.Checkpoint": { "properties": { "envelope": { "type": "string" } }, "additionalProperties": false, "type": "object", "title": "Checkpoint", "description": "The checkpoint MUST contain an origin string as a unique log identifier, the tree size, and the root hash. It MAY also be followed by optional data, and clients MUST NOT assume optional data. The checkpoint MUST also contain a signature over the root hash (tree head). The checkpoint MAY contain additional signatures, but the first SHOULD be the signature from the log. Checkpoint contents are concatenated with newlines into a single string. The checkpoint format is described in https://github.com/transparency-dev/formats/blob/main/log/README.md and https://github.com/C2SP/C2SP/blob/main/tlog-checkpoint.md. An example implementation can be found in https://github.com/sigstore/rekor/blob/main/pkg/util/signed_note.go" }, "dev.sigstore.rekor.v1.InclusionPromise": { "properties": { "signedEntryTimestamp": { "type": "string", "format": "binary", "binaryEncoding": "base64" } }, "additionalProperties": false, "type": "object", "title": "Inclusion Promise", "description": "The inclusion promise is calculated by Rekor. It's calculated as a signature over a canonical JSON serialization of the persisted entry, the log ID, log index and the integration timestamp. See https://github.com/sigstore/rekor/blob/a6e58f72b6b18cc06cefe61808efd562b9726330/pkg/api/entries.go#L54 The format of the signature depends on the transparency log's public key. If the signature algorithm requires a hash function and/or a signature scheme (e.g. RSA) those has to be retrieved out-of-band from the log's operators, together with the public key. This is used to verify the integration timestamp's value and that the log has promised to include the entry." }, "dev.sigstore.rekor.v1.InclusionProof": { "properties": { "logIndex": { "type": "string", "description": "The index of the entry in the tree it was written to." }, "rootHash": { "type": "string", "description": "The hash digest stored at the root of the merkle tree at the time the proof was generated.", "format": "binary", "binaryEncoding": "base64" }, "treeSize": { "type": "string", "description": "The size of the merkle tree at the time the proof was generated." }, "hashes": { "items": { "type": "string" }, "type": "array", "description": "A list of hashes required to compute the inclusion proof, sorted in order from leaf to root. Note that leaf and root hashes are not included. The root hash is available separately in this message, and the leaf hash should be calculated by the client.", "format": "binary", "binaryEncoding": "base64" }, "checkpoint": { "$ref": "#/definitions/dev.sigstore.rekor.v1.Checkpoint", "additionalProperties": false, "description": "Signature of the tree head, as of the time of this proof was generated. See above info on 'Checkpoint' for more details." 
} }, "additionalProperties": false, "type": "object", "title": "Inclusion Proof", "description": "InclusionProof is the proof returned from the transparency log. Can be used for offline or online verification against the log." }, "dev.sigstore.rekor.v1.KindVersion": { "properties": { "kind": { "type": "string", "description": "Kind is the type of entry being stored in the log. See here for a list: https://github.com/sigstore/rekor/tree/main/pkg/types" }, "version": { "type": "string", "description": "The specific api version of the type." } }, "additionalProperties": false, "type": "object", "title": "Kind Version", "description": "KindVersion contains the entry's kind and api version." }, "dev.sigstore.rekor.v1.TransparencyLogEntry": { "properties": { "logIndex": { "type": "string", "description": "The global index of the entry, used when querying the log by index." }, "logId": { "$ref": "#/definitions/dev.sigstore.common.v1.LogId", "additionalProperties": false, "description": "The unique identifier of the log." }, "kindVersion": { "$ref": "#/definitions/dev.sigstore.rekor.v1.KindVersion", "additionalProperties": false, "description": "The kind (type) and version of the object associated with this entry. These values are required to construct the entry during verification." }, "integratedTime": { "type": "string", "description": "The UNIX timestamp from the log when the entry was persisted. The integration time MUST NOT be trusted if inclusion_promise is omitted." }, "inclusionPromise": { "$ref": "#/definitions/dev.sigstore.rekor.v1.InclusionPromise", "additionalProperties": false, "description": "The inclusion promise/signed entry timestamp from the log. Required for v0.1 bundles, and MUST be verified. Optional for \u003e= v0.2 bundles if another suitable source of time is present (such as another source of signed time, or the current system time for long-lived certificates). MUST be verified if no other suitable source of time is present, and SHOULD be verified otherwise." }, "inclusionProof": { "$ref": "#/definitions/dev.sigstore.rekor.v1.InclusionProof", "additionalProperties": false, "description": "The inclusion proof can be used for offline or online verification that the entry was appended to the log, and that the log has not been altered." }, "canonicalizedBody": { "type": "string", "description": "Optional. The canonicalized transparency log entry, used to reconstruct the Signed Entry Timestamp (SET) during verification. The contents of this field are the same as the `body` field in a Rekor response, meaning that it does **not** include the \"full\" canonicalized form (of log index, ID, etc.) which are exposed as separate fields. The verifier is responsible for combining the `canonicalized_body`, `log_index`, `log_id`, and `integrated_time` into the payload that the SET's signature is generated over. This field is intended to be used in cases where the SET cannot be produced determinisitically (e.g. inconsistent JSON field ordering, differing whitespace, etc). If set, clients MUST verify that the signature referenced in the `canonicalized_body` matches the signature provided in the `Bundle.content`. 
If not set, clients are responsible for constructing an equivalent payload from other sources to verify the signature.", "format": "binary", "binaryEncoding": "base64" } }, "additionalProperties": false, "type": "object", "title": "Transparency Log Entry", "description": "TransparencyLogEntry captures all the details required from Rekor to reconstruct an entry, given that the payload is provided via other means. This type can easily be created from the existing response from Rekor. Future iterations could rely on Rekor returning the minimal set of attributes (excluding the payload) that are required for verifying the inclusion promise. The inclusion promise (called SignedEntryTimestamp in the response from Rekor) is similar to a Signed Certificate Timestamp as described here https://www.rfc-editor.org/rfc/rfc6962.html#section-3.2." }, "io.intoto.Envelope": { "properties": { "payload": { "type": "string", "description": "Message to be signed. (In JSON, this is encoded as base64.) REQUIRED.", "format": "binary", "binaryEncoding": "base64" }, "payloadType": { "type": "string", "description": "String unambiguously identifying how to interpret payload. REQUIRED." }, "signatures": { "items": { "$ref": "#/definitions/io.intoto.Signature" }, "additionalProperties": false, "type": "array", "description": "Signature over: PAE(type, payload) Where PAE is defined as: PAE(type, payload) = \"DSSEv1\" + SP + LEN(type) + SP + type + SP + LEN(payload) + SP + payload + = concatenation SP = ASCII space [0x20] \"DSSEv1\" = ASCII [0x44, 0x53, 0x53, 0x45, 0x76, 0x31] LEN(s) = ASCII decimal encoding of the byte length of s, with no leading zeros REQUIRED (length \u003e= 1)." } }, "additionalProperties": false, "type": "object", "title": "Envelope", "description": "An authenticated message of arbitrary type." }, "io.intoto.Signature": { "properties": { "sig": { "type": "string", "description": "Signature itself. (In JSON, this is encoded as base64.) REQUIRED.", "format": "binary", "binaryEncoding": "base64" }, "keyid": { "type": "string", "description": "*Unauthenticated* hint identifying which public key was used. OPTIONAL." } }, "additionalProperties": false, "type": "object", "title": "Signature" } } }protobuf-specs-0.4.1/gen/jsonschema/schemas/CertificateAuthority.schema.json000066400000000000000000000113331477352757300273270ustar00rootroot00000000000000{ "$schema": "http://json-schema.org/draft-04/schema#", "$ref": "#/definitions/CertificateAuthority", "definitions": { "CertificateAuthority": { "properties": { "subject": { "$ref": "#/definitions/dev.sigstore.common.v1.DistinguishedName", "additionalProperties": false, "description": "The root certificate MUST be self-signed, and so the subject and issuer are the same." }, "uri": { "type": "string", "description": "The URI identifies the certificate authority. It is RECOMMENDED that the URI is the base URL for the certificate authority, that can be provided to any SDK/client provided by the certificate authority to interact with the certificate authority." }, "certChain": { "$ref": "#/definitions/dev.sigstore.common.v1.X509CertificateChain", "additionalProperties": false, "description": "The certificate chain for this CA. The last certificate in the chain MUST be the trust anchor. The trust anchor MAY be a self-signed root CA certificate or MAY be an intermediate CA certificate." }, "validFor": { "$ref": "#/definitions/dev.sigstore.common.v1.TimeRange", "additionalProperties": false, "description": "The time the *entire* chain was valid. 
This is at max the longest interval when *all* certificates in the chain were valid, but it MAY be shorter. Clients MUST check timestamps against *both* the `valid_for` time range *and* the entire certificate chain. The TimeRange should be considered valid *inclusive* of the endpoints." } }, "additionalProperties": false, "type": "object", "title": "Certificate Authority", "description": "CertificateAuthority enlists the information required to identify which CA to use and perform signature verification." }, "dev.sigstore.common.v1.DistinguishedName": { "properties": { "organization": { "type": "string" }, "commonName": { "type": "string" } }, "additionalProperties": false, "type": "object", "title": "Distinguished Name" }, "dev.sigstore.common.v1.TimeRange": { "properties": { "start": { "type": "string", "format": "date-time" }, "end": { "type": "string", "format": "date-time" } }, "additionalProperties": false, "type": "object", "oneOf": [ { "required": [ "end" ] } ], "title": "Time Range", "description": "The time range is closed and includes both the start and end times, (i.e., [start, end]). End is optional to be able to capture a period that has started but has no known end." }, "dev.sigstore.common.v1.X509Certificate": { "properties": { "rawBytes": { "type": "string", "description": "DER-encoded X.509 certificate.", "format": "binary", "binaryEncoding": "base64" } }, "additionalProperties": false, "type": "object", "title": "X 509 Certificate" }, "dev.sigstore.common.v1.X509CertificateChain": { "properties": { "certificates": { "items": { "$ref": "#/definitions/dev.sigstore.common.v1.X509Certificate" }, "additionalProperties": false, "type": "array", "description": "One or more DER-encoded certificates. In some contexts (such as `VerificationMaterial.x509_certificate_chain`), this sequence has an imposed order. Unless explicitly specified, there is otherwise no guaranteed order." } }, "additionalProperties": false, "type": "object", "title": "X 509 Certificate Chain", "description": "A collection of X.509 certificates. This \"chain\" can be used in multiple contexts, such as providing a root CA certificate within a TUF root of trust or multiple untrusted certificates for the purpose of chain building." } } }protobuf-specs-0.4.1/gen/jsonschema/schemas/CertificateIdentities.schema.json000066400000000000000000000100631477352757300274370ustar00rootroot00000000000000{ "$schema": "http://json-schema.org/draft-04/schema#", "$ref": "#/definitions/CertificateIdentities", "definitions": { "CertificateIdentities": { "properties": { "identities": { "items": { "$ref": "#/definitions/dev.sigstore.verification.v1.CertificateIdentity" }, "additionalProperties": false, "type": "array" } }, "additionalProperties": false, "type": "object", "title": "Certificate Identities" }, "dev.sigstore.common.v1.ObjectIdentifier": { "properties": { "id": { "items": { "type": "integer" }, "type": "array" } }, "additionalProperties": false, "type": "object", "title": "Object Identifier", "description": "An ASN.1 OBJECT IDENTIFIER" }, "dev.sigstore.common.v1.ObjectIdentifierValuePair": { "properties": { "oid": { "$ref": "#/definitions/dev.sigstore.common.v1.ObjectIdentifier", "additionalProperties": false }, "value": { "type": "string", "format": "binary", "binaryEncoding": "base64" } }, "additionalProperties": false, "type": "object", "title": "Object Identifier Value Pair", "description": "An OID and the corresponding (byte) value." 
}, "dev.sigstore.common.v1.SubjectAlternativeName": { "properties": { "type": { "enum": [ "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED", "EMAIL", "URI", "OTHER_NAME" ], "type": "string", "title": "Subject Alternative Name Type" }, "regexp": { "type": "string", "description": "A regular expression describing the expected value for the SAN." }, "value": { "type": "string", "description": "The exact value to match against." } }, "additionalProperties": false, "type": "object", "oneOf": [ { "required": [ "regexp" ] }, { "required": [ "value" ] } ], "title": "Subject Alternative Name" }, "dev.sigstore.verification.v1.CertificateIdentity": { "properties": { "issuer": { "type": "string", "description": "The X.509v3 issuer extension (OID 1.3.6.1.4.1.57264.1.1)" }, "san": { "$ref": "#/definitions/dev.sigstore.common.v1.SubjectAlternativeName", "additionalProperties": false }, "oids": { "items": { "$ref": "#/definitions/dev.sigstore.common.v1.ObjectIdentifierValuePair" }, "additionalProperties": false, "type": "array", "description": "An unordered list of OIDs that must be verified. All OID/values provided in this list MUST exactly match against the values in the certificate for verification to be successful." } }, "additionalProperties": false, "type": "object", "title": "Certificate Identity", "description": "The identity of a X.509 Certificate signer." } } }protobuf-specs-0.4.1/gen/jsonschema/schemas/CertificateIdentity.schema.json000066400000000000000000000070441477352757300271340ustar00rootroot00000000000000{ "$schema": "http://json-schema.org/draft-04/schema#", "$ref": "#/definitions/CertificateIdentity", "definitions": { "CertificateIdentity": { "properties": { "issuer": { "type": "string", "description": "The X.509v3 issuer extension (OID 1.3.6.1.4.1.57264.1.1)" }, "san": { "$ref": "#/definitions/dev.sigstore.common.v1.SubjectAlternativeName", "additionalProperties": false }, "oids": { "items": { "$ref": "#/definitions/dev.sigstore.common.v1.ObjectIdentifierValuePair" }, "additionalProperties": false, "type": "array", "description": "An unordered list of OIDs that must be verified. All OID/values provided in this list MUST exactly match against the values in the certificate for verification to be successful." } }, "additionalProperties": false, "type": "object", "title": "Certificate Identity", "description": "The identity of a X.509 Certificate signer." }, "dev.sigstore.common.v1.ObjectIdentifier": { "properties": { "id": { "items": { "type": "integer" }, "type": "array" } }, "additionalProperties": false, "type": "object", "title": "Object Identifier", "description": "An ASN.1 OBJECT IDENTIFIER" }, "dev.sigstore.common.v1.ObjectIdentifierValuePair": { "properties": { "oid": { "$ref": "#/definitions/dev.sigstore.common.v1.ObjectIdentifier", "additionalProperties": false }, "value": { "type": "string", "format": "binary", "binaryEncoding": "base64" } }, "additionalProperties": false, "type": "object", "title": "Object Identifier Value Pair", "description": "An OID and the corresponding (byte) value." }, "dev.sigstore.common.v1.SubjectAlternativeName": { "properties": { "type": { "enum": [ "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED", "EMAIL", "URI", "OTHER_NAME" ], "type": "string", "title": "Subject Alternative Name Type" }, "regexp": { "type": "string", "description": "A regular expression describing the expected value for the SAN." }, "value": { "type": "string", "description": "The exact value to match against." 
} }, "additionalProperties": false, "type": "object", "oneOf": [ { "required": [ "regexp" ] }, { "required": [ "value" ] } ], "title": "Subject Alternative Name" } } }protobuf-specs-0.4.1/gen/jsonschema/schemas/Checkpoint.schema.json000066400000000000000000000021761477352757300252700ustar00rootroot00000000000000{ "$schema": "http://json-schema.org/draft-04/schema#", "$ref": "#/definitions/Checkpoint", "definitions": { "Checkpoint": { "properties": { "envelope": { "type": "string" } }, "additionalProperties": false, "type": "object", "title": "Checkpoint", "description": "The checkpoint MUST contain an origin string as a unique log identifier, the tree size, and the root hash. It MAY also be followed by optional data, and clients MUST NOT assume optional data. The checkpoint MUST also contain a signature over the root hash (tree head). The checkpoint MAY contain additional signatures, but the first SHOULD be the signature from the log. Checkpoint contents are concatenated with newlines into a single string. The checkpoint format is described in https://github.com/transparency-dev/formats/blob/main/log/README.md and https://github.com/C2SP/C2SP/blob/main/tlog-checkpoint.md. An example implementation can be found in https://github.com/sigstore/rekor/blob/main/pkg/util/signed_note.go" } } }protobuf-specs-0.4.1/gen/jsonschema/schemas/ClientTrustConfig.schema.json000066400000000000000000000606261477352757300266130ustar00rootroot00000000000000{ "$schema": "http://json-schema.org/draft-04/schema#", "$ref": "#/definitions/ClientTrustConfig", "definitions": { "ClientTrustConfig": { "properties": { "mediaType": { "type": "string", "description": "MUST be application/vnd.dev.sigstore.clienttrustconfig.v0.1+json" }, "trustedRoot": { "$ref": "#/definitions/dev.sigstore.trustroot.v1.TrustedRoot", "additionalProperties": false, "description": "The root of trust, which MUST be present." }, "signingConfig": { "$ref": "#/definitions/dev.sigstore.trustroot.v1.SigningConfig", "additionalProperties": false, "description": "Configuration for signing clients, which MUST be present." } }, "additionalProperties": false, "type": "object", "title": "Client Trust Config", "description": "ClientTrustConfig describes the complete state needed by a client to perform both signing and verification operations against a particular instance of Sigstore." }, "dev.sigstore.common.v1.DistinguishedName": { "properties": { "organization": { "type": "string" }, "commonName": { "type": "string" } }, "additionalProperties": false, "type": "object", "title": "Distinguished Name" }, "dev.sigstore.common.v1.LogId": { "properties": { "keyId": { "type": "string", "description": "The unique identity of the log, represented by its public key.", "format": "binary", "binaryEncoding": "base64" } }, "additionalProperties": false, "type": "object", "title": "Log Id", "description": "LogId captures the identity of a transparency log." 
}, "dev.sigstore.common.v1.PublicKey": { "properties": { "rawBytes": { "type": "string", "description": "DER-encoded public key, encoding method is specified by the key_details attribute.", "format": "binary", "binaryEncoding": "base64" }, "keyDetails": { "enum": [ "PUBLIC_KEY_DETAILS_UNSPECIFIED", "PKCS1_RSA_PKCS1V5", "PKCS1_RSA_PSS", "PKIX_RSA_PKCS1V5", "PKIX_RSA_PSS", "PKIX_RSA_PKCS1V15_2048_SHA256", "PKIX_RSA_PKCS1V15_3072_SHA256", "PKIX_RSA_PKCS1V15_4096_SHA256", "PKIX_RSA_PSS_2048_SHA256", "PKIX_RSA_PSS_3072_SHA256", "PKIX_RSA_PSS_4096_SHA256", "PKIX_ECDSA_P256_HMAC_SHA_256", "PKIX_ECDSA_P256_SHA_256", "PKIX_ECDSA_P384_SHA_384", "PKIX_ECDSA_P521_SHA_512", "PKIX_ED25519", "PKIX_ED25519_PH", "PKIX_ECDSA_P384_SHA_256", "PKIX_ECDSA_P521_SHA_256", "LMS_SHA256", "LMOTS_SHA256" ], "type": "string", "title": "Public Key Details", "description": "Details of a specific public key, capturing the the key encoding method, and signature algorithm. PublicKeyDetails captures the public key/hash algorithm combinations recommended in the Sigstore ecosystem. This is modelled as a linear set as we want to provide a small number of opinionated options instead of allowing every possible permutation. Any changes to this enum MUST be reflected in the algorithm registry. See: docs/algorithm-registry.md To avoid the possibility of contradicting formats such as PKCS1 with ED25519 the valid permutations are listed as a linear set instead of a cartesian set (i.e one combined variable instead of two, one for encoding and one for the signature algorithm)." }, "validFor": { "$ref": "#/definitions/dev.sigstore.common.v1.TimeRange", "additionalProperties": false, "description": "Optional validity period for this key, *inclusive* of the endpoints." } }, "additionalProperties": false, "type": "object", "oneOf": [ { "required": [ "raw_bytes" ] }, { "required": [ "valid_for" ] } ], "title": "Public Key" }, "dev.sigstore.common.v1.TimeRange": { "properties": { "start": { "type": "string", "format": "date-time" }, "end": { "type": "string", "format": "date-time" } }, "additionalProperties": false, "type": "object", "oneOf": [ { "required": [ "end" ] } ], "title": "Time Range", "description": "The time range is closed and includes both the start and end times, (i.e., [start, end]). End is optional to be able to capture a period that has started but has no known end." }, "dev.sigstore.common.v1.X509Certificate": { "properties": { "rawBytes": { "type": "string", "description": "DER-encoded X.509 certificate.", "format": "binary", "binaryEncoding": "base64" } }, "additionalProperties": false, "type": "object", "title": "X 509 Certificate" }, "dev.sigstore.common.v1.X509CertificateChain": { "properties": { "certificates": { "items": { "$ref": "#/definitions/dev.sigstore.common.v1.X509Certificate" }, "additionalProperties": false, "type": "array", "description": "One or more DER-encoded certificates. In some contexts (such as `VerificationMaterial.x509_certificate_chain`), this sequence has an imposed order. Unless explicitly specified, there is otherwise no guaranteed order." } }, "additionalProperties": false, "type": "object", "title": "X 509 Certificate Chain", "description": "A collection of X.509 certificates. This \"chain\" can be used in multiple contexts, such as providing a root CA certificate within a TUF root of trust or multiple untrusted certificates for the purpose of chain building." 
}, "dev.sigstore.trustroot.v1.CertificateAuthority": { "properties": { "subject": { "$ref": "#/definitions/dev.sigstore.common.v1.DistinguishedName", "additionalProperties": false, "description": "The root certificate MUST be self-signed, and so the subject and issuer are the same." }, "uri": { "type": "string", "description": "The URI identifies the certificate authority. It is RECOMMENDED that the URI is the base URL for the certificate authority, that can be provided to any SDK/client provided by the certificate authority to interact with the certificate authority." }, "certChain": { "$ref": "#/definitions/dev.sigstore.common.v1.X509CertificateChain", "additionalProperties": false, "description": "The certificate chain for this CA. The last certificate in the chain MUST be the trust anchor. The trust anchor MAY be a self-signed root CA certificate or MAY be an intermediate CA certificate." }, "validFor": { "$ref": "#/definitions/dev.sigstore.common.v1.TimeRange", "additionalProperties": false, "description": "The time the *entire* chain was valid. This is at max the longest interval when *all* certificates in the chain were valid, but it MAY be shorter. Clients MUST check timestamps against *both* the `valid_for` time range *and* the entire certificate chain. The TimeRange should be considered valid *inclusive* of the endpoints." } }, "additionalProperties": false, "type": "object", "title": "Certificate Authority", "description": "CertificateAuthority enlists the information required to identify which CA to use and perform signature verification." }, "dev.sigstore.trustroot.v1.Service": { "properties": { "url": { "type": "string", "description": "URL of the service. MUST include scheme and authority. MAY include path." }, "majorApiVersion": { "type": "integer", "description": "Specifies the major API version. A value of 0 represents a service that has not yet been released." }, "validFor": { "$ref": "#/definitions/dev.sigstore.common.v1.TimeRange", "additionalProperties": false, "description": "Validity period of a service. A service that has only a start date SHOULD be considered the most recent instance of that service, but the client MUST NOT assume there is only one valid instance. The TimeRange MUST be considered valid *inclusive* of the endpoints." } }, "additionalProperties": false, "type": "object", "title": "Service", "description": "Service represents an instance of a service that is a part of Sigstore infrastructure. Clients MUST use the API version hint to determine the service with the highest API version that the client is compatible with. Clients MUST also only connect to services within the specified validity period and that has the newest validity start date." }, "dev.sigstore.trustroot.v1.ServiceConfiguration": { "properties": { "selector": { "enum": [ "SERVICE_SELECTOR_UNDEFINED", "ALL", "ANY", "EXACT" ], "type": "string", "title": "Service Selector", "description": "ServiceSelector specifies how a client SHOULD select a set of Services to connect to. A client SHOULD throw an error if the value is SERVICE_SELECTOR_UNDEFINED." }, "count": { "type": "integer", "description": "count specifies the number of Services the client should use. Only used when selector is set to EXACT, and count MUST be greater than 0. count MUST be less than or equal to the number of Services." 
} }, "additionalProperties": false, "type": "object", "title": "Service Configuration", "description": "ServiceConfiguration specifies how a client should select a set of Services to connect to, along with a count when a specific number of Services is requested." }, "dev.sigstore.trustroot.v1.SigningConfig": { "properties": { "mediaType": { "type": "string", "description": "MUST be application/vnd.dev.sigstore.signingconfig.v0.2+json Clients MAY choose to also support application/vnd.dev.sigstore.signingconfig.v0.1+json" }, "caUrls": { "items": { "$ref": "#/definitions/dev.sigstore.trustroot.v1.Service" }, "additionalProperties": false, "type": "array", "description": "URLs to Fulcio-compatible CAs, capable of receiving Certificate Signing Requests (CSRs) and responding with issued certificates. These URLs MUST be the \"base\" URL for the CAs, which clients should construct an appropriate CSR endpoint on top of. For example, if a CA URL is `https://example.com/ca`, then the client MAY construct the CSR endpoint as `https://example.com/ca/api/v2/signingCert`. Clients MUST select only one Service with the highest API version that the client is compatible with, that is within its validity period, and has the newest validity start date. Client SHOULD select the first Service that meets this requirement. All listed Services SHOULD be sorted by the `valid_for` window in descending order, with the newest instance first." }, "oidcUrls": { "items": { "$ref": "#/definitions/dev.sigstore.trustroot.v1.Service" }, "additionalProperties": false, "type": "array", "description": "URLs to OpenID Connect identity providers. These URLs MUST be the \"base\" URLs for the OIDC IdPs, which clients should perform well-known OpenID Connect discovery against. Clients MUST select only one Service with the highest API version that the client is compatible with, that is within its validity period, and has the newest validity start date. Client SHOULD select the first Service that meets this requirement. All listed Services SHOULD be sorted by the `valid_for` window in descending order, with the newest instance first." }, "rekorTlogUrls": { "items": { "$ref": "#/definitions/dev.sigstore.trustroot.v1.Service" }, "additionalProperties": false, "type": "array", "description": "URLs to Rekor transparency logs. These URL MUST be the \"base\" URLs for the transparency logs, which clients should construct appropriate API endpoints on top of. Clients MUST select Services with the highest API version that the client is compatible with, that are within its validity period, and have the newest validity start dates. All listed Services SHOULD be sorted by the `valid_for` window in descending order, with the newest instance first. Clients MUST select Services based on the selector value of `rekor_tlog_config`." }, "rekorTlogConfig": { "$ref": "#/definitions/dev.sigstore.trustroot.v1.ServiceConfiguration", "additionalProperties": false, "description": "Specifies how a client should select the set of Rekor transparency logs to write to." }, "tsaUrls": { "items": { "$ref": "#/definitions/dev.sigstore.trustroot.v1.Service" }, "additionalProperties": false, "type": "array", "description": "URLs to RFC 3161 Time Stamping Authorities (TSA). These URLs MUST be the *full* URL for the TSA, meaning that it should be suitable for submitting Time Stamp Requests (TSRs) to via HTTP, per RFC 3161. 
Clients MUST select Services with the highest API version that the client is compatible with, that are within its validity period, and have the newest validity start dates. All listed Services SHOULD be sorted by the `valid_for` window in descending order, with the newest instance first. Clients MUST select Services based on the selector value of `tsa_config`." }, "tsaConfig": { "$ref": "#/definitions/dev.sigstore.trustroot.v1.ServiceConfiguration", "additionalProperties": false, "description": "Specifies how a client should select the set of TSAs to request signed timestamps from." } }, "additionalProperties": false, "type": "object", "title": "Signing Config", "description": "SigningConfig represents the trusted entities/state needed by Sigstore signing. In particular, it primarily contains service URLs that a Sigstore signer may need to connect to for the online aspects of signing." }, "dev.sigstore.trustroot.v1.TransparencyLogInstance": { "properties": { "baseUrl": { "type": "string", "description": "The base URL at which can be used to URLs for the client." }, "hashAlgorithm": { "enum": [ "HASH_ALGORITHM_UNSPECIFIED", "SHA2_256", "SHA2_384", "SHA2_512", "SHA3_256", "SHA3_384" ], "type": "string", "title": "This package defines commonly used message types within the Sigstore\n community.", "description": "This package defines commonly used message types within the Sigstore community. Only a subset of the secure hash standard algorithms are supported. See \u003chttps://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.180-4.pdf\u003e for more details. UNSPECIFIED SHOULD not be used, primary reason for inclusion is to force any proto JSON serialization to emit the used hash algorithm, as default option is to *omit* the default value of an enum (which is the first value, represented by '0'." }, "publicKey": { "$ref": "#/definitions/dev.sigstore.common.v1.PublicKey", "additionalProperties": false, "description": "The public key used to verify signatures generated by the log. This attribute contains the signature algorithm used by the log." }, "logId": { "$ref": "#/definitions/dev.sigstore.common.v1.LogId", "additionalProperties": false, "description": "The unique identifier for this transparency log. Represented as the SHA-256 hash of the log's public key, calculated over the DER encoding of the key represented as SubjectPublicKeyInfo. See https://www.rfc-editor.org/rfc/rfc6962#section-3.2" }, "checkpointKeyId": { "$ref": "#/definitions/dev.sigstore.common.v1.LogId", "additionalProperties": false, "description": "The checkpoint key identifier for the log used in a checkpoint. Optional, not provided for logs that do not generate checkpoints. For logs that do generate checkpoints, if not set, assume log_id equals checkpoint_key_id. Follows the specification described here for ECDSA and Ed25519 signatures: https://github.com/C2SP/C2SP/blob/main/signed-note.md#signatures For RSA signatures, the key ID will match the ECDSA format, the hashed DER-encoded SPKI public key. Publicly witnessed logs MUST NOT use RSA-signed checkpoints, since witnesses do not support RSA signatures. This is provided for convenience. Clients can also calculate the checkpoint key ID given the log's public key. SHOULD be set for logs generating Ed25519 signatures. SHOULD be 4 bytes long, as a truncated hash." } }, "additionalProperties": false, "type": "object", "title": "Transparency Log Instance", "description": "TransparencyLogInstance describes the immutable parameters from a transparency log. 
See https://www.rfc-editor.org/rfc/rfc9162.html#name-log-parameters for more details. The included parameters are the minimal set required to identify a log, and verify an inclusion proof/promise." }, "dev.sigstore.trustroot.v1.TrustedRoot": { "properties": { "mediaType": { "type": "string", "description": "MUST be application/vnd.dev.sigstore.trustedroot.v0.1+json when encoded as JSON. Clients MUST be able to process and parse content with the media type defined in the old format: application/vnd.dev.sigstore.trustedroot+json;version=0.1" }, "tlogs": { "items": { "$ref": "#/definitions/dev.sigstore.trustroot.v1.TransparencyLogInstance" }, "additionalProperties": false, "type": "array", "description": "A set of trusted Rekor servers." }, "certificateAuthorities": { "items": { "$ref": "#/definitions/dev.sigstore.trustroot.v1.CertificateAuthority" }, "additionalProperties": false, "type": "array", "description": "A set of trusted certificate authorities (e.g. Fulcio), and any intermediate certificates they provide. If a CA issues multiple intermediate certificates, each combination shall be represented as a separate chain. I.e., a single root cert may appear in multiple chains but with different intermediate and/or leaf certificates. The certificates are intended to be used for verifying artifact signatures." }, "ctlogs": { "items": { "$ref": "#/definitions/dev.sigstore.trustroot.v1.TransparencyLogInstance" }, "additionalProperties": false, "type": "array", "description": "A set of trusted certificate transparency logs." }, "timestampAuthorities": { "items": { "$ref": "#/definitions/dev.sigstore.trustroot.v1.CertificateAuthority" }, "additionalProperties": false, "type": "array", "description": "A set of trusted timestamping authorities." } }, "additionalProperties": false, "type": "object", "title": "Trusted Root", "description": "TrustedRoot describes the client's complete set of trusted entities. How the TrustedRoot is populated is not specified, but can be a combination of many sources such as TUF repositories, files on disk etc. The TrustedRoot is not meant to be used for any artifact verification, only to capture the complete/global set of trusted verification materials. When verifying an artifact, based on the artifact and policies, a selection of keys/authorities are expected to be extracted and provided to the verification function. This way the set of keys/authorities can be kept to a minimal set by the policy to gain better control over which signatures are allowed. The embedded transparency logs, CT logs, CAs and TSAs MUST include any previously used instance -- otherwise signatures made in the past cannot be verified. All the listed instances SHOULD be sorted by the 'valid_for' in ascending order, that is, the oldest instance first. Only the last instance is allowed to have its 'end' timestamp unset. All previous instances MUST have a closed interval of validity. The last instance MAY have a closed interval. Clients MUST accept instances that overlap in time; if not, clients may experience problems during rotations of verification materials. To be able to manage planned rotations of either transparency logs or certificate authorities, clients MUST accept lists of instances where the last instance has a 'valid_for' that extends into the future. This should not be a problem as clients SHOULD first seek the trust root for a suitable instance before creating a per-artifact trust root (that is, a sub-set of the complete trust root) that is used for verification."
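As a companion to the ordering and validity rules above, here is a minimal sketch of filtering certificate authorities by the inclusive 'valid_for' interval. The JSON field names (certificateAuthorities, validFor, start, end) come from this schema; the helper names and the dict-based representation are assumptions for illustration, not part of any generated API.

from datetime import datetime

def _parse(ts: str) -> datetime:
    # JSON date-time values are RFC 3339 strings; normalize a trailing 'Z'.
    return datetime.fromisoformat(ts.replace("Z", "+00:00"))

def valid_at(valid_for: dict, moment: datetime) -> bool:
    # Closed interval [start, end]; 'end' may be absent for the newest instance.
    # 'moment' should be timezone-aware so comparisons are well defined.
    start, end = valid_for.get("start"), valid_for.get("end")
    if start and moment < _parse(start):
        return False
    if end and moment > _parse(end):
        return False
    return True

def authorities_for(trusted_root: dict, signing_time: datetime) -> list:
    # Instances may overlap in time, so more than one match is acceptable.
    return [ca for ca in trusted_root.get("certificateAuthorities", [])
            if valid_at(ca.get("validFor", {}), signing_time)]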
} } }protobuf-specs-0.4.1/gen/jsonschema/schemas/CloudEvent.schema.json000066400000000000000000000137501477352757300252510ustar00rootroot00000000000000{ "$schema": "http://json-schema.org/draft-04/schema#", "$ref": "#/definitions/CloudEvent", "definitions": { "CloudEvent": { "properties": { "id": { "type": "string", "title": "-- CloudEvent Context Attributes", "description": "-- CloudEvent Context Attributes Required Attributes" }, "source": { "type": "string", "description": "URI-reference" }, "specVersion": { "type": "string" }, "type": { "type": "string" }, "attributes": { "additionalProperties": { "$ref": "#/definitions/dev.sigstore.events.v1.CloudEvent.CloudEventAttributeValue", "additionalProperties": false }, "type": "object", "description": "Optional \u0026 Extension Attributes" }, "binaryData": { "type": "string", "format": "binary", "binaryEncoding": "base64" }, "textData": { "type": "string" }, "protoData": { "properties": { "typeUrl": { "type": "string", "description": "A URL/resource name that uniquely identifies the type of the serialized protocol buffer message. This string must contain at least one \"/\" character. The last segment of the URL's path must represent the fully qualified name of the type (as in `path/google.protobuf.Duration`). The name should be in a canonical form (e.g., leading \".\" is not accepted). In practice, teams usually precompile into the binary all types that they expect it to use in the context of Any. However, for URLs which use the scheme `http`, `https`, or no scheme, one can optionally set up a type server that maps type URLs to message definitions as follows: * If no scheme is provided, `https` is assumed. * An HTTP GET on the URL must yield a [google.protobuf.Type][] value in binary format, or produce an error. * Applications are allowed to cache lookup results based on the URL, or have them precompiled into a binary to avoid any lookup. Therefore, binary compatibility needs to be preserved on changes to types. (Use versioned type names to manage breaking changes.) Note: this functionality is not currently available in the official protobuf release, and it is not used for type URLs beginning with type.googleapis.com. As of May 2023, there are no widely used type server implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics." 
}, "value": { "type": "string", "description": "Must be a valid serialized protocol buffer of the above specified type.", "format": "binary", "binaryEncoding": "base64" } }, "additionalProperties": false, "type": "object" } }, "additionalProperties": false, "type": "object", "oneOf": [ { "required": [ "binary_data" ] }, { "required": [ "text_data" ] }, { "required": [ "proto_data" ] } ], "title": "Cloud Event" }, "dev.sigstore.events.v1.CloudEvent.CloudEventAttributeValue": { "properties": { "ceBoolean": { "type": "boolean" }, "ceInteger": { "type": "integer" }, "ceString": { "type": "string" }, "ceBytes": { "type": "string", "format": "binary", "binaryEncoding": "base64" }, "ceUri": { "type": "string" }, "ceUriRef": { "type": "string" }, "ceTimestamp": { "type": "string", "format": "date-time" } }, "additionalProperties": false, "type": "object", "oneOf": [ { "required": [ "ce_boolean" ] }, { "required": [ "ce_integer" ] }, { "required": [ "ce_string" ] }, { "required": [ "ce_bytes" ] }, { "required": [ "ce_uri" ] }, { "required": [ "ce_uri_ref" ] }, { "required": [ "ce_timestamp" ] } ], "title": "*\n The CloudEvent specification defines\n seven attribute value types...", "description": "* The CloudEvent specification defines seven attribute value types..." } } }protobuf-specs-0.4.1/gen/jsonschema/schemas/CloudEventBatch.schema.json000066400000000000000000000150521477352757300262100ustar00rootroot00000000000000{ "$schema": "http://json-schema.org/draft-04/schema#", "$ref": "#/definitions/CloudEventBatch", "definitions": { "CloudEventBatch": { "properties": { "events": { "items": { "$ref": "#/definitions/dev.sigstore.events.v1.CloudEvent" }, "additionalProperties": false, "type": "array" } }, "additionalProperties": false, "type": "object", "title": "*\n CloudEvent Protobuf Batch Format", "description": "* CloudEvent Protobuf Batch Format" }, "dev.sigstore.events.v1.CloudEvent": { "properties": { "id": { "type": "string", "title": "-- CloudEvent Context Attributes", "description": "-- CloudEvent Context Attributes Required Attributes" }, "source": { "type": "string", "description": "URI-reference" }, "specVersion": { "type": "string" }, "type": { "type": "string" }, "attributes": { "additionalProperties": { "$ref": "#/definitions/dev.sigstore.events.v1.CloudEvent.CloudEventAttributeValue", "additionalProperties": false }, "type": "object", "description": "Optional \u0026 Extension Attributes" }, "binaryData": { "type": "string", "format": "binary", "binaryEncoding": "base64" }, "textData": { "type": "string" }, "protoData": { "properties": { "typeUrl": { "type": "string", "description": "A URL/resource name that uniquely identifies the type of the serialized protocol buffer message. This string must contain at least one \"/\" character. The last segment of the URL's path must represent the fully qualified name of the type (as in `path/google.protobuf.Duration`). The name should be in a canonical form (e.g., leading \".\" is not accepted). In practice, teams usually precompile into the binary all types that they expect it to use in the context of Any. However, for URLs which use the scheme `http`, `https`, or no scheme, one can optionally set up a type server that maps type URLs to message definitions as follows: * If no scheme is provided, `https` is assumed. * An HTTP GET on the URL must yield a [google.protobuf.Type][] value in binary format, or produce an error. 
* Applications are allowed to cache lookup results based on the URL, or have them precompiled into a binary to avoid any lookup. Therefore, binary compatibility needs to be preserved on changes to types. (Use versioned type names to manage breaking changes.) Note: this functionality is not currently available in the official protobuf release, and it is not used for type URLs beginning with type.googleapis.com. As of May 2023, there are no widely used type server implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics." }, "value": { "type": "string", "description": "Must be a valid serialized protocol buffer of the above specified type.", "format": "binary", "binaryEncoding": "base64" } }, "additionalProperties": false, "type": "object" } }, "additionalProperties": false, "type": "object", "oneOf": [ { "required": [ "binary_data" ] }, { "required": [ "text_data" ] }, { "required": [ "proto_data" ] } ], "title": "Cloud Event" }, "dev.sigstore.events.v1.CloudEvent.CloudEventAttributeValue": { "properties": { "ceBoolean": { "type": "boolean" }, "ceInteger": { "type": "integer" }, "ceString": { "type": "string" }, "ceBytes": { "type": "string", "format": "binary", "binaryEncoding": "base64" }, "ceUri": { "type": "string" }, "ceUriRef": { "type": "string" }, "ceTimestamp": { "type": "string", "format": "date-time" } }, "additionalProperties": false, "type": "object", "oneOf": [ { "required": [ "ce_boolean" ] }, { "required": [ "ce_integer" ] }, { "required": [ "ce_string" ] }, { "required": [ "ce_bytes" ] }, { "required": [ "ce_uri" ] }, { "required": [ "ce_uri_ref" ] }, { "required": [ "ce_timestamp" ] } ], "title": "*\n The CloudEvent specification defines\n seven attribute value types...", "description": "* The CloudEvent specification defines seven attribute value types..." } } }protobuf-specs-0.4.1/gen/jsonschema/schemas/DistinguishedName.schema.json000066400000000000000000000007771477352757300266120ustar00rootroot00000000000000{ "$schema": "http://json-schema.org/draft-04/schema#", "$ref": "#/definitions/DistinguishedName", "definitions": { "DistinguishedName": { "properties": { "organization": { "type": "string" }, "commonName": { "type": "string" } }, "additionalProperties": false, "type": "object", "title": "Distinguished Name" } } }protobuf-specs-0.4.1/gen/jsonschema/schemas/Envelope.schema.json000066400000000000000000000042551477352757300247560ustar00rootroot00000000000000{ "$schema": "http://json-schema.org/draft-04/schema#", "$ref": "#/definitions/Envelope", "definitions": { "Envelope": { "properties": { "payload": { "type": "string", "description": "Message to be signed. (In JSON, this is encoded as base64.) REQUIRED.", "format": "binary", "binaryEncoding": "base64" }, "payloadType": { "type": "string", "description": "String unambiguously identifying how to interpret payload. REQUIRED." }, "signatures": { "items": { "$ref": "#/definitions/io.intoto.Signature" }, "additionalProperties": false, "type": "array", "description": "Signature over: PAE(type, payload) Where PAE is defined as: PAE(type, payload) = \"DSSEv1\" + SP + LEN(type) + SP + type + SP + LEN(payload) + SP + payload + = concatenation SP = ASCII space [0x20] \"DSSEv1\" = ASCII [0x44, 0x53, 0x53, 0x45, 0x76, 0x31] LEN(s) = ASCII decimal encoding of the byte length of s, with no leading zeros REQUIRED (length \u003e= 1)." 
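The PAE construction spelled out in the 'signatures' description above is mechanical enough to show directly. This is a sketch of that encoding only, not of full DSSE signing or verification; the function name is illustrative.

def pae(payload_type: str, payload: bytes) -> bytes:
    # PAE(type, payload) = "DSSEv1" SP LEN(type) SP type SP LEN(payload) SP payload,
    # where SP is a single ASCII space and LEN is the byte length in ASCII decimal.
    type_bytes = payload_type.encode("utf-8")
    return b" ".join([
        b"DSSEv1",
        str(len(type_bytes)).encode("ascii"),
        type_bytes,
        str(len(payload)).encode("ascii"),
        payload,
    ])

# The signature in 'signatures' is then computed over, for example:
# pae("application/vnd.in-toto+json", payload_bytes)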
} }, "additionalProperties": false, "type": "object", "title": "Envelope", "description": "An authenticated message of arbitrary type." }, "io.intoto.Signature": { "properties": { "sig": { "type": "string", "description": "Signature itself. (In JSON, this is encoded as base64.) REQUIRED.", "format": "binary", "binaryEncoding": "base64" }, "keyid": { "type": "string", "description": "*Unauthenticated* hint identifying which public key was used. OPTIONAL." } }, "additionalProperties": false, "type": "object", "title": "Signature" } } }protobuf-specs-0.4.1/gen/jsonschema/schemas/HashOutput.schema.json000066400000000000000000000033311477352757300252770ustar00rootroot00000000000000{ "$schema": "http://json-schema.org/draft-04/schema#", "$ref": "#/definitions/HashOutput", "definitions": { "HashOutput": { "properties": { "algorithm": { "enum": [ "HASH_ALGORITHM_UNSPECIFIED", "SHA2_256", "SHA2_384", "SHA2_512", "SHA3_256", "SHA3_384" ], "type": "string", "title": "This package defines commonly used message types within the Sigstore\n community.", "description": "This package defines commonly used message types within the Sigstore community. Only a subset of the secure hash standard algorithms are supported. See \u003chttps://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.180-4.pdf\u003e for more details. UNSPECIFIED SHOULD not be used, primary reason for inclusion is to force any proto JSON serialization to emit the used hash algorithm, as default option is to *omit* the default value of an enum (which is the first value, represented by '0'." }, "digest": { "type": "string", "description": "This is the raw octets of the message digest as computed by the hash algorithm.", "format": "binary", "binaryEncoding": "base64" } }, "additionalProperties": false, "type": "object", "title": "Hash Output", "description": "HashOutput captures a digest of a 'message' (generic octet sequence) and the corresponding hash algorithm used." } } }protobuf-specs-0.4.1/gen/jsonschema/schemas/InclusionPromise.schema.json000066400000000000000000000022711477352757300264770ustar00rootroot00000000000000{ "$schema": "http://json-schema.org/draft-04/schema#", "$ref": "#/definitions/InclusionPromise", "definitions": { "InclusionPromise": { "properties": { "signedEntryTimestamp": { "type": "string", "format": "binary", "binaryEncoding": "base64" } }, "additionalProperties": false, "type": "object", "title": "Inclusion Promise", "description": "The inclusion promise is calculated by Rekor. It's calculated as a signature over a canonical JSON serialization of the persisted entry, the log ID, log index and the integration timestamp. See https://github.com/sigstore/rekor/blob/a6e58f72b6b18cc06cefe61808efd562b9726330/pkg/api/entries.go#L54 The format of the signature depends on the transparency log's public key. If the signature algorithm requires a hash function and/or a signature scheme (e.g. RSA) those has to be retrieved out-of-band from the log's operators, together with the public key. This is used to verify the integration timestamp's value and that the log has promised to include the entry." } } }protobuf-specs-0.4.1/gen/jsonschema/schemas/InclusionProof.schema.json000066400000000000000000000060221477352757300261440ustar00rootroot00000000000000{ "$schema": "http://json-schema.org/draft-04/schema#", "$ref": "#/definitions/InclusionProof", "definitions": { "InclusionProof": { "properties": { "logIndex": { "type": "string", "description": "The index of the entry in the tree it was written to." 
}, "rootHash": { "type": "string", "description": "The hash digest stored at the root of the merkle tree at the time the proof was generated.", "format": "binary", "binaryEncoding": "base64" }, "treeSize": { "type": "string", "description": "The size of the merkle tree at the time the proof was generated." }, "hashes": { "items": { "type": "string" }, "type": "array", "description": "A list of hashes required to compute the inclusion proof, sorted in order from leaf to root. Note that leaf and root hashes are not included. The root hash is available separately in this message, and the leaf hash should be calculated by the client.", "format": "binary", "binaryEncoding": "base64" }, "checkpoint": { "$ref": "#/definitions/dev.sigstore.rekor.v1.Checkpoint", "additionalProperties": false, "description": "Signature of the tree head, as of the time of this proof was generated. See above info on 'Checkpoint' for more details." } }, "additionalProperties": false, "type": "object", "title": "Inclusion Proof", "description": "InclusionProof is the proof returned from the transparency log. Can be used for offline or online verification against the log." }, "dev.sigstore.rekor.v1.Checkpoint": { "properties": { "envelope": { "type": "string" } }, "additionalProperties": false, "type": "object", "title": "Checkpoint", "description": "The checkpoint MUST contain an origin string as a unique log identifier, the tree size, and the root hash. It MAY also be followed by optional data, and clients MUST NOT assume optional data. The checkpoint MUST also contain a signature over the root hash (tree head). The checkpoint MAY contain additional signatures, but the first SHOULD be the signature from the log. Checkpoint contents are concatenated with newlines into a single string. The checkpoint format is described in https://github.com/transparency-dev/formats/blob/main/log/README.md and https://github.com/C2SP/C2SP/blob/main/tlog-checkpoint.md. An example implementation can be found in https://github.com/sigstore/rekor/blob/main/pkg/util/signed_note.go" } } }protobuf-specs-0.4.1/gen/jsonschema/schemas/Input.schema.json000066400000000000000000001524251477352757300243030ustar00rootroot00000000000000{ "$schema": "http://json-schema.org/draft-04/schema#", "$ref": "#/definitions/Input", "definitions": { "Input": { "properties": { "artifactTrustRoot": { "$ref": "#/definitions/dev.sigstore.trustroot.v1.TrustedRoot", "additionalProperties": false, "description": "The verification materials provided during a bundle verification. The running process is usually preloaded with a \"global\" dev.sisgtore.trustroot.TrustedRoot.v1 instance. Prior to verifying an artifact (i.e a bundle), and/or based on current policy, some selection is expected to happen, to filter out the exact certificate authority to use, which transparency logs are relevant etc. The result should b ecaptured in the `artifact_trust_root`." }, "artifactVerificationOptions": { "$ref": "#/definitions/dev.sigstore.verification.v1.ArtifactVerificationOptions", "additionalProperties": false }, "bundle": { "$ref": "#/definitions/dev.sigstore.bundle.v1.Bundle", "additionalProperties": false }, "artifact": { "$ref": "#/definitions/dev.sigstore.verification.v1.Artifact", "additionalProperties": false, "description": "If the bundle contains a message signature, the artifact must be provided." 
} }, "additionalProperties": false, "type": "object", "oneOf": [ { "required": [ "artifact" ] } ], "title": "Input", "description": "Input captures all that is needed to call the bundle verification method, to verify a single artifact referenced by the bundle." }, "dev.sigstore.bundle.v1.Bundle": { "properties": { "mediaType": { "type": "string", "description": "MUST be application/vnd.dev.sigstore.bundle.v0.3+json when when encoded as JSON. Clients must to be able to accept media type using the previously defined formats: * application/vnd.dev.sigstore.bundle+json;version=0.1 * application/vnd.dev.sigstore.bundle+json;version=0.2 * application/vnd.dev.sigstore.bundle+json;version=0.3" }, "verificationMaterial": { "$ref": "#/definitions/dev.sigstore.bundle.v1.VerificationMaterial", "additionalProperties": false, "description": "When a signer is identified by a X.509 certificate, a verifier MUST verify that the signature was computed at the time the certificate was valid as described in the Sigstore client spec: \"Verification using a Bundle\". \u003chttps://docs.google.com/document/d/1kbhK2qyPPk8SLavHzYSDM8-Ueul9_oxIMVFuWMWKz0E/edit#heading=h.x8bduppe89ln\u003e If the verification material contains a public key identifier (key hint) and the `content` is a DSSE envelope, the key hints MUST be exactly the same in the verification material and in the DSSE envelope." }, "messageSignature": { "$ref": "#/definitions/dev.sigstore.common.v1.MessageSignature", "additionalProperties": false }, "dsseEnvelope": { "$ref": "#/definitions/io.intoto.Envelope", "additionalProperties": false, "description": "A DSSE envelope can contain arbitrary payloads. Verifiers must verify that the payload type is a supported and expected type. This is part of the DSSE protocol which is defined here: \u003chttps://github.com/secure-systems-lab/dsse/blob/master/protocol.md\u003e DSSE envelopes in a bundle MUST have exactly one signature. This is a limitation from the DSSE spec, as it can contain multiple signatures. There are two primary reasons: 1. It simplifies the verification logic and policy 2. The bundle (currently) can only contain a single instance of the required verification materials During verification a client MUST reject an envelope if the number of signatures is not equal to one." } }, "additionalProperties": false, "type": "object", "oneOf": [ { "required": [ "message_signature" ] }, { "required": [ "dsse_envelope" ] } ], "title": "Bundle" }, "dev.sigstore.bundle.v1.TimestampVerificationData": { "properties": { "rfc3161Timestamps": { "items": { "$ref": "#/definitions/dev.sigstore.common.v1.RFC3161SignedTimestamp" }, "additionalProperties": false, "type": "array", "description": "A list of RFC3161 signed timestamps provided by the user. This can be used when the entry has not been stored on a transparency log, or in conjunction for a stronger trust model. Clients MUST verify the hashed message in the message imprint against the signature in the bundle." } }, "additionalProperties": false, "type": "object", "title": "Notes on versioning.\n The primary message ('Bundle') MUST be versioned, by populating the\n 'media_type' field. Semver-ish (only major/minor versions) scheme MUST\n be used. The current version as specified by this file is:\n application/vnd.dev.sigstore.bundle.v0.3+json\n The semantic version is thus '0.3'.", "description": "Notes on versioning. The primary message ('Bundle') MUST be versioned, by populating the 'media_type' field. Semver-ish (only major/minor versions) scheme MUST be used. 
The current version as specified by this file is: application/vnd.dev.sigstore.bundle.v0.3+json The semantic version is thus '0.3'. Various timestamped counter signatures over the artifacts signature. Currently only RFC3161 signatures are provided. More formats may be added in the future." }, "dev.sigstore.bundle.v1.VerificationMaterial": { "properties": { "publicKey": { "$ref": "#/definitions/dev.sigstore.common.v1.PublicKeyIdentifier", "additionalProperties": false }, "x509CertificateChain": { "$ref": "#/definitions/dev.sigstore.common.v1.X509CertificateChain", "additionalProperties": false }, "certificate": { "$ref": "#/definitions/dev.sigstore.common.v1.X509Certificate", "additionalProperties": false }, "tlogEntries": { "items": { "$ref": "#/definitions/dev.sigstore.rekor.v1.TransparencyLogEntry" }, "additionalProperties": false, "type": "array", "description": "An inclusion proof and an optional signed timestamp from the log. Client verification libraries MAY provide an option to support v0.1 bundles for backwards compatibility, which may contain an inclusion promise and not an inclusion proof. In this case, the client MUST validate the promise. Verifiers SHOULD NOT allow v0.1 bundles if they're used in an ecosystem which never produced them." }, "timestampVerificationData": { "$ref": "#/definitions/dev.sigstore.bundle.v1.TimestampVerificationData", "additionalProperties": false, "description": "Timestamp may also come from tlog_entries.inclusion_promise.signed_entry_timestamp." } }, "additionalProperties": false, "type": "object", "oneOf": [ { "required": [ "public_key" ] }, { "required": [ "x509_certificate_chain" ] }, { "required": [ "certificate" ] } ], "title": "Verification Material", "description": "VerificationMaterial captures details on the materials used to verify signatures. This message may be embedded in a DSSE envelope as a signature extension. Specifically, the `ext` field of the extension will expect this message when the signature extension is for Sigstore. This is identified by the `kind` field in the extension, which must be set to application/vnd.dev.sigstore.verificationmaterial;version=0.1 for Sigstore. When used as a DSSE extension, if the `public_key` field is used to indicate the key identifier, it MUST match the `keyid` field of the signature the extension is attached to." }, "dev.sigstore.common.v1.DistinguishedName": { "properties": { "organization": { "type": "string" }, "commonName": { "type": "string" } }, "additionalProperties": false, "type": "object", "title": "Distinguished Name" }, "dev.sigstore.common.v1.HashOutput": { "properties": { "algorithm": { "enum": [ "HASH_ALGORITHM_UNSPECIFIED", "SHA2_256", "SHA2_384", "SHA2_512", "SHA3_256", "SHA3_384" ], "type": "string", "title": "This package defines commonly used message types within the Sigstore\n community.", "description": "This package defines commonly used message types within the Sigstore community. Only a subset of the secure hash standard algorithms are supported. See \u003chttps://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.180-4.pdf\u003e for more details. UNSPECIFIED SHOULD not be used, primary reason for inclusion is to force any proto JSON serialization to emit the used hash algorithm, as default option is to *omit* the default value of an enum (which is the first value, represented by '0'." 
}, "digest": { "type": "string", "description": "This is the raw octets of the message digest as computed by the hash algorithm.", "format": "binary", "binaryEncoding": "base64" } }, "additionalProperties": false, "type": "object", "title": "Hash Output", "description": "HashOutput captures a digest of a 'message' (generic octet sequence) and the corresponding hash algorithm used." }, "dev.sigstore.common.v1.LogId": { "properties": { "keyId": { "type": "string", "description": "The unique identity of the log, represented by its public key.", "format": "binary", "binaryEncoding": "base64" } }, "additionalProperties": false, "type": "object", "title": "Log Id", "description": "LogId captures the identity of a transparency log." }, "dev.sigstore.common.v1.MessageSignature": { "properties": { "messageDigest": { "$ref": "#/definitions/dev.sigstore.common.v1.HashOutput", "additionalProperties": false, "description": "Message digest can be used to identify the artifact. Clients MUST NOT attempt to use this digest to verify the associated signature; it is intended solely for identification." }, "signature": { "type": "string", "description": "The raw bytes as returned from the signature algorithm. The signature algorithm (and so the format of the signature bytes) are determined by the contents of the 'verification_material', either a key-pair or a certificate. If using a certificate, the certificate contains the required information on the signature algorithm. When using a key pair, the algorithm MUST be part of the public key, which MUST be communicated out-of-band.", "format": "binary", "binaryEncoding": "base64" } }, "additionalProperties": false, "type": "object", "title": "Message Signature", "description": "MessageSignature stores the computed signature over a message." }, "dev.sigstore.common.v1.ObjectIdentifier": { "properties": { "id": { "items": { "type": "integer" }, "type": "array" } }, "additionalProperties": false, "type": "object", "title": "Object Identifier", "description": "An ASN.1 OBJECT IDENTIFIER" }, "dev.sigstore.common.v1.ObjectIdentifierValuePair": { "properties": { "oid": { "$ref": "#/definitions/dev.sigstore.common.v1.ObjectIdentifier", "additionalProperties": false }, "value": { "type": "string", "format": "binary", "binaryEncoding": "base64" } }, "additionalProperties": false, "type": "object", "title": "Object Identifier Value Pair", "description": "An OID and the corresponding (byte) value." }, "dev.sigstore.common.v1.PublicKey": { "properties": { "rawBytes": { "type": "string", "description": "DER-encoded public key, encoding method is specified by the key_details attribute.", "format": "binary", "binaryEncoding": "base64" }, "keyDetails": { "enum": [ "PUBLIC_KEY_DETAILS_UNSPECIFIED", "PKCS1_RSA_PKCS1V5", "PKCS1_RSA_PSS", "PKIX_RSA_PKCS1V5", "PKIX_RSA_PSS", "PKIX_RSA_PKCS1V15_2048_SHA256", "PKIX_RSA_PKCS1V15_3072_SHA256", "PKIX_RSA_PKCS1V15_4096_SHA256", "PKIX_RSA_PSS_2048_SHA256", "PKIX_RSA_PSS_3072_SHA256", "PKIX_RSA_PSS_4096_SHA256", "PKIX_ECDSA_P256_HMAC_SHA_256", "PKIX_ECDSA_P256_SHA_256", "PKIX_ECDSA_P384_SHA_384", "PKIX_ECDSA_P521_SHA_512", "PKIX_ED25519", "PKIX_ED25519_PH", "PKIX_ECDSA_P384_SHA_256", "PKIX_ECDSA_P521_SHA_256", "LMS_SHA256", "LMOTS_SHA256" ], "type": "string", "title": "Public Key Details", "description": "Details of a specific public key, capturing the the key encoding method, and signature algorithm. PublicKeyDetails captures the public key/hash algorithm combinations recommended in the Sigstore ecosystem. 
This is modelled as a linear set as we want to provide a small number of opinionated options instead of allowing every possible permutation. Any changes to this enum MUST be reflected in the algorithm registry. See: docs/algorithm-registry.md To avoid the possibility of contradicting formats such as PKCS1 with ED25519 the valid permutations are listed as a linear set instead of a cartesian set (i.e one combined variable instead of two, one for encoding and one for the signature algorithm)." }, "validFor": { "$ref": "#/definitions/dev.sigstore.common.v1.TimeRange", "additionalProperties": false, "description": "Optional validity period for this key, *inclusive* of the endpoints." } }, "additionalProperties": false, "type": "object", "oneOf": [ { "required": [ "raw_bytes" ] }, { "required": [ "valid_for" ] } ], "title": "Public Key" }, "dev.sigstore.common.v1.PublicKeyIdentifier": { "properties": { "hint": { "type": "string", "description": "Optional unauthenticated hint on which key to use. The format of the hint must be agreed upon out of band by the signer and the verifiers, and so is not subject to this specification. Example use-case is to specify the public key to use, from a trusted key-ring. Implementors are RECOMMENDED to derive the value from the public key as described in RFC 6962. See: \u003chttps://www.rfc-editor.org/rfc/rfc6962#section-3.2\u003e" } }, "additionalProperties": false, "type": "object", "title": "Public Key Identifier", "description": "PublicKeyIdentifier can be used to identify an (out of band) delivered key, to verify a signature." }, "dev.sigstore.common.v1.RFC3161SignedTimestamp": { "properties": { "signedTimestamp": { "type": "string", "description": "Signed timestamp is the DER encoded TimeStampResponse. See https://www.rfc-editor.org/rfc/rfc3161.html#section-2.4.2", "format": "binary", "binaryEncoding": "base64" } }, "additionalProperties": false, "type": "object", "title": "RFC 3161 Signed Timestamp", "description": "This message holds a RFC 3161 timestamp." }, "dev.sigstore.common.v1.SubjectAlternativeName": { "properties": { "type": { "enum": [ "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED", "EMAIL", "URI", "OTHER_NAME" ], "type": "string", "title": "Subject Alternative Name Type" }, "regexp": { "type": "string", "description": "A regular expression describing the expected value for the SAN." }, "value": { "type": "string", "description": "The exact value to match against." } }, "additionalProperties": false, "type": "object", "oneOf": [ { "required": [ "regexp" ] }, { "required": [ "value" ] } ], "title": "Subject Alternative Name" }, "dev.sigstore.common.v1.TimeRange": { "properties": { "start": { "type": "string", "format": "date-time" }, "end": { "type": "string", "format": "date-time" } }, "additionalProperties": false, "type": "object", "oneOf": [ { "required": [ "end" ] } ], "title": "Time Range", "description": "The time range is closed and includes both the start and end times, (i.e., [start, end]). End is optional to be able to capture a period that has started but has no known end." 
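The SubjectAlternativeName message above carries either an exact value or a regular expression. A minimal matching sketch follows; full-match semantics for the regexp case and the dict layout are assumptions for illustration, not something this schema pins down.

import re

def san_matches(san: dict, observed: str) -> bool:
    # 'value' and 'regexp' are mutually exclusive per the oneOf above.
    if "value" in san:
        return observed == san["value"]
    if "regexp" in san:
        return re.fullmatch(san["regexp"], observed) is not None
    return False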
}, "dev.sigstore.common.v1.X509Certificate": { "properties": { "rawBytes": { "type": "string", "description": "DER-encoded X.509 certificate.", "format": "binary", "binaryEncoding": "base64" } }, "additionalProperties": false, "type": "object", "title": "X 509 Certificate" }, "dev.sigstore.common.v1.X509CertificateChain": { "properties": { "certificates": { "items": { "$ref": "#/definitions/dev.sigstore.common.v1.X509Certificate" }, "additionalProperties": false, "type": "array", "description": "One or more DER-encoded certificates. In some contexts (such as `VerificationMaterial.x509_certificate_chain`), this sequence has an imposed order. Unless explicitly specified, there is otherwise no guaranteed order." } }, "additionalProperties": false, "type": "object", "title": "X 509 Certificate Chain", "description": "A collection of X.509 certificates. This \"chain\" can be used in multiple contexts, such as providing a root CA certificate within a TUF root of trust or multiple untrusted certificates for the purpose of chain building." }, "dev.sigstore.rekor.v1.Checkpoint": { "properties": { "envelope": { "type": "string" } }, "additionalProperties": false, "type": "object", "title": "Checkpoint", "description": "The checkpoint MUST contain an origin string as a unique log identifier, the tree size, and the root hash. It MAY also be followed by optional data, and clients MUST NOT assume optional data. The checkpoint MUST also contain a signature over the root hash (tree head). The checkpoint MAY contain additional signatures, but the first SHOULD be the signature from the log. Checkpoint contents are concatenated with newlines into a single string. The checkpoint format is described in https://github.com/transparency-dev/formats/blob/main/log/README.md and https://github.com/C2SP/C2SP/blob/main/tlog-checkpoint.md. An example implementation can be found in https://github.com/sigstore/rekor/blob/main/pkg/util/signed_note.go" }, "dev.sigstore.rekor.v1.InclusionPromise": { "properties": { "signedEntryTimestamp": { "type": "string", "format": "binary", "binaryEncoding": "base64" } }, "additionalProperties": false, "type": "object", "title": "Inclusion Promise", "description": "The inclusion promise is calculated by Rekor. It's calculated as a signature over a canonical JSON serialization of the persisted entry, the log ID, log index and the integration timestamp. See https://github.com/sigstore/rekor/blob/a6e58f72b6b18cc06cefe61808efd562b9726330/pkg/api/entries.go#L54 The format of the signature depends on the transparency log's public key. If the signature algorithm requires a hash function and/or a signature scheme (e.g. RSA) those has to be retrieved out-of-band from the log's operators, together with the public key. This is used to verify the integration timestamp's value and that the log has promised to include the entry." }, "dev.sigstore.rekor.v1.InclusionProof": { "properties": { "logIndex": { "type": "string", "description": "The index of the entry in the tree it was written to." }, "rootHash": { "type": "string", "description": "The hash digest stored at the root of the merkle tree at the time the proof was generated.", "format": "binary", "binaryEncoding": "base64" }, "treeSize": { "type": "string", "description": "The size of the merkle tree at the time the proof was generated." }, "hashes": { "items": { "type": "string" }, "type": "array", "description": "A list of hashes required to compute the inclusion proof, sorted in order from leaf to root. 
Note that leaf and root hashes are not included. The root hash is available separately in this message, and the leaf hash should be calculated by the client.", "format": "binary", "binaryEncoding": "base64" }, "checkpoint": { "$ref": "#/definitions/dev.sigstore.rekor.v1.Checkpoint", "additionalProperties": false, "description": "Signature of the tree head, as of the time this proof was generated. See above info on 'Checkpoint' for more details." } }, "additionalProperties": false, "type": "object", "title": "Inclusion Proof", "description": "InclusionProof is the proof returned from the transparency log. Can be used for offline or online verification against the log." }, "dev.sigstore.rekor.v1.KindVersion": { "properties": { "kind": { "type": "string", "description": "Kind is the type of entry being stored in the log. See here for a list: https://github.com/sigstore/rekor/tree/main/pkg/types" }, "version": { "type": "string", "description": "The specific api version of the type." } }, "additionalProperties": false, "type": "object", "title": "Kind Version", "description": "KindVersion contains the entry's kind and api version." }, "dev.sigstore.rekor.v1.TransparencyLogEntry": { "properties": { "logIndex": { "type": "string", "description": "The global index of the entry, used when querying the log by index." }, "logId": { "$ref": "#/definitions/dev.sigstore.common.v1.LogId", "additionalProperties": false, "description": "The unique identifier of the log." }, "kindVersion": { "$ref": "#/definitions/dev.sigstore.rekor.v1.KindVersion", "additionalProperties": false, "description": "The kind (type) and version of the object associated with this entry. These values are required to construct the entry during verification." }, "integratedTime": { "type": "string", "description": "The UNIX timestamp from the log when the entry was persisted. The integration time MUST NOT be trusted if inclusion_promise is omitted." }, "inclusionPromise": { "$ref": "#/definitions/dev.sigstore.rekor.v1.InclusionPromise", "additionalProperties": false, "description": "The inclusion promise/signed entry timestamp from the log. Required for v0.1 bundles, and MUST be verified. Optional for \u003e= v0.2 bundles if another suitable source of time is present (such as another source of signed time, or the current system time for long-lived certificates). MUST be verified if no other suitable source of time is present, and SHOULD be verified otherwise." }, "inclusionProof": { "$ref": "#/definitions/dev.sigstore.rekor.v1.InclusionProof", "additionalProperties": false, "description": "The inclusion proof can be used for offline or online verification that the entry was appended to the log, and that the log has not been altered." }, "canonicalizedBody": { "type": "string", "description": "Optional. The canonicalized transparency log entry, used to reconstruct the Signed Entry Timestamp (SET) during verification. The contents of this field are the same as the `body` field in a Rekor response, meaning that it does **not** include the \"full\" canonicalized form (of log index, ID, etc.) which are exposed as separate fields. The verifier is responsible for combining the `canonicalized_body`, `log_index`, `log_id`, and `integrated_time` into the payload that the SET's signature is generated over. This field is intended to be used in cases where the SET cannot be produced deterministically (e.g. inconsistent JSON field ordering, differing whitespace, etc).
If set, clients MUST verify that the signature referenced in the `canonicalized_body` matches the signature provided in the `Bundle.content`. If not set, clients are responsible for constructing an equivalent payload from other sources to verify the signature.", "format": "binary", "binaryEncoding": "base64" } }, "additionalProperties": false, "type": "object", "title": "Transparency Log Entry", "description": "TransparencyLogEntry captures all the details required from Rekor to reconstruct an entry, given that the payload is provided via other means. This type can easily be created from the existing response from Rekor. Future iterations could rely on Rekor returning the minimal set of attributes (excluding the payload) that are required for verifying the inclusion promise. The inclusion promise (called SignedEntryTimestamp in the response from Rekor) is similar to a Signed Certificate Timestamp as described here https://www.rfc-editor.org/rfc/rfc6962.html#section-3.2." }, "dev.sigstore.trustroot.v1.CertificateAuthority": { "properties": { "subject": { "$ref": "#/definitions/dev.sigstore.common.v1.DistinguishedName", "additionalProperties": false, "description": "The root certificate MUST be self-signed, and so the subject and issuer are the same." }, "uri": { "type": "string", "description": "The URI identifies the certificate authority. It is RECOMMENDED that the URI is the base URL for the certificate authority, that can be provided to any SDK/client provided by the certificate authority to interact with the certificate authority." }, "certChain": { "$ref": "#/definitions/dev.sigstore.common.v1.X509CertificateChain", "additionalProperties": false, "description": "The certificate chain for this CA. The last certificate in the chain MUST be the trust anchor. The trust anchor MAY be a self-signed root CA certificate or MAY be an intermediate CA certificate." }, "validFor": { "$ref": "#/definitions/dev.sigstore.common.v1.TimeRange", "additionalProperties": false, "description": "The time the *entire* chain was valid. This is at max the longest interval when *all* certificates in the chain were valid, but it MAY be shorter. Clients MUST check timestamps against *both* the `valid_for` time range *and* the entire certificate chain. The TimeRange should be considered valid *inclusive* of the endpoints." } }, "additionalProperties": false, "type": "object", "title": "Certificate Authority", "description": "CertificateAuthority enlists the information required to identify which CA to use and perform signature verification." }, "dev.sigstore.trustroot.v1.TransparencyLogInstance": { "properties": { "baseUrl": { "type": "string", "description": "The base URL at which can be used to URLs for the client." }, "hashAlgorithm": { "enum": [ "HASH_ALGORITHM_UNSPECIFIED", "SHA2_256", "SHA2_384", "SHA2_512", "SHA3_256", "SHA3_384" ], "type": "string", "title": "This package defines commonly used message types within the Sigstore\n community.", "description": "This package defines commonly used message types within the Sigstore community. Only a subset of the secure hash standard algorithms are supported. See \u003chttps://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.180-4.pdf\u003e for more details. UNSPECIFIED SHOULD not be used, primary reason for inclusion is to force any proto JSON serialization to emit the used hash algorithm, as default option is to *omit* the default value of an enum (which is the first value, represented by '0'." 
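The canonicalized_body comment above says the verifier combines canonicalized_body, log_index, log_id and integrated_time into the payload the SET signs. A sketch of that reassembly follows; the JSON key names (body, integratedTime, logID, logIndex) and the sorted, whitespace-free canonical form are assumptions about Rekor's wire format, not taken from this schema.

import base64
import json

def set_payload(canonicalized_body: bytes, log_index: int,
                log_id_hex: str, integrated_time: int) -> bytes:
    # integrated_time is the UNIX timestamp reported by the log.
    payload = {
        "body": base64.b64encode(canonicalized_body).decode("ascii"),
        "integratedTime": integrated_time,
        "logID": log_id_hex,
        "logIndex": log_index,
    }
    # Canonical JSON here means sorted keys and no insignificant whitespace.
    return json.dumps(payload, sort_keys=True, separators=(",", ":")).encode("utf-8")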
}, "publicKey": { "$ref": "#/definitions/dev.sigstore.common.v1.PublicKey", "additionalProperties": false, "description": "The public key used to verify signatures generated by the log. This attribute contains the signature algorithm used by the log." }, "logId": { "$ref": "#/definitions/dev.sigstore.common.v1.LogId", "additionalProperties": false, "description": "The unique identifier for this transparency log. Represented as the SHA-256 hash of the log's public key, calculated over the DER encoding of the key represented as SubjectPublicKeyInfo. See https://www.rfc-editor.org/rfc/rfc6962#section-3.2" }, "checkpointKeyId": { "$ref": "#/definitions/dev.sigstore.common.v1.LogId", "additionalProperties": false, "description": "The checkpoint key identifier for the log used in a checkpoint. Optional, not provided for logs that do not generate checkpoints. For logs that do generate checkpoints, if not set, assume log_id equals checkpoint_key_id. Follows the specification described here for ECDSA and Ed25519 signatures: https://github.com/C2SP/C2SP/blob/main/signed-note.md#signatures For RSA signatures, the key ID will match the ECDSA format, the hashed DER-encoded SPKI public key. Publicly witnessed logs MUST NOT use RSA-signed checkpoints, since witnesses do not support RSA signatures. This is provided for convenience. Clients can also calculate the checkpoint key ID given the log's public key. SHOULD be set for logs generating Ed25519 signatures. SHOULD be 4 bytes long, as a truncated hash." } }, "additionalProperties": false, "type": "object", "title": "Transparency Log Instance", "description": "TransparencyLogInstance describes the immutable parameters from a transparency log. See https://www.rfc-editor.org/rfc/rfc9162.html#name-log-parameters for more details. The included parameters are the minimal set required to identify a log, and verify an inclusion proof/promise." }, "dev.sigstore.trustroot.v1.TrustedRoot": { "properties": { "mediaType": { "type": "string", "description": "MUST be application/vnd.dev.sigstore.trustedroot.v0.1+json when encoded as JSON. Clients MUST be able to process and parse content with the media type defined in the old format: application/vnd.dev.sigstore.trustedroot+json;version=0.1" }, "tlogs": { "items": { "$ref": "#/definitions/dev.sigstore.trustroot.v1.TransparencyLogInstance" }, "additionalProperties": false, "type": "array", "description": "A set of trusted Rekor servers." }, "certificateAuthorities": { "items": { "$ref": "#/definitions/dev.sigstore.trustroot.v1.CertificateAuthority" }, "additionalProperties": false, "type": "array", "description": "A set of trusted certificate authorities (e.g Fulcio), and any intermediate certificates they provide. If a CA is issuing multiple intermediate certificate, each combination shall be represented as separate chain. I.e, a single root cert may appear in multiple chains but with different intermediate and/or leaf certificates. The certificates are intended to be used for verifying artifact signatures." }, "ctlogs": { "items": { "$ref": "#/definitions/dev.sigstore.trustroot.v1.TransparencyLogInstance" }, "additionalProperties": false, "type": "array", "description": "A set of trusted certificate transparency logs." }, "timestampAuthorities": { "items": { "$ref": "#/definitions/dev.sigstore.trustroot.v1.CertificateAuthority" }, "additionalProperties": false, "type": "array", "description": "A set of trusted timestamping authorities." 
} }, "additionalProperties": false, "type": "object", "title": "Trusted Root", "description": "TrustedRoot describes the client's complete set of trusted entities. How the TrustedRoot is populated is not specified, but can be a combination of many sources such as TUF repositories, files on disk etc. The TrustedRoot is not meant to be used for any artifact verification, only to capture the complete/global set of trusted verification materials. When verifying an artifact, based on the artifact and policies, a selection of keys/authorities are expected to be extracted and provided to the verification function. This way the set of keys/authorities can be kept to a minimal set by the policy to gain better control over what signatures that are allowed. The embedded transparency logs, CT logs, CAs and TSAs MUST include any previously used instance -- otherwise signatures made in the past cannot be verified. All the listed instances SHOULD be sorted by the 'valid_for' in ascending order, that is, the oldest instance first. Only the last instance is allowed to have their 'end' timestamp unset. All previous instances MUST have a closed interval of validity. The last instance MAY have a closed interval. Clients MUST accept instances that overlaps in time, if not clients may experience problems during rotations of verification materials. To be able to manage planned rotations of either transparency logs or certificate authorities, clienst MUST accept lists of instances where the last instance have a 'valid_for' that belongs to the future. This should not be a problem as clients SHOULD first seek the trust root for a suitable instance before creating a per artifact trust root (that is, a sub-set of the complete trust root) that is used for verification." }, "dev.sigstore.verification.v1.Artifact": { "properties": { "artifactUri": { "type": "string", "description": "Location of the artifact" }, "artifact": { "type": "string", "description": "The raw bytes of the artifact", "format": "binary", "binaryEncoding": "base64" }, "artifactDigest": { "$ref": "#/definitions/dev.sigstore.common.v1.HashOutput", "additionalProperties": false, "description": "Digest of the artifact. SHOULD NOT be used when verifying an in-toto attestation as the subject digest cannot be reconstructed. This option will not work with Ed25519 signatures, use Ed25519Ph or another algorithm instead." } }, "additionalProperties": false, "type": "object", "oneOf": [ { "required": [ "artifact_uri" ] }, { "required": [ "artifact" ] }, { "required": [ "artifact_digest" ] } ], "title": "Artifact" }, "dev.sigstore.verification.v1.ArtifactVerificationOptions": { "properties": { "certificateIdentities": { "$ref": "#/definitions/dev.sigstore.verification.v1.CertificateIdentities", "additionalProperties": false }, "publicKeys": { "$ref": "#/definitions/dev.sigstore.verification.v1.PublicKeyIdentities", "additionalProperties": false, "description": "To simplify verification implementation, the logic for bundle verification should be implemented as a higher-order function, where one of argument should be an interface over the set of trusted public keys, like this: `Verify(bytes artifact, bytes signature, string key_id)`. This way the caller is in full control of mapping the identified (or hinted) key in the bundle to one of the trusted keys, as this process is inherently application specific." 
}, "tlogOptions": { "$ref": "#/definitions/dev.sigstore.verification.v1.ArtifactVerificationOptions.TlogOptions", "additionalProperties": false, "description": "Optional options for artifact transparency log verification. If none is provided, the default verification options are: Threshold: 1 Online verification: false Disable: false" }, "ctlogOptions": { "$ref": "#/definitions/dev.sigstore.verification.v1.ArtifactVerificationOptions.CtlogOptions", "additionalProperties": false, "description": "Optional options for certificate transparency log verification. If none is provided, the default verification options are: Threshold: 1 Disable: false" }, "tsaOptions": { "$ref": "#/definitions/dev.sigstore.verification.v1.ArtifactVerificationOptions.TimestampAuthorityOptions", "additionalProperties": false, "description": "Optional options for certificate signed timestamp verification. If none is provided, the default verification options are: Threshold: 0 Disable: true" }, "integratedTsOptions": { "$ref": "#/definitions/dev.sigstore.verification.v1.ArtifactVerificationOptions.TlogIntegratedTimestampOptions", "additionalProperties": false, "description": "Optional options for integrated timestamp verification. If none is provided, the default verification options are: Threshold: 0 Disable: true" }, "observerOptions": { "$ref": "#/definitions/dev.sigstore.verification.v1.ArtifactVerificationOptions.ObserverTimestampOptions", "additionalProperties": false, "description": "Optional options for observed timestamp verification. If none is provided, the default verification options are: Threshold 1 Disable: false" } }, "additionalProperties": false, "type": "object", "oneOf": [ { "required": [ "certificate_identities" ] }, { "required": [ "public_keys" ] }, { "required": [ "tlog_options" ] }, { "required": [ "ctlog_options" ] }, { "required": [ "tsa_options" ] }, { "required": [ "integrated_ts_options" ] }, { "required": [ "observer_options" ] } ], "title": "Artifact Verification Options", "description": "A light-weight set of options/policies for identifying trusted signers, used during verification of a single artifact." }, "dev.sigstore.verification.v1.ArtifactVerificationOptions.CtlogOptions": { "properties": { "threshold": { "type": "integer", "description": "The number of ct transparency logs the certificate must appear on." }, "disable": { "type": "boolean", "description": "Disable ct transparency log verification" } }, "additionalProperties": false, "type": "object", "title": "Ctlog Options" }, "dev.sigstore.verification.v1.ArtifactVerificationOptions.ObserverTimestampOptions": { "properties": { "threshold": { "type": "integer", "description": "The number of external observers of the timestamp. This is a union of RFC3161 signed timestamps, and integrated timestamps from a transparency log, that could include additional timestamp sources in the future." }, "disable": { "type": "boolean", "description": "Disable observer timestamp verification." } }, "additionalProperties": false, "type": "object", "title": "Observer Timestamp Options" }, "dev.sigstore.verification.v1.ArtifactVerificationOptions.TimestampAuthorityOptions": { "properties": { "threshold": { "type": "integer", "description": "The number of signed timestamps that are expected." }, "disable": { "type": "boolean", "description": "Disable signed timestamp verification." 
} }, "additionalProperties": false, "type": "object", "title": "Timestamp Authority Options" }, "dev.sigstore.verification.v1.ArtifactVerificationOptions.TlogIntegratedTimestampOptions": { "properties": { "threshold": { "type": "integer", "description": "The number of integrated timestamps that are expected." }, "disable": { "type": "boolean", "description": "Disable integrated timestamp verification." } }, "additionalProperties": false, "type": "object", "title": "Tlog Integrated Timestamp Options" }, "dev.sigstore.verification.v1.ArtifactVerificationOptions.TlogOptions": { "properties": { "threshold": { "type": "integer", "description": "Number of transparency logs the entry must appear on." }, "performOnlineVerification": { "type": "boolean", "description": "Perform an online inclusion proof." }, "disable": { "type": "boolean", "description": "Disable verification for transparency logs." } }, "additionalProperties": false, "type": "object", "title": "Tlog Options" }, "dev.sigstore.verification.v1.CertificateIdentities": { "properties": { "identities": { "items": { "$ref": "#/definitions/dev.sigstore.verification.v1.CertificateIdentity" }, "additionalProperties": false, "type": "array" } }, "additionalProperties": false, "type": "object", "title": "Certificate Identities" }, "dev.sigstore.verification.v1.CertificateIdentity": { "properties": { "issuer": { "type": "string", "description": "The X.509v3 issuer extension (OID 1.3.6.1.4.1.57264.1.1)" }, "san": { "$ref": "#/definitions/dev.sigstore.common.v1.SubjectAlternativeName", "additionalProperties": false }, "oids": { "items": { "$ref": "#/definitions/dev.sigstore.common.v1.ObjectIdentifierValuePair" }, "additionalProperties": false, "type": "array", "description": "An unordered list of OIDs that must be verified. All OID/values provided in this list MUST exactly match against the values in the certificate for verification to be successful." } }, "additionalProperties": false, "type": "object", "title": "Certificate Identity", "description": "The identity of a X.509 Certificate signer." }, "dev.sigstore.verification.v1.PublicKeyIdentities": { "properties": { "publicKeys": { "items": { "$ref": "#/definitions/dev.sigstore.common.v1.PublicKey" }, "additionalProperties": false, "type": "array" } }, "additionalProperties": false, "type": "object", "title": "Public Key Identities" }, "io.intoto.Envelope": { "properties": { "payload": { "type": "string", "description": "Message to be signed. (In JSON, this is encoded as base64.) REQUIRED.", "format": "binary", "binaryEncoding": "base64" }, "payloadType": { "type": "string", "description": "String unambiguously identifying how to interpret payload. REQUIRED." }, "signatures": { "items": { "$ref": "#/definitions/io.intoto.Signature" }, "additionalProperties": false, "type": "array", "description": "Signature over: PAE(type, payload) Where PAE is defined as: PAE(type, payload) = \"DSSEv1\" + SP + LEN(type) + SP + type + SP + LEN(payload) + SP + payload + = concatenation SP = ASCII space [0x20] \"DSSEv1\" = ASCII [0x44, 0x53, 0x53, 0x45, 0x76, 0x31] LEN(s) = ASCII decimal encoding of the byte length of s, with no leading zeros REQUIRED (length \u003e= 1)." } }, "additionalProperties": false, "type": "object", "title": "Envelope", "description": "An authenticated message of arbitrary type." }, "io.intoto.Signature": { "properties": { "sig": { "type": "string", "description": "Signature itself. (In JSON, this is encoded as base64.) 
REQUIRED.", "format": "binary", "binaryEncoding": "base64" }, "keyid": { "type": "string", "description": "*Unauthenticated* hint identifying which public key was used. OPTIONAL." } }, "additionalProperties": false, "type": "object", "title": "Signature" } } }protobuf-specs-0.4.1/gen/jsonschema/schemas/KindVersion.schema.json000066400000000000000000000014471477352757300254340ustar00rootroot00000000000000{ "$schema": "http://json-schema.org/draft-04/schema#", "$ref": "#/definitions/KindVersion", "definitions": { "KindVersion": { "properties": { "kind": { "type": "string", "description": "Kind is the type of entry being stored in the log. See here for a list: https://github.com/sigstore/rekor/tree/main/pkg/types" }, "version": { "type": "string", "description": "The specific api version of the type." } }, "additionalProperties": false, "type": "object", "title": "Kind Version", "description": "KindVersion contains the entry's kind and api version." } } }protobuf-specs-0.4.1/gen/jsonschema/schemas/LogId.schema.json000066400000000000000000000012121477352757300241650ustar00rootroot00000000000000{ "$schema": "http://json-schema.org/draft-04/schema#", "$ref": "#/definitions/LogId", "definitions": { "LogId": { "properties": { "keyId": { "type": "string", "description": "The unique identity of the log, represented by its public key.", "format": "binary", "binaryEncoding": "base64" } }, "additionalProperties": false, "type": "object", "title": "Log Id", "description": "LogId captures the identity of a transparency log." } } }protobuf-specs-0.4.1/gen/jsonschema/schemas/MessageSignature.schema.json000066400000000000000000000060521477352757300264440ustar00rootroot00000000000000{ "$schema": "http://json-schema.org/draft-04/schema#", "$ref": "#/definitions/MessageSignature", "definitions": { "MessageSignature": { "properties": { "messageDigest": { "$ref": "#/definitions/dev.sigstore.common.v1.HashOutput", "additionalProperties": false, "description": "Message digest can be used to identify the artifact. Clients MUST NOT attempt to use this digest to verify the associated signature; it is intended solely for identification." }, "signature": { "type": "string", "description": "The raw bytes as returned from the signature algorithm. The signature algorithm (and so the format of the signature bytes) are determined by the contents of the 'verification_material', either a key-pair or a certificate. If using a certificate, the certificate contains the required information on the signature algorithm. When using a key pair, the algorithm MUST be part of the public key, which MUST be communicated out-of-band.", "format": "binary", "binaryEncoding": "base64" } }, "additionalProperties": false, "type": "object", "title": "Message Signature", "description": "MessageSignature stores the computed signature over a message." }, "dev.sigstore.common.v1.HashOutput": { "properties": { "algorithm": { "enum": [ "HASH_ALGORITHM_UNSPECIFIED", "SHA2_256", "SHA2_384", "SHA2_512", "SHA3_256", "SHA3_384" ], "type": "string", "title": "This package defines commonly used message types within the Sigstore\n community.", "description": "This package defines commonly used message types within the Sigstore community. Only a subset of the secure hash standard algorithms are supported. See \u003chttps://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.180-4.pdf\u003e for more details. 
UNSPECIFIED SHOULD not be used, primary reason for inclusion is to force any proto JSON serialization to emit the used hash algorithm, as default option is to *omit* the default value of an enum (which is the first value, represented by '0'." }, "digest": { "type": "string", "description": "This is the raw octets of the message digest as computed by the hash algorithm.", "format": "binary", "binaryEncoding": "base64" } }, "additionalProperties": false, "type": "object", "title": "Hash Output", "description": "HashOutput captures a digest of a 'message' (generic octet sequence) and the corresponding hash algorithm used." } } }protobuf-specs-0.4.1/gen/jsonschema/schemas/ObjectIdentifier.schema.json000066400000000000000000000010621477352757300264030ustar00rootroot00000000000000{ "$schema": "http://json-schema.org/draft-04/schema#", "$ref": "#/definitions/ObjectIdentifier", "definitions": { "ObjectIdentifier": { "properties": { "id": { "items": { "type": "integer" }, "type": "array" } }, "additionalProperties": false, "type": "object", "title": "Object Identifier", "description": "An ASN.1 OBJECT IDENTIFIER" } } }protobuf-specs-0.4.1/gen/jsonschema/schemas/ObjectIdentifierValuePair.schema.json000066400000000000000000000023221477352757300302140ustar00rootroot00000000000000{ "$schema": "http://json-schema.org/draft-04/schema#", "$ref": "#/definitions/ObjectIdentifierValuePair", "definitions": { "ObjectIdentifierValuePair": { "properties": { "oid": { "$ref": "#/definitions/dev.sigstore.common.v1.ObjectIdentifier", "additionalProperties": false }, "value": { "type": "string", "format": "binary", "binaryEncoding": "base64" } }, "additionalProperties": false, "type": "object", "title": "Object Identifier Value Pair", "description": "An OID and the corresponding (byte) value." }, "dev.sigstore.common.v1.ObjectIdentifier": { "properties": { "id": { "items": { "type": "integer" }, "type": "array" } }, "additionalProperties": false, "type": "object", "title": "Object Identifier", "description": "An ASN.1 OBJECT IDENTIFIER" } } }protobuf-specs-0.4.1/gen/jsonschema/schemas/PublicKey.schema.json000066400000000000000000000075721477352757300250750ustar00rootroot00000000000000{ "$schema": "http://json-schema.org/draft-04/schema#", "$ref": "#/definitions/PublicKey", "definitions": { "PublicKey": { "properties": { "rawBytes": { "type": "string", "description": "DER-encoded public key, encoding method is specified by the key_details attribute.", "format": "binary", "binaryEncoding": "base64" }, "keyDetails": { "enum": [ "PUBLIC_KEY_DETAILS_UNSPECIFIED", "PKCS1_RSA_PKCS1V5", "PKCS1_RSA_PSS", "PKIX_RSA_PKCS1V5", "PKIX_RSA_PSS", "PKIX_RSA_PKCS1V15_2048_SHA256", "PKIX_RSA_PKCS1V15_3072_SHA256", "PKIX_RSA_PKCS1V15_4096_SHA256", "PKIX_RSA_PSS_2048_SHA256", "PKIX_RSA_PSS_3072_SHA256", "PKIX_RSA_PSS_4096_SHA256", "PKIX_ECDSA_P256_HMAC_SHA_256", "PKIX_ECDSA_P256_SHA_256", "PKIX_ECDSA_P384_SHA_384", "PKIX_ECDSA_P521_SHA_512", "PKIX_ED25519", "PKIX_ED25519_PH", "PKIX_ECDSA_P384_SHA_256", "PKIX_ECDSA_P521_SHA_256", "LMS_SHA256", "LMOTS_SHA256" ], "type": "string", "title": "Public Key Details", "description": "Details of a specific public key, capturing the the key encoding method, and signature algorithm. PublicKeyDetails captures the public key/hash algorithm combinations recommended in the Sigstore ecosystem. This is modelled as a linear set as we want to provide a small number of opinionated options instead of allowing every possible permutation. Any changes to this enum MUST be reflected in the algorithm registry. 
See: docs/algorithm-registry.md To avoid the possibility of contradicting formats such as PKCS1 with ED25519 the valid permutations are listed as a linear set instead of a cartesian set (i.e one combined variable instead of two, one for encoding and one for the signature algorithm)." }, "validFor": { "$ref": "#/definitions/dev.sigstore.common.v1.TimeRange", "additionalProperties": false, "description": "Optional validity period for this key, *inclusive* of the endpoints." } }, "additionalProperties": false, "type": "object", "oneOf": [ { "required": [ "raw_bytes" ] }, { "required": [ "valid_for" ] } ], "title": "Public Key" }, "dev.sigstore.common.v1.TimeRange": { "properties": { "start": { "type": "string", "format": "date-time" }, "end": { "type": "string", "format": "date-time" } }, "additionalProperties": false, "type": "object", "oneOf": [ { "required": [ "end" ] } ], "title": "Time Range", "description": "The time range is closed and includes both the start and end times, (i.e., [start, end]). End is optional to be able to capture a period that has started but has no known end." } } }protobuf-specs-0.4.1/gen/jsonschema/schemas/PublicKeyIdentifier.schema.json000066400000000000000000000017671477352757300271000ustar00rootroot00000000000000{ "$schema": "http://json-schema.org/draft-04/schema#", "$ref": "#/definitions/PublicKeyIdentifier", "definitions": { "PublicKeyIdentifier": { "properties": { "hint": { "type": "string", "description": "Optional unauthenticated hint on which key to use. The format of the hint must be agreed upon out of band by the signer and the verifiers, and so is not subject to this specification. Example use-case is to specify the public key to use, from a trusted key-ring. Implementors are RECOMMENDED to derive the value from the public key as described in RFC 6962. See: \u003chttps://www.rfc-editor.org/rfc/rfc6962#section-3.2\u003e" } }, "additionalProperties": false, "type": "object", "title": "Public Key Identifier", "description": "PublicKeyIdentifier can be used to identify an (out of band) delivered key, to verify a signature." 
} } }protobuf-specs-0.4.1/gen/jsonschema/schemas/PublicKeyIdentities.schema.json000066400000000000000000000105701477352757300271070ustar00rootroot00000000000000{ "$schema": "http://json-schema.org/draft-04/schema#", "$ref": "#/definitions/PublicKeyIdentities", "definitions": { "PublicKeyIdentities": { "properties": { "publicKeys": { "items": { "$ref": "#/definitions/dev.sigstore.common.v1.PublicKey" }, "additionalProperties": false, "type": "array" } }, "additionalProperties": false, "type": "object", "title": "Public Key Identities" }, "dev.sigstore.common.v1.PublicKey": { "properties": { "rawBytes": { "type": "string", "description": "DER-encoded public key, encoding method is specified by the key_details attribute.", "format": "binary", "binaryEncoding": "base64" }, "keyDetails": { "enum": [ "PUBLIC_KEY_DETAILS_UNSPECIFIED", "PKCS1_RSA_PKCS1V5", "PKCS1_RSA_PSS", "PKIX_RSA_PKCS1V5", "PKIX_RSA_PSS", "PKIX_RSA_PKCS1V15_2048_SHA256", "PKIX_RSA_PKCS1V15_3072_SHA256", "PKIX_RSA_PKCS1V15_4096_SHA256", "PKIX_RSA_PSS_2048_SHA256", "PKIX_RSA_PSS_3072_SHA256", "PKIX_RSA_PSS_4096_SHA256", "PKIX_ECDSA_P256_HMAC_SHA_256", "PKIX_ECDSA_P256_SHA_256", "PKIX_ECDSA_P384_SHA_384", "PKIX_ECDSA_P521_SHA_512", "PKIX_ED25519", "PKIX_ED25519_PH", "PKIX_ECDSA_P384_SHA_256", "PKIX_ECDSA_P521_SHA_256", "LMS_SHA256", "LMOTS_SHA256" ], "type": "string", "title": "Public Key Details", "description": "Details of a specific public key, capturing the the key encoding method, and signature algorithm. PublicKeyDetails captures the public key/hash algorithm combinations recommended in the Sigstore ecosystem. This is modelled as a linear set as we want to provide a small number of opinionated options instead of allowing every possible permutation. Any changes to this enum MUST be reflected in the algorithm registry. See: docs/algorithm-registry.md To avoid the possibility of contradicting formats such as PKCS1 with ED25519 the valid permutations are listed as a linear set instead of a cartesian set (i.e one combined variable instead of two, one for encoding and one for the signature algorithm)." }, "validFor": { "$ref": "#/definitions/dev.sigstore.common.v1.TimeRange", "additionalProperties": false, "description": "Optional validity period for this key, *inclusive* of the endpoints." } }, "additionalProperties": false, "type": "object", "oneOf": [ { "required": [ "raw_bytes" ] }, { "required": [ "valid_for" ] } ], "title": "Public Key" }, "dev.sigstore.common.v1.TimeRange": { "properties": { "start": { "type": "string", "format": "date-time" }, "end": { "type": "string", "format": "date-time" } }, "additionalProperties": false, "type": "object", "oneOf": [ { "required": [ "end" ] } ], "title": "Time Range", "description": "The time range is closed and includes both the start and end times, (i.e., [start, end]). End is optional to be able to capture a period that has started but has no known end." } } }protobuf-specs-0.4.1/gen/jsonschema/schemas/RFC3161SignedTimestamp.schema.json000066400000000000000000000013651477352757300271430ustar00rootroot00000000000000{ "$schema": "http://json-schema.org/draft-04/schema#", "$ref": "#/definitions/RFC3161SignedTimestamp", "definitions": { "RFC3161SignedTimestamp": { "properties": { "signedTimestamp": { "type": "string", "description": "Signed timestamp is the DER encoded TimeStampResponse. 
See https://www.rfc-editor.org/rfc/rfc3161.html#section-2.4.2", "format": "binary", "binaryEncoding": "base64" } }, "additionalProperties": false, "type": "object", "title": "RFC 3161 Signed Timestamp", "description": "This message holds a RFC 3161 timestamp." } } }protobuf-specs-0.4.1/gen/jsonschema/schemas/Service.schema.json000066400000000000000000000045341477352757300246010ustar00rootroot00000000000000{ "$schema": "http://json-schema.org/draft-04/schema#", "$ref": "#/definitions/Service", "definitions": { "Service": { "properties": { "url": { "type": "string", "description": "URL of the service. MUST include scheme and authority. MAY include path." }, "majorApiVersion": { "type": "integer", "description": "Specifies the major API version. A value of 0 represents a service that has not yet been released." }, "validFor": { "$ref": "#/definitions/dev.sigstore.common.v1.TimeRange", "additionalProperties": false, "description": "Validity period of a service. A service that has only a start date SHOULD be considered the most recent instance of that service, but the client MUST NOT assume there is only one valid instance. The TimeRange MUST be considered valid *inclusive* of the endpoints." } }, "additionalProperties": false, "type": "object", "title": "Service", "description": "Service represents an instance of a service that is a part of Sigstore infrastructure. Clients MUST use the API version hint to determine the service with the highest API version that the client is compatible with. Clients MUST also only connect to services within the specified validity period and that has the newest validity start date." }, "dev.sigstore.common.v1.TimeRange": { "properties": { "start": { "type": "string", "format": "date-time" }, "end": { "type": "string", "format": "date-time" } }, "additionalProperties": false, "type": "object", "oneOf": [ { "required": [ "end" ] } ], "title": "Time Range", "description": "The time range is closed and includes both the start and end times, (i.e., [start, end]). End is optional to be able to capture a period that has started but has no known end." } } }protobuf-specs-0.4.1/gen/jsonschema/schemas/ServiceConfiguration.schema.json000066400000000000000000000025511477352757300273260ustar00rootroot00000000000000{ "$schema": "http://json-schema.org/draft-04/schema#", "$ref": "#/definitions/ServiceConfiguration", "definitions": { "ServiceConfiguration": { "properties": { "selector": { "enum": [ "SERVICE_SELECTOR_UNDEFINED", "ALL", "ANY", "EXACT" ], "type": "string", "title": "Service Selector", "description": "ServiceSelector specifies how a client SHOULD select a set of Services to connect to. A client SHOULD throw an error if the value is SERVICE_SELECTOR_UNDEFINED." }, "count": { "type": "integer", "description": "count specifies the number of Services the client should use. Only used when selector is set to EXACT, and count MUST be greater than 0. count MUST be less than or equal to the number of Services." } }, "additionalProperties": false, "type": "object", "title": "Service Configuration", "description": "ServiceConfiguration specifies how a client should select a set of Services to connect to, along with a count when a specific number of Services is requested." 
} } }protobuf-specs-0.4.1/gen/jsonschema/schemas/Signature.schema.json000066400000000000000000000014061477352757300251350ustar00rootroot00000000000000{ "$schema": "http://json-schema.org/draft-04/schema#", "$ref": "#/definitions/Signature", "definitions": { "Signature": { "properties": { "sig": { "type": "string", "description": "Signature itself. (In JSON, this is encoded as base64.) REQUIRED.", "format": "binary", "binaryEncoding": "base64" }, "keyid": { "type": "string", "description": "*Unauthenticated* hint identifying which public key was used. OPTIONAL." } }, "additionalProperties": false, "type": "object", "title": "Signature" } } }protobuf-specs-0.4.1/gen/jsonschema/schemas/SigningConfig.schema.json000066400000000000000000000207511477352757300257240ustar00rootroot00000000000000{ "$schema": "http://json-schema.org/draft-04/schema#", "$ref": "#/definitions/SigningConfig", "definitions": { "SigningConfig": { "properties": { "mediaType": { "type": "string", "description": "MUST be application/vnd.dev.sigstore.signingconfig.v0.2+json Clients MAY choose to also support application/vnd.dev.sigstore.signingconfig.v0.1+json" }, "caUrls": { "items": { "$ref": "#/definitions/dev.sigstore.trustroot.v1.Service" }, "additionalProperties": false, "type": "array", "description": "URLs to Fulcio-compatible CAs, capable of receiving Certificate Signing Requests (CSRs) and responding with issued certificates. These URLs MUST be the \"base\" URL for the CAs, which clients should construct an appropriate CSR endpoint on top of. For example, if a CA URL is `https://example.com/ca`, then the client MAY construct the CSR endpoint as `https://example.com/ca/api/v2/signingCert`. Clients MUST select only one Service with the highest API version that the client is compatible with, that is within its validity period, and has the newest validity start date. Client SHOULD select the first Service that meets this requirement. All listed Services SHOULD be sorted by the `valid_for` window in descending order, with the newest instance first." }, "oidcUrls": { "items": { "$ref": "#/definitions/dev.sigstore.trustroot.v1.Service" }, "additionalProperties": false, "type": "array", "description": "URLs to OpenID Connect identity providers. These URLs MUST be the \"base\" URLs for the OIDC IdPs, which clients should perform well-known OpenID Connect discovery against. Clients MUST select only one Service with the highest API version that the client is compatible with, that is within its validity period, and has the newest validity start date. Client SHOULD select the first Service that meets this requirement. All listed Services SHOULD be sorted by the `valid_for` window in descending order, with the newest instance first." }, "rekorTlogUrls": { "items": { "$ref": "#/definitions/dev.sigstore.trustroot.v1.Service" }, "additionalProperties": false, "type": "array", "description": "URLs to Rekor transparency logs. These URL MUST be the \"base\" URLs for the transparency logs, which clients should construct appropriate API endpoints on top of. Clients MUST select Services with the highest API version that the client is compatible with, that are within its validity period, and have the newest validity start dates. All listed Services SHOULD be sorted by the `valid_for` window in descending order, with the newest instance first. Clients MUST select Services based on the selector value of `rekor_tlog_config`." 
}, "rekorTlogConfig": { "$ref": "#/definitions/dev.sigstore.trustroot.v1.ServiceConfiguration", "additionalProperties": false, "description": "Specifies how a client should select the set of Rekor transparency logs to write to." }, "tsaUrls": { "items": { "$ref": "#/definitions/dev.sigstore.trustroot.v1.Service" }, "additionalProperties": false, "type": "array", "description": "URLs to RFC 3161 Time Stamping Authorities (TSA). These URLs MUST be the *full* URL for the TSA, meaning that it should be suitable for submitting Time Stamp Requests (TSRs) to via HTTP, per RFC 3161. Clients MUST select Services with the highest API version that the client is compatible with, that are within its validity period, and have the newest validity start dates. All listed Services SHOULD be sorted by the `valid_for` window in descending order, with the newest instance first. Clients MUST select Services based on the selector value of `tsa_config`." }, "tsaConfig": { "$ref": "#/definitions/dev.sigstore.trustroot.v1.ServiceConfiguration", "additionalProperties": false, "description": "Specifies how a client should select the set of TSAs to request signed timestamps from." } }, "additionalProperties": false, "type": "object", "title": "Signing Config", "description": "SigningConfig represents the trusted entities/state needed by Sigstore signing. In particular, it primarily contains service URLs that a Sigstore signer may need to connect to for the online aspects of signing." }, "dev.sigstore.common.v1.TimeRange": { "properties": { "start": { "type": "string", "format": "date-time" }, "end": { "type": "string", "format": "date-time" } }, "additionalProperties": false, "type": "object", "oneOf": [ { "required": [ "end" ] } ], "title": "Time Range", "description": "The time range is closed and includes both the start and end times, (i.e., [start, end]). End is optional to be able to capture a period that has started but has no known end." }, "dev.sigstore.trustroot.v1.Service": { "properties": { "url": { "type": "string", "description": "URL of the service. MUST include scheme and authority. MAY include path." }, "majorApiVersion": { "type": "integer", "description": "Specifies the major API version. A value of 0 represents a service that has not yet been released." }, "validFor": { "$ref": "#/definitions/dev.sigstore.common.v1.TimeRange", "additionalProperties": false, "description": "Validity period of a service. A service that has only a start date SHOULD be considered the most recent instance of that service, but the client MUST NOT assume there is only one valid instance. The TimeRange MUST be considered valid *inclusive* of the endpoints." } }, "additionalProperties": false, "type": "object", "title": "Service", "description": "Service represents an instance of a service that is a part of Sigstore infrastructure. Clients MUST use the API version hint to determine the service with the highest API version that the client is compatible with. Clients MUST also only connect to services within the specified validity period and that has the newest validity start date." }, "dev.sigstore.trustroot.v1.ServiceConfiguration": { "properties": { "selector": { "enum": [ "SERVICE_SELECTOR_UNDEFINED", "ALL", "ANY", "EXACT" ], "type": "string", "title": "Service Selector", "description": "ServiceSelector specifies how a client SHOULD select a set of Services to connect to. A client SHOULD throw an error if the value is SERVICE_SELECTOR_UNDEFINED." 
}, "count": { "type": "integer", "description": "count specifies the number of Services the client should use. Only used when selector is set to EXACT, and count MUST be greater than 0. count MUST be less than or equal to the number of Services." } }, "additionalProperties": false, "type": "object", "title": "Service Configuration", "description": "ServiceConfiguration specifies how a client should select a set of Services to connect to, along with a count when a specific number of Services is requested." } } }protobuf-specs-0.4.1/gen/jsonschema/schemas/SubjectAlternativeName.schema.json000066400000000000000000000025011477352757300275700ustar00rootroot00000000000000{ "$schema": "http://json-schema.org/draft-04/schema#", "$ref": "#/definitions/SubjectAlternativeName", "definitions": { "SubjectAlternativeName": { "properties": { "type": { "enum": [ "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED", "EMAIL", "URI", "OTHER_NAME" ], "type": "string", "title": "Subject Alternative Name Type" }, "regexp": { "type": "string", "description": "A regular expression describing the expected value for the SAN." }, "value": { "type": "string", "description": "The exact value to match against." } }, "additionalProperties": false, "type": "object", "oneOf": [ { "required": [ "regexp" ] }, { "required": [ "value" ] } ], "title": "Subject Alternative Name" } } }protobuf-specs-0.4.1/gen/jsonschema/schemas/TimeRange.schema.json000066400000000000000000000016351477352757300250530ustar00rootroot00000000000000{ "$schema": "http://json-schema.org/draft-04/schema#", "$ref": "#/definitions/TimeRange", "definitions": { "TimeRange": { "properties": { "start": { "type": "string", "format": "date-time" }, "end": { "type": "string", "format": "date-time" } }, "additionalProperties": false, "type": "object", "oneOf": [ { "required": [ "end" ] } ], "title": "Time Range", "description": "The time range is closed and includes both the start and end times, (i.e., [start, end]). End is optional to be able to capture a period that has started but has no known end." } } }protobuf-specs-0.4.1/gen/jsonschema/schemas/TimestampVerificationData.schema.json000066400000000000000000000045151477352757300303000ustar00rootroot00000000000000{ "$schema": "http://json-schema.org/draft-04/schema#", "$ref": "#/definitions/TimestampVerificationData", "definitions": { "TimestampVerificationData": { "properties": { "rfc3161Timestamps": { "items": { "$ref": "#/definitions/dev.sigstore.common.v1.RFC3161SignedTimestamp" }, "additionalProperties": false, "type": "array", "description": "A list of RFC3161 signed timestamps provided by the user. This can be used when the entry has not been stored on a transparency log, or in conjunction for a stronger trust model. Clients MUST verify the hashed message in the message imprint against the signature in the bundle." } }, "additionalProperties": false, "type": "object", "title": "Notes on versioning.\n The primary message ('Bundle') MUST be versioned, by populating the\n 'media_type' field. Semver-ish (only major/minor versions) scheme MUST\n be used. The current version as specified by this file is:\n application/vnd.dev.sigstore.bundle.v0.3+json\n The semantic version is thus '0.3'.", "description": "Notes on versioning. The primary message ('Bundle') MUST be versioned, by populating the 'media_type' field. Semver-ish (only major/minor versions) scheme MUST be used. The current version as specified by this file is: application/vnd.dev.sigstore.bundle.v0.3+json The semantic version is thus '0.3'. 
Various timestamped counter signatures over the artifacts signature. Currently only RFC3161 signatures are provided. More formats may be added in the future." }, "dev.sigstore.common.v1.RFC3161SignedTimestamp": { "properties": { "signedTimestamp": { "type": "string", "description": "Signed timestamp is the DER encoded TimeStampResponse. See https://www.rfc-editor.org/rfc/rfc3161.html#section-2.4.2", "format": "binary", "binaryEncoding": "base64" } }, "additionalProperties": false, "type": "object", "title": "RFC 3161 Signed Timestamp", "description": "This message holds a RFC 3161 timestamp." } } }protobuf-specs-0.4.1/gen/jsonschema/schemas/TransparencyLogEntry.schema.json000066400000000000000000000224031477352757300273310ustar00rootroot00000000000000{ "$schema": "http://json-schema.org/draft-04/schema#", "$ref": "#/definitions/TransparencyLogEntry", "definitions": { "TransparencyLogEntry": { "properties": { "logIndex": { "type": "string", "description": "The global index of the entry, used when querying the log by index." }, "logId": { "$ref": "#/definitions/dev.sigstore.common.v1.LogId", "additionalProperties": false, "description": "The unique identifier of the log." }, "kindVersion": { "$ref": "#/definitions/dev.sigstore.rekor.v1.KindVersion", "additionalProperties": false, "description": "The kind (type) and version of the object associated with this entry. These values are required to construct the entry during verification." }, "integratedTime": { "type": "string", "description": "The UNIX timestamp from the log when the entry was persisted. The integration time MUST NOT be trusted if inclusion_promise is omitted." }, "inclusionPromise": { "$ref": "#/definitions/dev.sigstore.rekor.v1.InclusionPromise", "additionalProperties": false, "description": "The inclusion promise/signed entry timestamp from the log. Required for v0.1 bundles, and MUST be verified. Optional for \u003e= v0.2 bundles if another suitable source of time is present (such as another source of signed time, or the current system time for long-lived certificates). MUST be verified if no other suitable source of time is present, and SHOULD be verified otherwise." }, "inclusionProof": { "$ref": "#/definitions/dev.sigstore.rekor.v1.InclusionProof", "additionalProperties": false, "description": "The inclusion proof can be used for offline or online verification that the entry was appended to the log, and that the log has not been altered." }, "canonicalizedBody": { "type": "string", "description": "Optional. The canonicalized transparency log entry, used to reconstruct the Signed Entry Timestamp (SET) during verification. The contents of this field are the same as the `body` field in a Rekor response, meaning that it does **not** include the \"full\" canonicalized form (of log index, ID, etc.) which are exposed as separate fields. The verifier is responsible for combining the `canonicalized_body`, `log_index`, `log_id`, and `integrated_time` into the payload that the SET's signature is generated over. This field is intended to be used in cases where the SET cannot be produced determinisitically (e.g. inconsistent JSON field ordering, differing whitespace, etc). If set, clients MUST verify that the signature referenced in the `canonicalized_body` matches the signature provided in the `Bundle.content`. 
If not set, clients are responsible for constructing an equivalent payload from other sources to verify the signature.", "format": "binary", "binaryEncoding": "base64" } }, "additionalProperties": false, "type": "object", "title": "Transparency Log Entry", "description": "TransparencyLogEntry captures all the details required from Rekor to reconstruct an entry, given that the payload is provided via other means. This type can easily be created from the existing response from Rekor. Future iterations could rely on Rekor returning the minimal set of attributes (excluding the payload) that are required for verifying the inclusion promise. The inclusion promise (called SignedEntryTimestamp in the response from Rekor) is similar to a Signed Certificate Timestamp as described here https://www.rfc-editor.org/rfc/rfc6962.html#section-3.2." }, "dev.sigstore.common.v1.LogId": { "properties": { "keyId": { "type": "string", "description": "The unique identity of the log, represented by its public key.", "format": "binary", "binaryEncoding": "base64" } }, "additionalProperties": false, "type": "object", "title": "Log Id", "description": "LogId captures the identity of a transparency log." }, "dev.sigstore.rekor.v1.Checkpoint": { "properties": { "envelope": { "type": "string" } }, "additionalProperties": false, "type": "object", "title": "Checkpoint", "description": "The checkpoint MUST contain an origin string as a unique log identifier, the tree size, and the root hash. It MAY also be followed by optional data, and clients MUST NOT assume optional data. The checkpoint MUST also contain a signature over the root hash (tree head). The checkpoint MAY contain additional signatures, but the first SHOULD be the signature from the log. Checkpoint contents are concatenated with newlines into a single string. The checkpoint format is described in https://github.com/transparency-dev/formats/blob/main/log/README.md and https://github.com/C2SP/C2SP/blob/main/tlog-checkpoint.md. An example implementation can be found in https://github.com/sigstore/rekor/blob/main/pkg/util/signed_note.go" }, "dev.sigstore.rekor.v1.InclusionPromise": { "properties": { "signedEntryTimestamp": { "type": "string", "format": "binary", "binaryEncoding": "base64" } }, "additionalProperties": false, "type": "object", "title": "Inclusion Promise", "description": "The inclusion promise is calculated by Rekor. It's calculated as a signature over a canonical JSON serialization of the persisted entry, the log ID, log index and the integration timestamp. See https://github.com/sigstore/rekor/blob/a6e58f72b6b18cc06cefe61808efd562b9726330/pkg/api/entries.go#L54 The format of the signature depends on the transparency log's public key. If the signature algorithm requires a hash function and/or a signature scheme (e.g. RSA) those have to be retrieved out-of-band from the log's operators, together with the public key. This is used to verify the integration timestamp's value and that the log has promised to include the entry." }, "dev.sigstore.rekor.v1.InclusionProof": { "properties": { "logIndex": { "type": "string", "description": "The index of the entry in the tree it was written to." }, "rootHash": { "type": "string", "description": "The hash digest stored at the root of the merkle tree at the time the proof was generated.", "format": "binary", "binaryEncoding": "base64" }, "treeSize": { "type": "string", "description": "The size of the merkle tree at the time the proof was generated."
}, "hashes": { "items": { "type": "string" }, "type": "array", "description": "A list of hashes required to compute the inclusion proof, sorted in order from leaf to root. Note that leaf and root hashes are not included. The root hash is available separately in this message, and the leaf hash should be calculated by the client.", "format": "binary", "binaryEncoding": "base64" }, "checkpoint": { "$ref": "#/definitions/dev.sigstore.rekor.v1.Checkpoint", "additionalProperties": false, "description": "Signature of the tree head, as of the time of this proof was generated. See above info on 'Checkpoint' for more details." } }, "additionalProperties": false, "type": "object", "title": "Inclusion Proof", "description": "InclusionProof is the proof returned from the transparency log. Can be used for offline or online verification against the log." }, "dev.sigstore.rekor.v1.KindVersion": { "properties": { "kind": { "type": "string", "description": "Kind is the type of entry being stored in the log. See here for a list: https://github.com/sigstore/rekor/tree/main/pkg/types" }, "version": { "type": "string", "description": "The specific api version of the type." } }, "additionalProperties": false, "type": "object", "title": "Kind Version", "description": "KindVersion contains the entry's kind and api version." } } }protobuf-specs-0.4.1/gen/jsonschema/schemas/TransparencyLogInstance.schema.json000066400000000000000000000175721477352757300300070ustar00rootroot00000000000000{ "$schema": "http://json-schema.org/draft-04/schema#", "$ref": "#/definitions/TransparencyLogInstance", "definitions": { "TransparencyLogInstance": { "properties": { "baseUrl": { "type": "string", "description": "The base URL at which can be used to URLs for the client." }, "hashAlgorithm": { "enum": [ "HASH_ALGORITHM_UNSPECIFIED", "SHA2_256", "SHA2_384", "SHA2_512", "SHA3_256", "SHA3_384" ], "type": "string", "title": "This package defines commonly used message types within the Sigstore\n community.", "description": "This package defines commonly used message types within the Sigstore community. Only a subset of the secure hash standard algorithms are supported. See \u003chttps://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.180-4.pdf\u003e for more details. UNSPECIFIED SHOULD not be used, primary reason for inclusion is to force any proto JSON serialization to emit the used hash algorithm, as default option is to *omit* the default value of an enum (which is the first value, represented by '0'." }, "publicKey": { "$ref": "#/definitions/dev.sigstore.common.v1.PublicKey", "additionalProperties": false, "description": "The public key used to verify signatures generated by the log. This attribute contains the signature algorithm used by the log." }, "logId": { "$ref": "#/definitions/dev.sigstore.common.v1.LogId", "additionalProperties": false, "description": "The unique identifier for this transparency log. Represented as the SHA-256 hash of the log's public key, calculated over the DER encoding of the key represented as SubjectPublicKeyInfo. See https://www.rfc-editor.org/rfc/rfc6962#section-3.2" }, "checkpointKeyId": { "$ref": "#/definitions/dev.sigstore.common.v1.LogId", "additionalProperties": false, "description": "The checkpoint key identifier for the log used in a checkpoint. Optional, not provided for logs that do not generate checkpoints. For logs that do generate checkpoints, if not set, assume log_id equals checkpoint_key_id. 
Follows the specification described here for ECDSA and Ed25519 signatures: https://github.com/C2SP/C2SP/blob/main/signed-note.md#signatures For RSA signatures, the key ID will match the ECDSA format, the hashed DER-encoded SPKI public key. Publicly witnessed logs MUST NOT use RSA-signed checkpoints, since witnesses do not support RSA signatures. This is provided for convenience. Clients can also calculate the checkpoint key ID given the log's public key. SHOULD be set for logs generating Ed25519 signatures. SHOULD be 4 bytes long, as a truncated hash." } }, "additionalProperties": false, "type": "object", "title": "Transparency Log Instance", "description": "TransparencyLogInstance describes the immutable parameters from a transparency log. See https://www.rfc-editor.org/rfc/rfc9162.html#name-log-parameters for more details. The included parameters are the minimal set required to identify a log, and verify an inclusion proof/promise." }, "dev.sigstore.common.v1.LogId": { "properties": { "keyId": { "type": "string", "description": "The unique identity of the log, represented by its public key.", "format": "binary", "binaryEncoding": "base64" } }, "additionalProperties": false, "type": "object", "title": "Log Id", "description": "LogId captures the identity of a transparency log." }, "dev.sigstore.common.v1.PublicKey": { "properties": { "rawBytes": { "type": "string", "description": "DER-encoded public key, encoding method is specified by the key_details attribute.", "format": "binary", "binaryEncoding": "base64" }, "keyDetails": { "enum": [ "PUBLIC_KEY_DETAILS_UNSPECIFIED", "PKCS1_RSA_PKCS1V5", "PKCS1_RSA_PSS", "PKIX_RSA_PKCS1V5", "PKIX_RSA_PSS", "PKIX_RSA_PKCS1V15_2048_SHA256", "PKIX_RSA_PKCS1V15_3072_SHA256", "PKIX_RSA_PKCS1V15_4096_SHA256", "PKIX_RSA_PSS_2048_SHA256", "PKIX_RSA_PSS_3072_SHA256", "PKIX_RSA_PSS_4096_SHA256", "PKIX_ECDSA_P256_HMAC_SHA_256", "PKIX_ECDSA_P256_SHA_256", "PKIX_ECDSA_P384_SHA_384", "PKIX_ECDSA_P521_SHA_512", "PKIX_ED25519", "PKIX_ED25519_PH", "PKIX_ECDSA_P384_SHA_256", "PKIX_ECDSA_P521_SHA_256", "LMS_SHA256", "LMOTS_SHA256" ], "type": "string", "title": "Public Key Details", "description": "Details of a specific public key, capturing the the key encoding method, and signature algorithm. PublicKeyDetails captures the public key/hash algorithm combinations recommended in the Sigstore ecosystem. This is modelled as a linear set as we want to provide a small number of opinionated options instead of allowing every possible permutation. Any changes to this enum MUST be reflected in the algorithm registry. See: docs/algorithm-registry.md To avoid the possibility of contradicting formats such as PKCS1 with ED25519 the valid permutations are listed as a linear set instead of a cartesian set (i.e one combined variable instead of two, one for encoding and one for the signature algorithm)." }, "validFor": { "$ref": "#/definitions/dev.sigstore.common.v1.TimeRange", "additionalProperties": false, "description": "Optional validity period for this key, *inclusive* of the endpoints." 
} }, "additionalProperties": false, "type": "object", "oneOf": [ { "required": [ "raw_bytes" ] }, { "required": [ "valid_for" ] } ], "title": "Public Key" }, "dev.sigstore.common.v1.TimeRange": { "properties": { "start": { "type": "string", "format": "date-time" }, "end": { "type": "string", "format": "date-time" } }, "additionalProperties": false, "type": "object", "oneOf": [ { "required": [ "end" ] } ], "title": "Time Range", "description": "The time range is closed and includes both the start and end times, (i.e., [start, end]). End is optional to be able to capture a period that has started but has no known end." } } }protobuf-specs-0.4.1/gen/jsonschema/schemas/TrustedRoot.schema.json000066400000000000000000000373001477352757300254740ustar00rootroot00000000000000{ "$schema": "http://json-schema.org/draft-04/schema#", "$ref": "#/definitions/TrustedRoot", "definitions": { "TrustedRoot": { "properties": { "mediaType": { "type": "string", "description": "MUST be application/vnd.dev.sigstore.trustedroot.v0.1+json when encoded as JSON. Clients MUST be able to process and parse content with the media type defined in the old format: application/vnd.dev.sigstore.trustedroot+json;version=0.1" }, "tlogs": { "items": { "$ref": "#/definitions/dev.sigstore.trustroot.v1.TransparencyLogInstance" }, "additionalProperties": false, "type": "array", "description": "A set of trusted Rekor servers." }, "certificateAuthorities": { "items": { "$ref": "#/definitions/dev.sigstore.trustroot.v1.CertificateAuthority" }, "additionalProperties": false, "type": "array", "description": "A set of trusted certificate authorities (e.g Fulcio), and any intermediate certificates they provide. If a CA is issuing multiple intermediate certificate, each combination shall be represented as separate chain. I.e, a single root cert may appear in multiple chains but with different intermediate and/or leaf certificates. The certificates are intended to be used for verifying artifact signatures." }, "ctlogs": { "items": { "$ref": "#/definitions/dev.sigstore.trustroot.v1.TransparencyLogInstance" }, "additionalProperties": false, "type": "array", "description": "A set of trusted certificate transparency logs." }, "timestampAuthorities": { "items": { "$ref": "#/definitions/dev.sigstore.trustroot.v1.CertificateAuthority" }, "additionalProperties": false, "type": "array", "description": "A set of trusted timestamping authorities." } }, "additionalProperties": false, "type": "object", "title": "Trusted Root", "description": "TrustedRoot describes the client's complete set of trusted entities. How the TrustedRoot is populated is not specified, but can be a combination of many sources such as TUF repositories, files on disk etc. The TrustedRoot is not meant to be used for any artifact verification, only to capture the complete/global set of trusted verification materials. When verifying an artifact, based on the artifact and policies, a selection of keys/authorities are expected to be extracted and provided to the verification function. This way the set of keys/authorities can be kept to a minimal set by the policy to gain better control over what signatures that are allowed. The embedded transparency logs, CT logs, CAs and TSAs MUST include any previously used instance -- otherwise signatures made in the past cannot be verified. All the listed instances SHOULD be sorted by the 'valid_for' in ascending order, that is, the oldest instance first. Only the last instance is allowed to have their 'end' timestamp unset. 
All previous instances MUST have a closed interval of validity. The last instance MAY have a closed interval. Clients MUST accept instances that overlap in time; if not, clients may experience problems during rotations of verification materials. To be able to manage planned rotations of either transparency logs or certificate authorities, clients MUST accept lists of instances where the last instance has a 'valid_for' that belongs to the future. This should not be a problem as clients SHOULD first seek the trust root for a suitable instance before creating a per artifact trust root (that is, a sub-set of the complete trust root) that is used for verification." }, "dev.sigstore.common.v1.DistinguishedName": { "properties": { "organization": { "type": "string" }, "commonName": { "type": "string" } }, "additionalProperties": false, "type": "object", "title": "Distinguished Name" }, "dev.sigstore.common.v1.LogId": { "properties": { "keyId": { "type": "string", "description": "The unique identity of the log, represented by its public key.", "format": "binary", "binaryEncoding": "base64" } }, "additionalProperties": false, "type": "object", "title": "Log Id", "description": "LogId captures the identity of a transparency log." }, "dev.sigstore.common.v1.PublicKey": { "properties": { "rawBytes": { "type": "string", "description": "DER-encoded public key, encoding method is specified by the key_details attribute.", "format": "binary", "binaryEncoding": "base64" }, "keyDetails": { "enum": [ "PUBLIC_KEY_DETAILS_UNSPECIFIED", "PKCS1_RSA_PKCS1V5", "PKCS1_RSA_PSS", "PKIX_RSA_PKCS1V5", "PKIX_RSA_PSS", "PKIX_RSA_PKCS1V15_2048_SHA256", "PKIX_RSA_PKCS1V15_3072_SHA256", "PKIX_RSA_PKCS1V15_4096_SHA256", "PKIX_RSA_PSS_2048_SHA256", "PKIX_RSA_PSS_3072_SHA256", "PKIX_RSA_PSS_4096_SHA256", "PKIX_ECDSA_P256_HMAC_SHA_256", "PKIX_ECDSA_P256_SHA_256", "PKIX_ECDSA_P384_SHA_384", "PKIX_ECDSA_P521_SHA_512", "PKIX_ED25519", "PKIX_ED25519_PH", "PKIX_ECDSA_P384_SHA_256", "PKIX_ECDSA_P521_SHA_256", "LMS_SHA256", "LMOTS_SHA256" ], "type": "string", "title": "Public Key Details", "description": "Details of a specific public key, capturing the key encoding method and signature algorithm. PublicKeyDetails captures the public key/hash algorithm combinations recommended in the Sigstore ecosystem. This is modelled as a linear set as we want to provide a small number of opinionated options instead of allowing every possible permutation. Any changes to this enum MUST be reflected in the algorithm registry. See: docs/algorithm-registry.md To avoid the possibility of contradicting formats such as PKCS1 with ED25519, the valid permutations are listed as a linear set instead of a cartesian set (i.e. one combined variable instead of two, one for encoding and one for the signature algorithm)." }, "validFor": { "$ref": "#/definitions/dev.sigstore.common.v1.TimeRange", "additionalProperties": false, "description": "Optional validity period for this key, *inclusive* of the endpoints." } }, "additionalProperties": false, "type": "object", "oneOf": [ { "required": [ "raw_bytes" ] }, { "required": [ "valid_for" ] } ], "title": "Public Key" }, "dev.sigstore.common.v1.TimeRange": { "properties": { "start": { "type": "string", "format": "date-time" }, "end": { "type": "string", "format": "date-time" } }, "additionalProperties": false, "type": "object", "oneOf": [ { "required": [ "end" ] } ], "title": "Time Range", "description": "The time range is closed and includes both the start and end times, (i.e., [start, end]).
End is optional to be able to capture a period that has started but has no known end." }, "dev.sigstore.common.v1.X509Certificate": { "properties": { "rawBytes": { "type": "string", "description": "DER-encoded X.509 certificate.", "format": "binary", "binaryEncoding": "base64" } }, "additionalProperties": false, "type": "object", "title": "X 509 Certificate" }, "dev.sigstore.common.v1.X509CertificateChain": { "properties": { "certificates": { "items": { "$ref": "#/definitions/dev.sigstore.common.v1.X509Certificate" }, "additionalProperties": false, "type": "array", "description": "One or more DER-encoded certificates. In some contexts (such as `VerificationMaterial.x509_certificate_chain`), this sequence has an imposed order. Unless explicitly specified, there is otherwise no guaranteed order." } }, "additionalProperties": false, "type": "object", "title": "X 509 Certificate Chain", "description": "A collection of X.509 certificates. This \"chain\" can be used in multiple contexts, such as providing a root CA certificate within a TUF root of trust or multiple untrusted certificates for the purpose of chain building." }, "dev.sigstore.trustroot.v1.CertificateAuthority": { "properties": { "subject": { "$ref": "#/definitions/dev.sigstore.common.v1.DistinguishedName", "additionalProperties": false, "description": "The root certificate MUST be self-signed, and so the subject and issuer are the same." }, "uri": { "type": "string", "description": "The URI identifies the certificate authority. It is RECOMMENDED that the URI is the base URL for the certificate authority, that can be provided to any SDK/client provided by the certificate authority to interact with the certificate authority." }, "certChain": { "$ref": "#/definitions/dev.sigstore.common.v1.X509CertificateChain", "additionalProperties": false, "description": "The certificate chain for this CA. The last certificate in the chain MUST be the trust anchor. The trust anchor MAY be a self-signed root CA certificate or MAY be an intermediate CA certificate." }, "validFor": { "$ref": "#/definitions/dev.sigstore.common.v1.TimeRange", "additionalProperties": false, "description": "The time the *entire* chain was valid. This is at max the longest interval when *all* certificates in the chain were valid, but it MAY be shorter. Clients MUST check timestamps against *both* the `valid_for` time range *and* the entire certificate chain. The TimeRange should be considered valid *inclusive* of the endpoints." } }, "additionalProperties": false, "type": "object", "title": "Certificate Authority", "description": "CertificateAuthority enlists the information required to identify which CA to use and perform signature verification." }, "dev.sigstore.trustroot.v1.TransparencyLogInstance": { "properties": { "baseUrl": { "type": "string", "description": "The base URL at which can be used to URLs for the client." }, "hashAlgorithm": { "enum": [ "HASH_ALGORITHM_UNSPECIFIED", "SHA2_256", "SHA2_384", "SHA2_512", "SHA3_256", "SHA3_384" ], "type": "string", "title": "This package defines commonly used message types within the Sigstore\n community.", "description": "This package defines commonly used message types within the Sigstore community. Only a subset of the secure hash standard algorithms are supported. See \u003chttps://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.180-4.pdf\u003e for more details. 
UNSPECIFIED SHOULD not be used, primary reason for inclusion is to force any proto JSON serialization to emit the used hash algorithm, as default option is to *omit* the default value of an enum (which is the first value, represented by '0'." }, "publicKey": { "$ref": "#/definitions/dev.sigstore.common.v1.PublicKey", "additionalProperties": false, "description": "The public key used to verify signatures generated by the log. This attribute contains the signature algorithm used by the log." }, "logId": { "$ref": "#/definitions/dev.sigstore.common.v1.LogId", "additionalProperties": false, "description": "The unique identifier for this transparency log. Represented as the SHA-256 hash of the log's public key, calculated over the DER encoding of the key represented as SubjectPublicKeyInfo. See https://www.rfc-editor.org/rfc/rfc6962#section-3.2" }, "checkpointKeyId": { "$ref": "#/definitions/dev.sigstore.common.v1.LogId", "additionalProperties": false, "description": "The checkpoint key identifier for the log used in a checkpoint. Optional, not provided for logs that do not generate checkpoints. For logs that do generate checkpoints, if not set, assume log_id equals checkpoint_key_id. Follows the specification described here for ECDSA and Ed25519 signatures: https://github.com/C2SP/C2SP/blob/main/signed-note.md#signatures For RSA signatures, the key ID will match the ECDSA format, the hashed DER-encoded SPKI public key. Publicly witnessed logs MUST NOT use RSA-signed checkpoints, since witnesses do not support RSA signatures. This is provided for convenience. Clients can also calculate the checkpoint key ID given the log's public key. SHOULD be set for logs generating Ed25519 signatures. SHOULD be 4 bytes long, as a truncated hash." } }, "additionalProperties": false, "type": "object", "title": "Transparency Log Instance", "description": "TransparencyLogInstance describes the immutable parameters from a transparency log. See https://www.rfc-editor.org/rfc/rfc9162.html#name-log-parameters for more details. The included parameters are the minimal set required to identify a log, and verify an inclusion proof/promise." } } }protobuf-specs-0.4.1/gen/jsonschema/schemas/VerificationMaterial.schema.json000066400000000000000000000412151477352757300272770ustar00rootroot00000000000000{ "$schema": "http://json-schema.org/draft-04/schema#", "$ref": "#/definitions/VerificationMaterial", "definitions": { "VerificationMaterial": { "properties": { "publicKey": { "$ref": "#/definitions/dev.sigstore.common.v1.PublicKeyIdentifier", "additionalProperties": false }, "x509CertificateChain": { "$ref": "#/definitions/dev.sigstore.common.v1.X509CertificateChain", "additionalProperties": false }, "certificate": { "$ref": "#/definitions/dev.sigstore.common.v1.X509Certificate", "additionalProperties": false }, "tlogEntries": { "items": { "$ref": "#/definitions/dev.sigstore.rekor.v1.TransparencyLogEntry" }, "additionalProperties": false, "type": "array", "description": "An inclusion proof and an optional signed timestamp from the log. Client verification libraries MAY provide an option to support v0.1 bundles for backwards compatibility, which may contain an inclusion promise and not an inclusion proof. In this case, the client MUST validate the promise. Verifiers SHOULD NOT allow v0.1 bundles if they're used in an ecosystem which never produced them." 
}, "timestampVerificationData": { "$ref": "#/definitions/dev.sigstore.bundle.v1.TimestampVerificationData", "additionalProperties": false, "description": "Timestamp may also come from tlog_entries.inclusion_promise.signed_entry_timestamp." } }, "additionalProperties": false, "type": "object", "oneOf": [ { "required": [ "public_key" ] }, { "required": [ "x509_certificate_chain" ] }, { "required": [ "certificate" ] } ], "title": "Verification Material", "description": "VerificationMaterial captures details on the materials used to verify signatures. This message may be embedded in a DSSE envelope as a signature extension. Specifically, the `ext` field of the extension will expect this message when the signature extension is for Sigstore. This is identified by the `kind` field in the extension, which must be set to application/vnd.dev.sigstore.verificationmaterial;version=0.1 for Sigstore. When used as a DSSE extension, if the `public_key` field is used to indicate the key identifier, it MUST match the `keyid` field of the signature the extension is attached to." }, "dev.sigstore.bundle.v1.TimestampVerificationData": { "properties": { "rfc3161Timestamps": { "items": { "$ref": "#/definitions/dev.sigstore.common.v1.RFC3161SignedTimestamp" }, "additionalProperties": false, "type": "array", "description": "A list of RFC3161 signed timestamps provided by the user. This can be used when the entry has not been stored on a transparency log, or in conjunction for a stronger trust model. Clients MUST verify the hashed message in the message imprint against the signature in the bundle." } }, "additionalProperties": false, "type": "object", "title": "Notes on versioning.\n The primary message ('Bundle') MUST be versioned, by populating the\n 'media_type' field. Semver-ish (only major/minor versions) scheme MUST\n be used. The current version as specified by this file is:\n application/vnd.dev.sigstore.bundle.v0.3+json\n The semantic version is thus '0.3'.", "description": "Notes on versioning. The primary message ('Bundle') MUST be versioned, by populating the 'media_type' field. Semver-ish (only major/minor versions) scheme MUST be used. The current version as specified by this file is: application/vnd.dev.sigstore.bundle.v0.3+json The semantic version is thus '0.3'. Various timestamped counter signatures over the artifacts signature. Currently only RFC3161 signatures are provided. More formats may be added in the future." }, "dev.sigstore.common.v1.LogId": { "properties": { "keyId": { "type": "string", "description": "The unique identity of the log, represented by its public key.", "format": "binary", "binaryEncoding": "base64" } }, "additionalProperties": false, "type": "object", "title": "Log Id", "description": "LogId captures the identity of a transparency log." }, "dev.sigstore.common.v1.PublicKeyIdentifier": { "properties": { "hint": { "type": "string", "description": "Optional unauthenticated hint on which key to use. The format of the hint must be agreed upon out of band by the signer and the verifiers, and so is not subject to this specification. Example use-case is to specify the public key to use, from a trusted key-ring. Implementors are RECOMMENDED to derive the value from the public key as described in RFC 6962. See: \u003chttps://www.rfc-editor.org/rfc/rfc6962#section-3.2\u003e" } }, "additionalProperties": false, "type": "object", "title": "Public Key Identifier", "description": "PublicKeyIdentifier can be used to identify an (out of band) delivered key, to verify a signature." 
}, "dev.sigstore.common.v1.RFC3161SignedTimestamp": { "properties": { "signedTimestamp": { "type": "string", "description": "Signed timestamp is the DER encoded TimeStampResponse. See https://www.rfc-editor.org/rfc/rfc3161.html#section-2.4.2", "format": "binary", "binaryEncoding": "base64" } }, "additionalProperties": false, "type": "object", "title": "RFC 3161 Signed Timestamp", "description": "This message holds a RFC 3161 timestamp." }, "dev.sigstore.common.v1.X509Certificate": { "properties": { "rawBytes": { "type": "string", "description": "DER-encoded X.509 certificate.", "format": "binary", "binaryEncoding": "base64" } }, "additionalProperties": false, "type": "object", "title": "X 509 Certificate" }, "dev.sigstore.common.v1.X509CertificateChain": { "properties": { "certificates": { "items": { "$ref": "#/definitions/dev.sigstore.common.v1.X509Certificate" }, "additionalProperties": false, "type": "array", "description": "One or more DER-encoded certificates. In some contexts (such as `VerificationMaterial.x509_certificate_chain`), this sequence has an imposed order. Unless explicitly specified, there is otherwise no guaranteed order." } }, "additionalProperties": false, "type": "object", "title": "X 509 Certificate Chain", "description": "A collection of X.509 certificates. This \"chain\" can be used in multiple contexts, such as providing a root CA certificate within a TUF root of trust or multiple untrusted certificates for the purpose of chain building." }, "dev.sigstore.rekor.v1.Checkpoint": { "properties": { "envelope": { "type": "string" } }, "additionalProperties": false, "type": "object", "title": "Checkpoint", "description": "The checkpoint MUST contain an origin string as a unique log identifier, the tree size, and the root hash. It MAY also be followed by optional data, and clients MUST NOT assume optional data. The checkpoint MUST also contain a signature over the root hash (tree head). The checkpoint MAY contain additional signatures, but the first SHOULD be the signature from the log. Checkpoint contents are concatenated with newlines into a single string. The checkpoint format is described in https://github.com/transparency-dev/formats/blob/main/log/README.md and https://github.com/C2SP/C2SP/blob/main/tlog-checkpoint.md. An example implementation can be found in https://github.com/sigstore/rekor/blob/main/pkg/util/signed_note.go" }, "dev.sigstore.rekor.v1.InclusionPromise": { "properties": { "signedEntryTimestamp": { "type": "string", "format": "binary", "binaryEncoding": "base64" } }, "additionalProperties": false, "type": "object", "title": "Inclusion Promise", "description": "The inclusion promise is calculated by Rekor. It's calculated as a signature over a canonical JSON serialization of the persisted entry, the log ID, log index and the integration timestamp. See https://github.com/sigstore/rekor/blob/a6e58f72b6b18cc06cefe61808efd562b9726330/pkg/api/entries.go#L54 The format of the signature depends on the transparency log's public key. If the signature algorithm requires a hash function and/or a signature scheme (e.g. RSA) those has to be retrieved out-of-band from the log's operators, together with the public key. This is used to verify the integration timestamp's value and that the log has promised to include the entry." }, "dev.sigstore.rekor.v1.InclusionProof": { "properties": { "logIndex": { "type": "string", "description": "The index of the entry in the tree it was written to." 
}, "rootHash": { "type": "string", "description": "The hash digest stored at the root of the merkle tree at the time the proof was generated.", "format": "binary", "binaryEncoding": "base64" }, "treeSize": { "type": "string", "description": "The size of the merkle tree at the time the proof was generated." }, "hashes": { "items": { "type": "string" }, "type": "array", "description": "A list of hashes required to compute the inclusion proof, sorted in order from leaf to root. Note that leaf and root hashes are not included. The root hash is available separately in this message, and the leaf hash should be calculated by the client.", "format": "binary", "binaryEncoding": "base64" }, "checkpoint": { "$ref": "#/definitions/dev.sigstore.rekor.v1.Checkpoint", "additionalProperties": false, "description": "Signature of the tree head, as of the time of this proof was generated. See above info on 'Checkpoint' for more details." } }, "additionalProperties": false, "type": "object", "title": "Inclusion Proof", "description": "InclusionProof is the proof returned from the transparency log. Can be used for offline or online verification against the log." }, "dev.sigstore.rekor.v1.KindVersion": { "properties": { "kind": { "type": "string", "description": "Kind is the type of entry being stored in the log. See here for a list: https://github.com/sigstore/rekor/tree/main/pkg/types" }, "version": { "type": "string", "description": "The specific api version of the type." } }, "additionalProperties": false, "type": "object", "title": "Kind Version", "description": "KindVersion contains the entry's kind and api version." }, "dev.sigstore.rekor.v1.TransparencyLogEntry": { "properties": { "logIndex": { "type": "string", "description": "The global index of the entry, used when querying the log by index." }, "logId": { "$ref": "#/definitions/dev.sigstore.common.v1.LogId", "additionalProperties": false, "description": "The unique identifier of the log." }, "kindVersion": { "$ref": "#/definitions/dev.sigstore.rekor.v1.KindVersion", "additionalProperties": false, "description": "The kind (type) and version of the object associated with this entry. These values are required to construct the entry during verification." }, "integratedTime": { "type": "string", "description": "The UNIX timestamp from the log when the entry was persisted. The integration time MUST NOT be trusted if inclusion_promise is omitted." }, "inclusionPromise": { "$ref": "#/definitions/dev.sigstore.rekor.v1.InclusionPromise", "additionalProperties": false, "description": "The inclusion promise/signed entry timestamp from the log. Required for v0.1 bundles, and MUST be verified. Optional for \u003e= v0.2 bundles if another suitable source of time is present (such as another source of signed time, or the current system time for long-lived certificates). MUST be verified if no other suitable source of time is present, and SHOULD be verified otherwise." }, "inclusionProof": { "$ref": "#/definitions/dev.sigstore.rekor.v1.InclusionProof", "additionalProperties": false, "description": "The inclusion proof can be used for offline or online verification that the entry was appended to the log, and that the log has not been altered." }, "canonicalizedBody": { "type": "string", "description": "Optional. The canonicalized transparency log entry, used to reconstruct the Signed Entry Timestamp (SET) during verification. 
The contents of this field are the same as the `body` field in a Rekor response, meaning that it does **not** include the \"full\" canonicalized form (of log index, ID, etc.) which are exposed as separate fields. The verifier is responsible for combining the `canonicalized_body`, `log_index`, `log_id`, and `integrated_time` into the payload that the SET's signature is generated over. This field is intended to be used in cases where the SET cannot be produced determinisitically (e.g. inconsistent JSON field ordering, differing whitespace, etc). If set, clients MUST verify that the signature referenced in the `canonicalized_body` matches the signature provided in the `Bundle.content`. If not set, clients are responsible for constructing an equivalent payload from other sources to verify the signature.", "format": "binary", "binaryEncoding": "base64" } }, "additionalProperties": false, "type": "object", "title": "Transparency Log Entry", "description": "TransparencyLogEntry captures all the details required from Rekor to reconstruct an entry, given that the payload is provided via other means. This type can easily be created from the existing response from Rekor. Future iterations could rely on Rekor returning the minimal set of attributes (excluding the payload) that are required for verifying the inclusion promise. The inclusion promise (called SignedEntryTimestamp in the response from Rekor) is similar to a Signed Certificate Timestamp as described here https://www.rfc-editor.org/rfc/rfc6962.html#section-3.2." } } }protobuf-specs-0.4.1/gen/jsonschema/schemas/X509Certificate.schema.json000066400000000000000000000010731477352757300260040ustar00rootroot00000000000000{ "$schema": "http://json-schema.org/draft-04/schema#", "$ref": "#/definitions/X509Certificate", "definitions": { "X509Certificate": { "properties": { "rawBytes": { "type": "string", "description": "DER-encoded X.509 certificate.", "format": "binary", "binaryEncoding": "base64" } }, "additionalProperties": false, "type": "object", "title": "X 509 Certificate" } } }protobuf-specs-0.4.1/gen/jsonschema/schemas/X509CertificateChain.schema.json000066400000000000000000000030721477352757300267500ustar00rootroot00000000000000{ "$schema": "http://json-schema.org/draft-04/schema#", "$ref": "#/definitions/X509CertificateChain", "definitions": { "X509CertificateChain": { "properties": { "certificates": { "items": { "$ref": "#/definitions/dev.sigstore.common.v1.X509Certificate" }, "additionalProperties": false, "type": "array", "description": "One or more DER-encoded certificates. In some contexts (such as `VerificationMaterial.x509_certificate_chain`), this sequence has an imposed order. Unless explicitly specified, there is otherwise no guaranteed order." } }, "additionalProperties": false, "type": "object", "title": "X 509 Certificate Chain", "description": "A collection of X.509 certificates. This \"chain\" can be used in multiple contexts, such as providing a root CA certificate within a TUF root of trust or multiple untrusted certificates for the purpose of chain building." 
}, "dev.sigstore.common.v1.X509Certificate": { "properties": { "rawBytes": { "type": "string", "description": "DER-encoded X.509 certificate.", "format": "binary", "binaryEncoding": "base64" } }, "additionalProperties": false, "type": "object", "title": "X 509 Certificate" } } }protobuf-specs-0.4.1/gen/pb-go/000077500000000000000000000000001477352757300162705ustar00rootroot00000000000000protobuf-specs-0.4.1/gen/pb-go/bundle/000077500000000000000000000000001477352757300175415ustar00rootroot00000000000000protobuf-specs-0.4.1/gen/pb-go/bundle/v1/000077500000000000000000000000001477352757300200675ustar00rootroot00000000000000protobuf-specs-0.4.1/gen/pb-go/bundle/v1/sigstore_bundle.pb.go000066400000000000000000000610241477352757300242110ustar00rootroot00000000000000// Copyright 2022 The Sigstore Authors. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // Code generated by protoc-gen-go. DO NOT EDIT. // versions: // protoc-gen-go v1.36.5 // protoc v5.29.4 // source: sigstore_bundle.proto package v1 import ( v1 "github.com/sigstore/protobuf-specs/gen/pb-go/common/v1" dsse "github.com/sigstore/protobuf-specs/gen/pb-go/dsse" v11 "github.com/sigstore/protobuf-specs/gen/pb-go/rekor/v1" _ "google.golang.org/genproto/googleapis/api/annotations" protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" reflect "reflect" sync "sync" unsafe "unsafe" ) const ( // Verify that this generated code is sufficiently up-to-date. _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) // Verify that runtime/protoimpl is sufficiently up-to-date. _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) ) // Various timestamped counter signatures over the artifacts signature. // Currently only RFC3161 signatures are provided. More formats may be added // in the future. type TimestampVerificationData struct { state protoimpl.MessageState `protogen:"open.v1"` // A list of RFC3161 signed timestamps provided by the user. // This can be used when the entry has not been stored on a // transparency log, or in conjunction for a stronger trust model. // Clients MUST verify the hashed message in the message imprint // against the signature in the bundle. 
Rfc3161Timestamps []*v1.RFC3161SignedTimestamp `protobuf:"bytes,1,rep,name=rfc3161_timestamps,json=rfc3161Timestamps,proto3" json:"rfc3161_timestamps,omitempty"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } func (x *TimestampVerificationData) Reset() { *x = TimestampVerificationData{} mi := &file_sigstore_bundle_proto_msgTypes[0] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } func (x *TimestampVerificationData) String() string { return protoimpl.X.MessageStringOf(x) } func (*TimestampVerificationData) ProtoMessage() {} func (x *TimestampVerificationData) ProtoReflect() protoreflect.Message { mi := &file_sigstore_bundle_proto_msgTypes[0] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use TimestampVerificationData.ProtoReflect.Descriptor instead. func (*TimestampVerificationData) Descriptor() ([]byte, []int) { return file_sigstore_bundle_proto_rawDescGZIP(), []int{0} } func (x *TimestampVerificationData) GetRfc3161Timestamps() []*v1.RFC3161SignedTimestamp { if x != nil { return x.Rfc3161Timestamps } return nil } // VerificationMaterial captures details on the materials used to verify // signatures. This message may be embedded in a DSSE envelope as a signature // extension. Specifically, the `ext` field of the extension will expect this // message when the signature extension is for Sigstore. This is identified by // the `kind` field in the extension, which must be set to // application/vnd.dev.sigstore.verificationmaterial;version=0.1 for Sigstore. // When used as a DSSE extension, if the `public_key` field is used to indicate // the key identifier, it MUST match the `keyid` field of the signature the // extension is attached to. type VerificationMaterial struct { state protoimpl.MessageState `protogen:"open.v1"` // The key material for verification purposes. // // This allows key material to be conveyed in one of three forms: // // 1. An unspecified public key identifier, for retrieving a key // from an out-of-band mechanism (such as a keyring); // // 2. A sequence of one or more X.509 certificates, of which the first member // MUST be a leaf certificate conveying the signing key. Subsequent members // SHOULD be in issuing order, meaning that `n + 1` should be an issuer for `n`. // // Signers MUST NOT include root CA certificates in bundles, and SHOULD NOT // include intermediate CA certificates that appear in an independent root of trust // (such as the Public Good Instance's trusted root). // // Verifiers MUST validate the chain carefully to ensure that it chains up // to a CA certificate that they independently trust. Verifiers SHOULD // handle old or non-complying bundles that have superfluous intermediate and/or // root CA certificates by either ignoring them or explicitly considering them // untrusted for the purposes of chain building. // // 3. A single X.509 certificate, which MUST be a leaf certificate conveying // the signing key. // // When used with the Public Good Instance (PGI) of Sigstore for "keyless" signing // via Fulcio, form (1) MUST NOT be used, regardless of bundle version. Form (1) // MAY be used with the PGI for self-managed keys. // // When used in a `0.1` or `0.2` bundle with the PGI and "keyless" signing, // form (2) MUST be used. // // When used in a `0.3` bundle with the PGI and "keyless" signing, // form (3) MUST be used. 
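	// As a minimal illustrative sketch (not part of the generated code), a
	// verifier might branch over the three forms like this, assuming `vm` is a
	// *VerificationMaterial and the certificate chain is non-empty:
	//
	//	switch c := vm.GetContent().(type) {
	//	case *VerificationMaterial_Certificate:
	//		leafDER = c.Certificate.GetRawBytes()
	//	case *VerificationMaterial_X509CertificateChain:
	//		leafDER = c.X509CertificateChain.GetCertificates()[0].GetRawBytes()
	//	case *VerificationMaterial_PublicKey:
	//		keyHint = c.PublicKey.GetHint()
	//	}
	//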
// // Types that are valid to be assigned to Content: // // *VerificationMaterial_PublicKey // *VerificationMaterial_X509CertificateChain // *VerificationMaterial_Certificate Content isVerificationMaterial_Content `protobuf_oneof:"content"` // An inclusion proof and an optional signed timestamp from the log. // Client verification libraries MAY provide an option to support v0.1 // bundles for backwards compatibility, which may contain an inclusion // promise and not an inclusion proof. In this case, the client MUST // validate the promise. // Verifiers SHOULD NOT allow v0.1 bundles if they're used in an // ecosystem which never produced them. TlogEntries []*v11.TransparencyLogEntry `protobuf:"bytes,3,rep,name=tlog_entries,json=tlogEntries,proto3" json:"tlog_entries,omitempty"` // Timestamp may also come from // tlog_entries.inclusion_promise.signed_entry_timestamp. TimestampVerificationData *TimestampVerificationData `protobuf:"bytes,4,opt,name=timestamp_verification_data,json=timestampVerificationData,proto3" json:"timestamp_verification_data,omitempty"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } func (x *VerificationMaterial) Reset() { *x = VerificationMaterial{} mi := &file_sigstore_bundle_proto_msgTypes[1] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } func (x *VerificationMaterial) String() string { return protoimpl.X.MessageStringOf(x) } func (*VerificationMaterial) ProtoMessage() {} func (x *VerificationMaterial) ProtoReflect() protoreflect.Message { mi := &file_sigstore_bundle_proto_msgTypes[1] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use VerificationMaterial.ProtoReflect.Descriptor instead. 
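// A minimal illustrative sketch (not part of the generated code) of the
// tlog_entries requirement described on the field above, assuming `vm` is a
// *VerificationMaterial:
//
//	for _, e := range vm.GetTlogEntries() {
//		if e.GetInclusionProof() == nil && e.GetInclusionPromise() == nil {
//			return errors.New("tlog entry carries neither an inclusion proof nor an inclusion promise")
//		}
//	}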
func (*VerificationMaterial) Descriptor() ([]byte, []int) { return file_sigstore_bundle_proto_rawDescGZIP(), []int{1} } func (x *VerificationMaterial) GetContent() isVerificationMaterial_Content { if x != nil { return x.Content } return nil } func (x *VerificationMaterial) GetPublicKey() *v1.PublicKeyIdentifier { if x != nil { if x, ok := x.Content.(*VerificationMaterial_PublicKey); ok { return x.PublicKey } } return nil } func (x *VerificationMaterial) GetX509CertificateChain() *v1.X509CertificateChain { if x != nil { if x, ok := x.Content.(*VerificationMaterial_X509CertificateChain); ok { return x.X509CertificateChain } } return nil } func (x *VerificationMaterial) GetCertificate() *v1.X509Certificate { if x != nil { if x, ok := x.Content.(*VerificationMaterial_Certificate); ok { return x.Certificate } } return nil } func (x *VerificationMaterial) GetTlogEntries() []*v11.TransparencyLogEntry { if x != nil { return x.TlogEntries } return nil } func (x *VerificationMaterial) GetTimestampVerificationData() *TimestampVerificationData { if x != nil { return x.TimestampVerificationData } return nil } type isVerificationMaterial_Content interface { isVerificationMaterial_Content() } type VerificationMaterial_PublicKey struct { PublicKey *v1.PublicKeyIdentifier `protobuf:"bytes,1,opt,name=public_key,json=publicKey,proto3,oneof"` } type VerificationMaterial_X509CertificateChain struct { X509CertificateChain *v1.X509CertificateChain `protobuf:"bytes,2,opt,name=x509_certificate_chain,json=x509CertificateChain,proto3,oneof"` } type VerificationMaterial_Certificate struct { Certificate *v1.X509Certificate `protobuf:"bytes,5,opt,name=certificate,proto3,oneof"` } func (*VerificationMaterial_PublicKey) isVerificationMaterial_Content() {} func (*VerificationMaterial_X509CertificateChain) isVerificationMaterial_Content() {} func (*VerificationMaterial_Certificate) isVerificationMaterial_Content() {} type Bundle struct { state protoimpl.MessageState `protogen:"open.v1"` // MUST be application/vnd.dev.sigstore.bundle.v0.3+json when // when encoded as JSON. // Clients must to be able to accept media type using the previously // defined formats: // * application/vnd.dev.sigstore.bundle+json;version=0.1 // * application/vnd.dev.sigstore.bundle+json;version=0.2 // * application/vnd.dev.sigstore.bundle+json;version=0.3 MediaType string `protobuf:"bytes,1,opt,name=media_type,json=mediaType,proto3" json:"media_type,omitempty"` // When a signer is identified by a X.509 certificate, a verifier MUST // verify that the signature was computed at the time the certificate // was valid as described in the Sigstore client spec: "Verification // using a Bundle". // // If the verification material contains a public key identifier // (key hint) and the `content` is a DSSE envelope, the key hints // MUST be exactly the same in the verification material and in the // DSSE envelope. 
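	// A minimal illustrative sketch (not part of the generated code) of the
	// validity-window check described above, assuming `cert` is the parsed leaf
	// *x509.Certificate and `signingTime` was established from a transparency
	// log entry or a signed timestamp:
	//
	//	if signingTime.Before(cert.NotBefore) || signingTime.After(cert.NotAfter) {
	//		return errors.New("signature was not produced within the certificate validity period")
	//	}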
VerificationMaterial *VerificationMaterial `protobuf:"bytes,2,opt,name=verification_material,json=verificationMaterial,proto3" json:"verification_material,omitempty"` // Types that are valid to be assigned to Content: // // *Bundle_MessageSignature // *Bundle_DsseEnvelope Content isBundle_Content `protobuf_oneof:"content"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } func (x *Bundle) Reset() { *x = Bundle{} mi := &file_sigstore_bundle_proto_msgTypes[2] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } func (x *Bundle) String() string { return protoimpl.X.MessageStringOf(x) } func (*Bundle) ProtoMessage() {} func (x *Bundle) ProtoReflect() protoreflect.Message { mi := &file_sigstore_bundle_proto_msgTypes[2] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use Bundle.ProtoReflect.Descriptor instead. func (*Bundle) Descriptor() ([]byte, []int) { return file_sigstore_bundle_proto_rawDescGZIP(), []int{2} } func (x *Bundle) GetMediaType() string { if x != nil { return x.MediaType } return "" } func (x *Bundle) GetVerificationMaterial() *VerificationMaterial { if x != nil { return x.VerificationMaterial } return nil } func (x *Bundle) GetContent() isBundle_Content { if x != nil { return x.Content } return nil } func (x *Bundle) GetMessageSignature() *v1.MessageSignature { if x != nil { if x, ok := x.Content.(*Bundle_MessageSignature); ok { return x.MessageSignature } } return nil } func (x *Bundle) GetDsseEnvelope() *dsse.Envelope { if x != nil { if x, ok := x.Content.(*Bundle_DsseEnvelope); ok { return x.DsseEnvelope } } return nil } type isBundle_Content interface { isBundle_Content() } type Bundle_MessageSignature struct { MessageSignature *v1.MessageSignature `protobuf:"bytes,3,opt,name=message_signature,json=messageSignature,proto3,oneof"` } type Bundle_DsseEnvelope struct { // A DSSE envelope can contain arbitrary payloads. // Verifiers must verify that the payload type is a // supported and expected type. This is part of the DSSE // protocol which is defined here: // // DSSE envelopes in a bundle MUST have exactly one signature. // This is a limitation from the DSSE spec, as it can contain // multiple signatures. There are two primary reasons: // 1. It simplifies the verification logic and policy // 2. The bundle (currently) can only contain a single // instance of the required verification materials // // During verification a client MUST reject an envelope if // the number of signatures is not equal to one. 
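	// A minimal illustrative sketch (not part of the generated code) of that
	// check, assuming `env` is the *dsse.Envelope carried in the bundle:
	//
	//	if n := len(env.GetSignatures()); n != 1 {
	//		return fmt.Errorf("expected exactly one signature, got %d", n)
	//	}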
DsseEnvelope *dsse.Envelope `protobuf:"bytes,4,opt,name=dsse_envelope,json=dsseEnvelope,proto3,oneof"` } func (*Bundle_MessageSignature) isBundle_Content() {} func (*Bundle_DsseEnvelope) isBundle_Content() {} var File_sigstore_bundle_proto protoreflect.FileDescriptor var file_sigstore_bundle_proto_rawDesc = string([]byte{ 0x0a, 0x15, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x5f, 0x62, 0x75, 0x6e, 0x64, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x16, 0x64, 0x65, 0x76, 0x2e, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x62, 0x75, 0x6e, 0x64, 0x6c, 0x65, 0x2e, 0x76, 0x31, 0x1a, 0x1f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x5f, 0x62, 0x65, 0x68, 0x61, 0x76, 0x69, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x0e, 0x65, 0x6e, 0x76, 0x65, 0x6c, 0x6f, 0x70, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x15, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x14, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x5f, 0x72, 0x65, 0x6b, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x7a, 0x0a, 0x19, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x56, 0x65, 0x72, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x44, 0x61, 0x74, 0x61, 0x12, 0x5d, 0x0a, 0x12, 0x72, 0x66, 0x63, 0x33, 0x31, 0x36, 0x31, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2e, 0x2e, 0x64, 0x65, 0x76, 0x2e, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x76, 0x31, 0x2e, 0x52, 0x46, 0x43, 0x33, 0x31, 0x36, 0x31, 0x53, 0x69, 0x67, 0x6e, 0x65, 0x64, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x11, 0x72, 0x66, 0x63, 0x33, 0x31, 0x36, 0x31, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x73, 0x22, 0xf4, 0x03, 0x0a, 0x14, 0x56, 0x65, 0x72, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x4d, 0x61, 0x74, 0x65, 0x72, 0x69, 0x61, 0x6c, 0x12, 0x51, 0x0a, 0x0a, 0x70, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x5f, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2b, 0x2e, 0x64, 0x65, 0x76, 0x2e, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x76, 0x31, 0x2e, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x4b, 0x65, 0x79, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x42, 0x03, 0xe0, 0x41, 0x02, 0x48, 0x00, 0x52, 0x09, 0x70, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x4b, 0x65, 0x79, 0x12, 0x69, 0x0a, 0x16, 0x78, 0x35, 0x30, 0x39, 0x5f, 0x63, 0x65, 0x72, 0x74, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x65, 0x5f, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2c, 0x2e, 0x64, 0x65, 0x76, 0x2e, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x76, 0x31, 0x2e, 0x58, 0x35, 0x30, 0x39, 0x43, 0x65, 0x72, 0x74, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x65, 0x43, 0x68, 0x61, 0x69, 0x6e, 0x42, 0x03, 0xe0, 0x41, 0x02, 0x48, 0x00, 0x52, 0x14, 0x78, 0x35, 0x30, 0x39, 0x43, 0x65, 0x72, 0x74, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x65, 0x43, 0x68, 0x61, 0x69, 0x6e, 0x12, 0x50, 0x0a, 0x0b, 0x63, 0x65, 0x72, 0x74, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x64, 0x65, 0x76, 0x2e, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x76, 0x31, 0x2e, 0x58, 0x35, 0x30, 0x39, 0x43, 0x65, 0x72, 0x74, 0x69, 0x66, 0x69, 
0x63, 0x61, 0x74, 0x65, 0x42, 0x03, 0xe0, 0x41, 0x02, 0x48, 0x00, 0x52, 0x0b, 0x63, 0x65, 0x72, 0x74, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x65, 0x12, 0x4e, 0x0a, 0x0c, 0x74, 0x6c, 0x6f, 0x67, 0x5f, 0x65, 0x6e, 0x74, 0x72, 0x69, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2b, 0x2e, 0x64, 0x65, 0x76, 0x2e, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x72, 0x65, 0x6b, 0x6f, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x63, 0x79, 0x4c, 0x6f, 0x67, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0b, 0x74, 0x6c, 0x6f, 0x67, 0x45, 0x6e, 0x74, 0x72, 0x69, 0x65, 0x73, 0x12, 0x71, 0x0a, 0x1b, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x5f, 0x76, 0x65, 0x72, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x31, 0x2e, 0x64, 0x65, 0x76, 0x2e, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x62, 0x75, 0x6e, 0x64, 0x6c, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x56, 0x65, 0x72, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x44, 0x61, 0x74, 0x61, 0x52, 0x19, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x56, 0x65, 0x72, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x44, 0x61, 0x74, 0x61, 0x42, 0x09, 0x0a, 0x07, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x22, 0xbf, 0x02, 0x0a, 0x06, 0x42, 0x75, 0x6e, 0x64, 0x6c, 0x65, 0x12, 0x1d, 0x0a, 0x0a, 0x6d, 0x65, 0x64, 0x69, 0x61, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6d, 0x65, 0x64, 0x69, 0x61, 0x54, 0x79, 0x70, 0x65, 0x12, 0x66, 0x0a, 0x15, 0x76, 0x65, 0x72, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x6d, 0x61, 0x74, 0x65, 0x72, 0x69, 0x61, 0x6c, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2c, 0x2e, 0x64, 0x65, 0x76, 0x2e, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x62, 0x75, 0x6e, 0x64, 0x6c, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x56, 0x65, 0x72, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x4d, 0x61, 0x74, 0x65, 0x72, 0x69, 0x61, 0x6c, 0x42, 0x03, 0xe0, 0x41, 0x02, 0x52, 0x14, 0x76, 0x65, 0x72, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x4d, 0x61, 0x74, 0x65, 0x72, 0x69, 0x61, 0x6c, 0x12, 0x5c, 0x0a, 0x11, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x28, 0x2e, 0x64, 0x65, 0x76, 0x2e, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x76, 0x31, 0x2e, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x53, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x42, 0x03, 0xe0, 0x41, 0x02, 0x48, 0x00, 0x52, 0x10, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x53, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x12, 0x3f, 0x0a, 0x0d, 0x64, 0x73, 0x73, 0x65, 0x5f, 0x65, 0x6e, 0x76, 0x65, 0x6c, 0x6f, 0x70, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x13, 0x2e, 0x69, 0x6f, 0x2e, 0x69, 0x6e, 0x74, 0x6f, 0x74, 0x6f, 0x2e, 0x45, 0x6e, 0x76, 0x65, 0x6c, 0x6f, 0x70, 0x65, 0x42, 0x03, 0xe0, 0x41, 0x02, 0x48, 0x00, 0x52, 0x0c, 0x64, 0x73, 0x73, 0x65, 0x45, 0x6e, 0x76, 0x65, 0x6c, 0x6f, 0x70, 0x65, 0x42, 0x09, 0x0a, 0x07, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x4a, 0x04, 0x08, 0x05, 0x10, 0x33, 0x42, 0x7c, 0x0a, 0x1c, 0x64, 0x65, 0x76, 0x2e, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x62, 0x75, 0x6e, 0x64, 0x6c, 0x65, 0x2e, 0x76, 0x31, 0x42, 0x0b, 0x42, 0x75, 0x6e, 0x64, 0x6c, 0x65, 
0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x36, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2d, 0x73, 0x70, 0x65, 0x63, 0x73, 0x2f, 0x67, 0x65, 0x6e, 0x2f, 0x70, 0x62, 0x2d, 0x67, 0x6f, 0x2f, 0x62, 0x75, 0x6e, 0x64, 0x6c, 0x65, 0x2f, 0x76, 0x31, 0xea, 0x02, 0x14, 0x53, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x3a, 0x3a, 0x42, 0x75, 0x6e, 0x64, 0x6c, 0x65, 0x3a, 0x3a, 0x56, 0x31, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, }) var ( file_sigstore_bundle_proto_rawDescOnce sync.Once file_sigstore_bundle_proto_rawDescData []byte ) func file_sigstore_bundle_proto_rawDescGZIP() []byte { file_sigstore_bundle_proto_rawDescOnce.Do(func() { file_sigstore_bundle_proto_rawDescData = protoimpl.X.CompressGZIP(unsafe.Slice(unsafe.StringData(file_sigstore_bundle_proto_rawDesc), len(file_sigstore_bundle_proto_rawDesc))) }) return file_sigstore_bundle_proto_rawDescData } var file_sigstore_bundle_proto_msgTypes = make([]protoimpl.MessageInfo, 3) var file_sigstore_bundle_proto_goTypes = []any{ (*TimestampVerificationData)(nil), // 0: dev.sigstore.bundle.v1.TimestampVerificationData (*VerificationMaterial)(nil), // 1: dev.sigstore.bundle.v1.VerificationMaterial (*Bundle)(nil), // 2: dev.sigstore.bundle.v1.Bundle (*v1.RFC3161SignedTimestamp)(nil), // 3: dev.sigstore.common.v1.RFC3161SignedTimestamp (*v1.PublicKeyIdentifier)(nil), // 4: dev.sigstore.common.v1.PublicKeyIdentifier (*v1.X509CertificateChain)(nil), // 5: dev.sigstore.common.v1.X509CertificateChain (*v1.X509Certificate)(nil), // 6: dev.sigstore.common.v1.X509Certificate (*v11.TransparencyLogEntry)(nil), // 7: dev.sigstore.rekor.v1.TransparencyLogEntry (*v1.MessageSignature)(nil), // 8: dev.sigstore.common.v1.MessageSignature (*dsse.Envelope)(nil), // 9: io.intoto.Envelope } var file_sigstore_bundle_proto_depIdxs = []int32{ 3, // 0: dev.sigstore.bundle.v1.TimestampVerificationData.rfc3161_timestamps:type_name -> dev.sigstore.common.v1.RFC3161SignedTimestamp 4, // 1: dev.sigstore.bundle.v1.VerificationMaterial.public_key:type_name -> dev.sigstore.common.v1.PublicKeyIdentifier 5, // 2: dev.sigstore.bundle.v1.VerificationMaterial.x509_certificate_chain:type_name -> dev.sigstore.common.v1.X509CertificateChain 6, // 3: dev.sigstore.bundle.v1.VerificationMaterial.certificate:type_name -> dev.sigstore.common.v1.X509Certificate 7, // 4: dev.sigstore.bundle.v1.VerificationMaterial.tlog_entries:type_name -> dev.sigstore.rekor.v1.TransparencyLogEntry 0, // 5: dev.sigstore.bundle.v1.VerificationMaterial.timestamp_verification_data:type_name -> dev.sigstore.bundle.v1.TimestampVerificationData 1, // 6: dev.sigstore.bundle.v1.Bundle.verification_material:type_name -> dev.sigstore.bundle.v1.VerificationMaterial 8, // 7: dev.sigstore.bundle.v1.Bundle.message_signature:type_name -> dev.sigstore.common.v1.MessageSignature 9, // 8: dev.sigstore.bundle.v1.Bundle.dsse_envelope:type_name -> io.intoto.Envelope 9, // [9:9] is the sub-list for method output_type 9, // [9:9] is the sub-list for method input_type 9, // [9:9] is the sub-list for extension type_name 9, // [9:9] is the sub-list for extension extendee 0, // [0:9] is the sub-list for field type_name } func init() { file_sigstore_bundle_proto_init() } func file_sigstore_bundle_proto_init() { if File_sigstore_bundle_proto != nil { return } file_sigstore_bundle_proto_msgTypes[1].OneofWrappers = []any{ (*VerificationMaterial_PublicKey)(nil), 
(*VerificationMaterial_X509CertificateChain)(nil), (*VerificationMaterial_Certificate)(nil), } file_sigstore_bundle_proto_msgTypes[2].OneofWrappers = []any{ (*Bundle_MessageSignature)(nil), (*Bundle_DsseEnvelope)(nil), } type x struct{} out := protoimpl.TypeBuilder{ File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: unsafe.Slice(unsafe.StringData(file_sigstore_bundle_proto_rawDesc), len(file_sigstore_bundle_proto_rawDesc)), NumEnums: 0, NumMessages: 3, NumExtensions: 0, NumServices: 0, }, GoTypes: file_sigstore_bundle_proto_goTypes, DependencyIndexes: file_sigstore_bundle_proto_depIdxs, MessageInfos: file_sigstore_bundle_proto_msgTypes, }.Build() File_sigstore_bundle_proto = out.File file_sigstore_bundle_proto_goTypes = nil file_sigstore_bundle_proto_depIdxs = nil } protobuf-specs-0.4.1/gen/pb-go/common/000077500000000000000000000000001477352757300175605ustar00rootroot00000000000000protobuf-specs-0.4.1/gen/pb-go/common/v1/000077500000000000000000000000001477352757300201065ustar00rootroot00000000000000protobuf-specs-0.4.1/gen/pb-go/common/v1/sigstore_common.pb.go000066400000000000000000001437661477352757300242650ustar00rootroot00000000000000// Copyright 2022 The Sigstore Authors. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // Code generated by protoc-gen-go. DO NOT EDIT. // versions: // protoc-gen-go v1.36.5 // protoc v5.29.4 // source: sigstore_common.proto package v1 import ( _ "google.golang.org/genproto/googleapis/api/annotations" protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" timestamppb "google.golang.org/protobuf/types/known/timestamppb" reflect "reflect" sync "sync" unsafe "unsafe" ) const ( // Verify that this generated code is sufficiently up-to-date. _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) // Verify that runtime/protoimpl is sufficiently up-to-date. _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) ) // Only a subset of the secure hash standard algorithms are supported. // See for more // details. // UNSPECIFIED SHOULD not be used, primary reason for inclusion is to force // any proto JSON serialization to emit the used hash algorithm, as default // option is to *omit* the default value of an enum (which is the first // value, represented by '0'. type HashAlgorithm int32 const ( HashAlgorithm_HASH_ALGORITHM_UNSPECIFIED HashAlgorithm = 0 HashAlgorithm_SHA2_256 HashAlgorithm = 1 HashAlgorithm_SHA2_384 HashAlgorithm = 2 HashAlgorithm_SHA2_512 HashAlgorithm = 3 HashAlgorithm_SHA3_256 HashAlgorithm = 4 HashAlgorithm_SHA3_384 HashAlgorithm = 5 ) // Enum value maps for HashAlgorithm. 
var ( HashAlgorithm_name = map[int32]string{ 0: "HASH_ALGORITHM_UNSPECIFIED", 1: "SHA2_256", 2: "SHA2_384", 3: "SHA2_512", 4: "SHA3_256", 5: "SHA3_384", } HashAlgorithm_value = map[string]int32{ "HASH_ALGORITHM_UNSPECIFIED": 0, "SHA2_256": 1, "SHA2_384": 2, "SHA2_512": 3, "SHA3_256": 4, "SHA3_384": 5, } ) func (x HashAlgorithm) Enum() *HashAlgorithm { p := new(HashAlgorithm) *p = x return p } func (x HashAlgorithm) String() string { return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) } func (HashAlgorithm) Descriptor() protoreflect.EnumDescriptor { return file_sigstore_common_proto_enumTypes[0].Descriptor() } func (HashAlgorithm) Type() protoreflect.EnumType { return &file_sigstore_common_proto_enumTypes[0] } func (x HashAlgorithm) Number() protoreflect.EnumNumber { return protoreflect.EnumNumber(x) } // Deprecated: Use HashAlgorithm.Descriptor instead. func (HashAlgorithm) EnumDescriptor() ([]byte, []int) { return file_sigstore_common_proto_rawDescGZIP(), []int{0} } // Details of a specific public key, capturing the the key encoding method, // and signature algorithm. // // PublicKeyDetails captures the public key/hash algorithm combinations // recommended in the Sigstore ecosystem. // // This is modelled as a linear set as we want to provide a small number of // opinionated options instead of allowing every possible permutation. // // Any changes to this enum MUST be reflected in the algorithm registry. // See: docs/algorithm-registry.md // // To avoid the possibility of contradicting formats such as PKCS1 with // ED25519 the valid permutations are listed as a linear set instead of a // cartesian set (i.e one combined variable instead of two, one for encoding // and one for the signature algorithm). type PublicKeyDetails int32 const ( PublicKeyDetails_PUBLIC_KEY_DETAILS_UNSPECIFIED PublicKeyDetails = 0 // RSA // // Deprecated: Marked as deprecated in sigstore_common.proto. PublicKeyDetails_PKCS1_RSA_PKCS1V5 PublicKeyDetails = 1 // See RFC8017 // Deprecated: Marked as deprecated in sigstore_common.proto. PublicKeyDetails_PKCS1_RSA_PSS PublicKeyDetails = 2 // See RFC8017 // Deprecated: Marked as deprecated in sigstore_common.proto. PublicKeyDetails_PKIX_RSA_PKCS1V5 PublicKeyDetails = 3 // Deprecated: Marked as deprecated in sigstore_common.proto. PublicKeyDetails_PKIX_RSA_PSS PublicKeyDetails = 4 // RSA public key in PKIX format, PKCS#1v1.5 signature PublicKeyDetails_PKIX_RSA_PKCS1V15_2048_SHA256 PublicKeyDetails = 9 PublicKeyDetails_PKIX_RSA_PKCS1V15_3072_SHA256 PublicKeyDetails = 10 PublicKeyDetails_PKIX_RSA_PKCS1V15_4096_SHA256 PublicKeyDetails = 11 // RSA public key in PKIX format, RSASSA-PSS signature PublicKeyDetails_PKIX_RSA_PSS_2048_SHA256 PublicKeyDetails = 16 // See RFC4055 PublicKeyDetails_PKIX_RSA_PSS_3072_SHA256 PublicKeyDetails = 17 PublicKeyDetails_PKIX_RSA_PSS_4096_SHA256 PublicKeyDetails = 18 // ECDSA // // Deprecated: Marked as deprecated in sigstore_common.proto. PublicKeyDetails_PKIX_ECDSA_P256_HMAC_SHA_256 PublicKeyDetails = 6 // See RFC6979 PublicKeyDetails_PKIX_ECDSA_P256_SHA_256 PublicKeyDetails = 5 // See NIST FIPS 186-4 PublicKeyDetails_PKIX_ECDSA_P384_SHA_384 PublicKeyDetails = 12 PublicKeyDetails_PKIX_ECDSA_P521_SHA_512 PublicKeyDetails = 13 // Ed 25519 PublicKeyDetails_PKIX_ED25519 PublicKeyDetails = 7 // See RFC8032 PublicKeyDetails_PKIX_ED25519_PH PublicKeyDetails = 8 // These algorithms are deprecated and should not be used, but they // were/are being used by most Sigstore clients implementations. 
// // Deprecated: Marked as deprecated in sigstore_common.proto. PublicKeyDetails_PKIX_ECDSA_P384_SHA_256 PublicKeyDetails = 19 // Deprecated: Marked as deprecated in sigstore_common.proto. PublicKeyDetails_PKIX_ECDSA_P521_SHA_256 PublicKeyDetails = 20 // LMS and LM-OTS // // These keys and signatures may be used by private Sigstore // deployments, but are not currently supported by the public // good instance. // // USER WARNING: LMS and LM-OTS are both stateful signature schemes. // Using them correctly requires discretion and careful consideration // to ensure that individual secret keys are not used more than once. // In addition, LM-OTS is a single-use scheme, meaning that it // MUST NOT be used for more than one signature per LM-OTS key. // If you cannot maintain these invariants, you MUST NOT use these // schemes. PublicKeyDetails_LMS_SHA256 PublicKeyDetails = 14 PublicKeyDetails_LMOTS_SHA256 PublicKeyDetails = 15 ) // Enum value maps for PublicKeyDetails. var ( PublicKeyDetails_name = map[int32]string{ 0: "PUBLIC_KEY_DETAILS_UNSPECIFIED", 1: "PKCS1_RSA_PKCS1V5", 2: "PKCS1_RSA_PSS", 3: "PKIX_RSA_PKCS1V5", 4: "PKIX_RSA_PSS", 9: "PKIX_RSA_PKCS1V15_2048_SHA256", 10: "PKIX_RSA_PKCS1V15_3072_SHA256", 11: "PKIX_RSA_PKCS1V15_4096_SHA256", 16: "PKIX_RSA_PSS_2048_SHA256", 17: "PKIX_RSA_PSS_3072_SHA256", 18: "PKIX_RSA_PSS_4096_SHA256", 6: "PKIX_ECDSA_P256_HMAC_SHA_256", 5: "PKIX_ECDSA_P256_SHA_256", 12: "PKIX_ECDSA_P384_SHA_384", 13: "PKIX_ECDSA_P521_SHA_512", 7: "PKIX_ED25519", 8: "PKIX_ED25519_PH", 19: "PKIX_ECDSA_P384_SHA_256", 20: "PKIX_ECDSA_P521_SHA_256", 14: "LMS_SHA256", 15: "LMOTS_SHA256", } PublicKeyDetails_value = map[string]int32{ "PUBLIC_KEY_DETAILS_UNSPECIFIED": 0, "PKCS1_RSA_PKCS1V5": 1, "PKCS1_RSA_PSS": 2, "PKIX_RSA_PKCS1V5": 3, "PKIX_RSA_PSS": 4, "PKIX_RSA_PKCS1V15_2048_SHA256": 9, "PKIX_RSA_PKCS1V15_3072_SHA256": 10, "PKIX_RSA_PKCS1V15_4096_SHA256": 11, "PKIX_RSA_PSS_2048_SHA256": 16, "PKIX_RSA_PSS_3072_SHA256": 17, "PKIX_RSA_PSS_4096_SHA256": 18, "PKIX_ECDSA_P256_HMAC_SHA_256": 6, "PKIX_ECDSA_P256_SHA_256": 5, "PKIX_ECDSA_P384_SHA_384": 12, "PKIX_ECDSA_P521_SHA_512": 13, "PKIX_ED25519": 7, "PKIX_ED25519_PH": 8, "PKIX_ECDSA_P384_SHA_256": 19, "PKIX_ECDSA_P521_SHA_256": 20, "LMS_SHA256": 14, "LMOTS_SHA256": 15, } ) func (x PublicKeyDetails) Enum() *PublicKeyDetails { p := new(PublicKeyDetails) *p = x return p } func (x PublicKeyDetails) String() string { return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) } func (PublicKeyDetails) Descriptor() protoreflect.EnumDescriptor { return file_sigstore_common_proto_enumTypes[1].Descriptor() } func (PublicKeyDetails) Type() protoreflect.EnumType { return &file_sigstore_common_proto_enumTypes[1] } func (x PublicKeyDetails) Number() protoreflect.EnumNumber { return protoreflect.EnumNumber(x) } // Deprecated: Use PublicKeyDetails.Descriptor instead. func (PublicKeyDetails) EnumDescriptor() ([]byte, []int) { return file_sigstore_common_proto_rawDescGZIP(), []int{1} } type SubjectAlternativeNameType int32 const ( SubjectAlternativeNameType_SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED SubjectAlternativeNameType = 0 SubjectAlternativeNameType_EMAIL SubjectAlternativeNameType = 1 SubjectAlternativeNameType_URI SubjectAlternativeNameType = 2 // OID 1.3.6.1.4.1.57264.1.7 // See https://github.com/sigstore/fulcio/blob/main/docs/oid-info.md#1361415726417--othername-san // for more details. SubjectAlternativeNameType_OTHER_NAME SubjectAlternativeNameType = 3 ) // Enum value maps for SubjectAlternativeNameType. 
var ( SubjectAlternativeNameType_name = map[int32]string{ 0: "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED", 1: "EMAIL", 2: "URI", 3: "OTHER_NAME", } SubjectAlternativeNameType_value = map[string]int32{ "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED": 0, "EMAIL": 1, "URI": 2, "OTHER_NAME": 3, } ) func (x SubjectAlternativeNameType) Enum() *SubjectAlternativeNameType { p := new(SubjectAlternativeNameType) *p = x return p } func (x SubjectAlternativeNameType) String() string { return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) } func (SubjectAlternativeNameType) Descriptor() protoreflect.EnumDescriptor { return file_sigstore_common_proto_enumTypes[2].Descriptor() } func (SubjectAlternativeNameType) Type() protoreflect.EnumType { return &file_sigstore_common_proto_enumTypes[2] } func (x SubjectAlternativeNameType) Number() protoreflect.EnumNumber { return protoreflect.EnumNumber(x) } // Deprecated: Use SubjectAlternativeNameType.Descriptor instead. func (SubjectAlternativeNameType) EnumDescriptor() ([]byte, []int) { return file_sigstore_common_proto_rawDescGZIP(), []int{2} } // HashOutput captures a digest of a 'message' (generic octet sequence) // and the corresponding hash algorithm used. type HashOutput struct { state protoimpl.MessageState `protogen:"open.v1"` Algorithm HashAlgorithm `protobuf:"varint,1,opt,name=algorithm,proto3,enum=dev.sigstore.common.v1.HashAlgorithm" json:"algorithm,omitempty"` // This is the raw octets of the message digest as computed by // the hash algorithm. Digest []byte `protobuf:"bytes,2,opt,name=digest,proto3" json:"digest,omitempty"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } func (x *HashOutput) Reset() { *x = HashOutput{} mi := &file_sigstore_common_proto_msgTypes[0] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } func (x *HashOutput) String() string { return protoimpl.X.MessageStringOf(x) } func (*HashOutput) ProtoMessage() {} func (x *HashOutput) ProtoReflect() protoreflect.Message { mi := &file_sigstore_common_proto_msgTypes[0] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use HashOutput.ProtoReflect.Descriptor instead. func (*HashOutput) Descriptor() ([]byte, []int) { return file_sigstore_common_proto_rawDescGZIP(), []int{0} } func (x *HashOutput) GetAlgorithm() HashAlgorithm { if x != nil { return x.Algorithm } return HashAlgorithm_HASH_ALGORITHM_UNSPECIFIED } func (x *HashOutput) GetDigest() []byte { if x != nil { return x.Digest } return nil } // MessageSignature stores the computed signature over a message. type MessageSignature struct { state protoimpl.MessageState `protogen:"open.v1"` // Message digest can be used to identify the artifact. // Clients MUST NOT attempt to use this digest to verify the associated // signature; it is intended solely for identification. MessageDigest *HashOutput `protobuf:"bytes,1,opt,name=message_digest,json=messageDigest,proto3" json:"message_digest,omitempty"` // The raw bytes as returned from the signature algorithm. // The signature algorithm (and so the format of the signature bytes) // are determined by the contents of the 'verification_material', // either a key-pair or a certificate. If using a certificate, the // certificate contains the required information on the signature // algorithm. // When using a key pair, the algorithm MUST be part of the public // key, which MUST be communicated out-of-band. 
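	// A minimal illustrative sketch (not part of the generated code) of
	// verifying such a signature with an ECDSA P-256 key, hashing the artifact
	// itself rather than trusting message_digest; `pub` is an *ecdsa.PublicKey
	// and `artifact` holds the raw artifact bytes:
	//
	//	digest := sha256.Sum256(artifact)
	//	if !ecdsa.VerifyASN1(pub, digest[:], ms.GetSignature()) {
	//		return errors.New("message signature verification failed")
	//	}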
Signature []byte `protobuf:"bytes,2,opt,name=signature,proto3" json:"signature,omitempty"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } func (x *MessageSignature) Reset() { *x = MessageSignature{} mi := &file_sigstore_common_proto_msgTypes[1] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } func (x *MessageSignature) String() string { return protoimpl.X.MessageStringOf(x) } func (*MessageSignature) ProtoMessage() {} func (x *MessageSignature) ProtoReflect() protoreflect.Message { mi := &file_sigstore_common_proto_msgTypes[1] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use MessageSignature.ProtoReflect.Descriptor instead. func (*MessageSignature) Descriptor() ([]byte, []int) { return file_sigstore_common_proto_rawDescGZIP(), []int{1} } func (x *MessageSignature) GetMessageDigest() *HashOutput { if x != nil { return x.MessageDigest } return nil } func (x *MessageSignature) GetSignature() []byte { if x != nil { return x.Signature } return nil } // LogId captures the identity of a transparency log. type LogId struct { state protoimpl.MessageState `protogen:"open.v1"` // The unique identity of the log, represented by its public key. KeyId []byte `protobuf:"bytes,1,opt,name=key_id,json=keyId,proto3" json:"key_id,omitempty"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } func (x *LogId) Reset() { *x = LogId{} mi := &file_sigstore_common_proto_msgTypes[2] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } func (x *LogId) String() string { return protoimpl.X.MessageStringOf(x) } func (*LogId) ProtoMessage() {} func (x *LogId) ProtoReflect() protoreflect.Message { mi := &file_sigstore_common_proto_msgTypes[2] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use LogId.ProtoReflect.Descriptor instead. func (*LogId) Descriptor() ([]byte, []int) { return file_sigstore_common_proto_rawDescGZIP(), []int{2} } func (x *LogId) GetKeyId() []byte { if x != nil { return x.KeyId } return nil } // This message holds a RFC 3161 timestamp. type RFC3161SignedTimestamp struct { state protoimpl.MessageState `protogen:"open.v1"` // Signed timestamp is the DER encoded TimeStampResponse. // See https://www.rfc-editor.org/rfc/rfc3161.html#section-2.4.2 SignedTimestamp []byte `protobuf:"bytes,1,opt,name=signed_timestamp,json=signedTimestamp,proto3" json:"signed_timestamp,omitempty"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } func (x *RFC3161SignedTimestamp) Reset() { *x = RFC3161SignedTimestamp{} mi := &file_sigstore_common_proto_msgTypes[3] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } func (x *RFC3161SignedTimestamp) String() string { return protoimpl.X.MessageStringOf(x) } func (*RFC3161SignedTimestamp) ProtoMessage() {} func (x *RFC3161SignedTimestamp) ProtoReflect() protoreflect.Message { mi := &file_sigstore_common_proto_msgTypes[3] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use RFC3161SignedTimestamp.ProtoReflect.Descriptor instead. 
func (*RFC3161SignedTimestamp) Descriptor() ([]byte, []int) { return file_sigstore_common_proto_rawDescGZIP(), []int{3} } func (x *RFC3161SignedTimestamp) GetSignedTimestamp() []byte { if x != nil { return x.SignedTimestamp } return nil } type PublicKey struct { state protoimpl.MessageState `protogen:"open.v1"` // DER-encoded public key, encoding method is specified by the // key_details attribute. RawBytes []byte `protobuf:"bytes,1,opt,name=raw_bytes,json=rawBytes,proto3,oneof" json:"raw_bytes,omitempty"` // Key encoding and signature algorithm to use for this key. KeyDetails PublicKeyDetails `protobuf:"varint,2,opt,name=key_details,json=keyDetails,proto3,enum=dev.sigstore.common.v1.PublicKeyDetails" json:"key_details,omitempty"` // Optional validity period for this key, *inclusive* of the endpoints. ValidFor *TimeRange `protobuf:"bytes,3,opt,name=valid_for,json=validFor,proto3,oneof" json:"valid_for,omitempty"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } func (x *PublicKey) Reset() { *x = PublicKey{} mi := &file_sigstore_common_proto_msgTypes[4] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } func (x *PublicKey) String() string { return protoimpl.X.MessageStringOf(x) } func (*PublicKey) ProtoMessage() {} func (x *PublicKey) ProtoReflect() protoreflect.Message { mi := &file_sigstore_common_proto_msgTypes[4] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use PublicKey.ProtoReflect.Descriptor instead. func (*PublicKey) Descriptor() ([]byte, []int) { return file_sigstore_common_proto_rawDescGZIP(), []int{4} } func (x *PublicKey) GetRawBytes() []byte { if x != nil { return x.RawBytes } return nil } func (x *PublicKey) GetKeyDetails() PublicKeyDetails { if x != nil { return x.KeyDetails } return PublicKeyDetails_PUBLIC_KEY_DETAILS_UNSPECIFIED } func (x *PublicKey) GetValidFor() *TimeRange { if x != nil { return x.ValidFor } return nil } // PublicKeyIdentifier can be used to identify an (out of band) delivered // key, to verify a signature. type PublicKeyIdentifier struct { state protoimpl.MessageState `protogen:"open.v1"` // Optional unauthenticated hint on which key to use. // The format of the hint must be agreed upon out of band by the // signer and the verifiers, and so is not subject to this // specification. // Example use-case is to specify the public key to use, from a // trusted key-ring. // Implementors are RECOMMENDED to derive the value from the public // key as described in RFC 6962. // See: Hint string `protobuf:"bytes,1,opt,name=hint,proto3" json:"hint,omitempty"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } func (x *PublicKeyIdentifier) Reset() { *x = PublicKeyIdentifier{} mi := &file_sigstore_common_proto_msgTypes[5] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } func (x *PublicKeyIdentifier) String() string { return protoimpl.X.MessageStringOf(x) } func (*PublicKeyIdentifier) ProtoMessage() {} func (x *PublicKeyIdentifier) ProtoReflect() protoreflect.Message { mi := &file_sigstore_common_proto_msgTypes[5] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use PublicKeyIdentifier.ProtoReflect.Descriptor instead. 
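// A minimal illustrative sketch (not part of the generated code) of the
// RECOMMENDED hint derivation per RFC 6962, section 3.2: SHA-256 over the
// DER-encoded SubjectPublicKeyInfo. The base64 string encoding below is an
// assumption for illustration, not mandated by the specification; error
// handling is elided:
//
//	spki, _ := x509.MarshalPKIXPublicKey(pub)
//	sum := sha256.Sum256(spki)
//	hint := base64.StdEncoding.EncodeToString(sum[:])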
func (*PublicKeyIdentifier) Descriptor() ([]byte, []int) { return file_sigstore_common_proto_rawDescGZIP(), []int{5} } func (x *PublicKeyIdentifier) GetHint() string { if x != nil { return x.Hint } return "" } // An ASN.1 OBJECT IDENTIFIER type ObjectIdentifier struct { state protoimpl.MessageState `protogen:"open.v1"` Id []int32 `protobuf:"varint,1,rep,packed,name=id,proto3" json:"id,omitempty"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } func (x *ObjectIdentifier) Reset() { *x = ObjectIdentifier{} mi := &file_sigstore_common_proto_msgTypes[6] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } func (x *ObjectIdentifier) String() string { return protoimpl.X.MessageStringOf(x) } func (*ObjectIdentifier) ProtoMessage() {} func (x *ObjectIdentifier) ProtoReflect() protoreflect.Message { mi := &file_sigstore_common_proto_msgTypes[6] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use ObjectIdentifier.ProtoReflect.Descriptor instead. func (*ObjectIdentifier) Descriptor() ([]byte, []int) { return file_sigstore_common_proto_rawDescGZIP(), []int{6} } func (x *ObjectIdentifier) GetId() []int32 { if x != nil { return x.Id } return nil } // An OID and the corresponding (byte) value. type ObjectIdentifierValuePair struct { state protoimpl.MessageState `protogen:"open.v1"` Oid *ObjectIdentifier `protobuf:"bytes,1,opt,name=oid,proto3" json:"oid,omitempty"` Value []byte `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } func (x *ObjectIdentifierValuePair) Reset() { *x = ObjectIdentifierValuePair{} mi := &file_sigstore_common_proto_msgTypes[7] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } func (x *ObjectIdentifierValuePair) String() string { return protoimpl.X.MessageStringOf(x) } func (*ObjectIdentifierValuePair) ProtoMessage() {} func (x *ObjectIdentifierValuePair) ProtoReflect() protoreflect.Message { mi := &file_sigstore_common_proto_msgTypes[7] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use ObjectIdentifierValuePair.ProtoReflect.Descriptor instead. 
func (*ObjectIdentifierValuePair) Descriptor() ([]byte, []int) { return file_sigstore_common_proto_rawDescGZIP(), []int{7} } func (x *ObjectIdentifierValuePair) GetOid() *ObjectIdentifier { if x != nil { return x.Oid } return nil } func (x *ObjectIdentifierValuePair) GetValue() []byte { if x != nil { return x.Value } return nil } type DistinguishedName struct { state protoimpl.MessageState `protogen:"open.v1"` Organization string `protobuf:"bytes,1,opt,name=organization,proto3" json:"organization,omitempty"` CommonName string `protobuf:"bytes,2,opt,name=common_name,json=commonName,proto3" json:"common_name,omitempty"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } func (x *DistinguishedName) Reset() { *x = DistinguishedName{} mi := &file_sigstore_common_proto_msgTypes[8] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } func (x *DistinguishedName) String() string { return protoimpl.X.MessageStringOf(x) } func (*DistinguishedName) ProtoMessage() {} func (x *DistinguishedName) ProtoReflect() protoreflect.Message { mi := &file_sigstore_common_proto_msgTypes[8] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use DistinguishedName.ProtoReflect.Descriptor instead. func (*DistinguishedName) Descriptor() ([]byte, []int) { return file_sigstore_common_proto_rawDescGZIP(), []int{8} } func (x *DistinguishedName) GetOrganization() string { if x != nil { return x.Organization } return "" } func (x *DistinguishedName) GetCommonName() string { if x != nil { return x.CommonName } return "" } type X509Certificate struct { state protoimpl.MessageState `protogen:"open.v1"` // DER-encoded X.509 certificate. RawBytes []byte `protobuf:"bytes,1,opt,name=raw_bytes,json=rawBytes,proto3" json:"raw_bytes,omitempty"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } func (x *X509Certificate) Reset() { *x = X509Certificate{} mi := &file_sigstore_common_proto_msgTypes[9] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } func (x *X509Certificate) String() string { return protoimpl.X.MessageStringOf(x) } func (*X509Certificate) ProtoMessage() {} func (x *X509Certificate) ProtoReflect() protoreflect.Message { mi := &file_sigstore_common_proto_msgTypes[9] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use X509Certificate.ProtoReflect.Descriptor instead. 
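// Illustrative sketch, not part of the generated code: an X509Certificate's
// raw_bytes holds a single DER-encoded certificate, so it can be handed
// straight to the standard library parser. The function name is an
// assumption; assumes additional import: crypto/x509.
func exampleParseCertificate(c *X509Certificate) (*x509.Certificate, error) {
	return x509.ParseCertificate(c.GetRawBytes())
}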
func (*X509Certificate) Descriptor() ([]byte, []int) { return file_sigstore_common_proto_rawDescGZIP(), []int{9} } func (x *X509Certificate) GetRawBytes() []byte { if x != nil { return x.RawBytes } return nil } type SubjectAlternativeName struct { state protoimpl.MessageState `protogen:"open.v1"` Type SubjectAlternativeNameType `protobuf:"varint,1,opt,name=type,proto3,enum=dev.sigstore.common.v1.SubjectAlternativeNameType" json:"type,omitempty"` // Types that are valid to be assigned to Identity: // // *SubjectAlternativeName_Regexp // *SubjectAlternativeName_Value Identity isSubjectAlternativeName_Identity `protobuf_oneof:"identity"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } func (x *SubjectAlternativeName) Reset() { *x = SubjectAlternativeName{} mi := &file_sigstore_common_proto_msgTypes[10] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } func (x *SubjectAlternativeName) String() string { return protoimpl.X.MessageStringOf(x) } func (*SubjectAlternativeName) ProtoMessage() {} func (x *SubjectAlternativeName) ProtoReflect() protoreflect.Message { mi := &file_sigstore_common_proto_msgTypes[10] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use SubjectAlternativeName.ProtoReflect.Descriptor instead. func (*SubjectAlternativeName) Descriptor() ([]byte, []int) { return file_sigstore_common_proto_rawDescGZIP(), []int{10} } func (x *SubjectAlternativeName) GetType() SubjectAlternativeNameType { if x != nil { return x.Type } return SubjectAlternativeNameType_SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED } func (x *SubjectAlternativeName) GetIdentity() isSubjectAlternativeName_Identity { if x != nil { return x.Identity } return nil } func (x *SubjectAlternativeName) GetRegexp() string { if x != nil { if x, ok := x.Identity.(*SubjectAlternativeName_Regexp); ok { return x.Regexp } } return "" } func (x *SubjectAlternativeName) GetValue() string { if x != nil { if x, ok := x.Identity.(*SubjectAlternativeName_Value); ok { return x.Value } } return "" } type isSubjectAlternativeName_Identity interface { isSubjectAlternativeName_Identity() } type SubjectAlternativeName_Regexp struct { // A regular expression describing the expected value for // the SAN. Regexp string `protobuf:"bytes,2,opt,name=regexp,proto3,oneof"` } type SubjectAlternativeName_Value struct { // The exact value to match against. Value string `protobuf:"bytes,3,opt,name=value,proto3,oneof"` } func (*SubjectAlternativeName_Regexp) isSubjectAlternativeName_Identity() {} func (*SubjectAlternativeName_Value) isSubjectAlternativeName_Identity() {} // A collection of X.509 certificates. // // This "chain" can be used in multiple contexts, such as providing a root CA // certificate within a TUF root of trust or multiple untrusted certificates for // the purpose of chain building. type X509CertificateChain struct { state protoimpl.MessageState `protogen:"open.v1"` // One or more DER-encoded certificates. // // In some contexts (such as `VerificationMaterial.x509_certificate_chain`), this sequence // has an imposed order. Unless explicitly specified, there is otherwise no // guaranteed order. 
Certificates []*X509Certificate `protobuf:"bytes,1,rep,name=certificates,proto3" json:"certificates,omitempty"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } func (x *X509CertificateChain) Reset() { *x = X509CertificateChain{} mi := &file_sigstore_common_proto_msgTypes[11] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } func (x *X509CertificateChain) String() string { return protoimpl.X.MessageStringOf(x) } func (*X509CertificateChain) ProtoMessage() {} func (x *X509CertificateChain) ProtoReflect() protoreflect.Message { mi := &file_sigstore_common_proto_msgTypes[11] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use X509CertificateChain.ProtoReflect.Descriptor instead. func (*X509CertificateChain) Descriptor() ([]byte, []int) { return file_sigstore_common_proto_rawDescGZIP(), []int{11} } func (x *X509CertificateChain) GetCertificates() []*X509Certificate { if x != nil { return x.Certificates } return nil } // The time range is closed and includes both the start and end times, // (i.e., [start, end]). // End is optional to be able to capture a period that has started but // has no known end. type TimeRange struct { state protoimpl.MessageState `protogen:"open.v1"` Start *timestamppb.Timestamp `protobuf:"bytes,1,opt,name=start,proto3" json:"start,omitempty"` End *timestamppb.Timestamp `protobuf:"bytes,2,opt,name=end,proto3,oneof" json:"end,omitempty"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } func (x *TimeRange) Reset() { *x = TimeRange{} mi := &file_sigstore_common_proto_msgTypes[12] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } func (x *TimeRange) String() string { return protoimpl.X.MessageStringOf(x) } func (*TimeRange) ProtoMessage() {} func (x *TimeRange) ProtoReflect() protoreflect.Message { mi := &file_sigstore_common_proto_msgTypes[12] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use TimeRange.ProtoReflect.Descriptor instead. 
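// Illustrative sketch, not part of the generated code: checking whether an
// instant falls inside a TimeRange. Both endpoints count as valid because
// the range is closed, and a missing End is treated as "no known end yet".
// Treating a missing Start as a failed check is a choice made for this
// sketch only. The function name is an assumption; assumes additional
// import: time.
func exampleTimeRangeContains(tr *TimeRange, t time.Time) bool {
	if tr == nil || tr.GetStart() == nil {
		return false
	}
	if t.Before(tr.GetStart().AsTime()) {
		return false
	}
	if end := tr.GetEnd(); end != nil && t.After(end.AsTime()) {
		return false
	}
	return true
}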
func (*TimeRange) Descriptor() ([]byte, []int) { return file_sigstore_common_proto_rawDescGZIP(), []int{12} } func (x *TimeRange) GetStart() *timestamppb.Timestamp { if x != nil { return x.Start } return nil } func (x *TimeRange) GetEnd() *timestamppb.Timestamp { if x != nil { return x.End } return nil } var File_sigstore_common_proto protoreflect.FileDescriptor var file_sigstore_common_proto_rawDesc = string([]byte{ 0x0a, 0x15, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x16, 0x64, 0x65, 0x76, 0x2e, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x76, 0x31, 0x1a, 0x1f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x5f, 0x62, 0x65, 0x68, 0x61, 0x76, 0x69, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x69, 0x0a, 0x0a, 0x48, 0x61, 0x73, 0x68, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x12, 0x43, 0x0a, 0x09, 0x61, 0x6c, 0x67, 0x6f, 0x72, 0x69, 0x74, 0x68, 0x6d, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x25, 0x2e, 0x64, 0x65, 0x76, 0x2e, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x76, 0x31, 0x2e, 0x48, 0x61, 0x73, 0x68, 0x41, 0x6c, 0x67, 0x6f, 0x72, 0x69, 0x74, 0x68, 0x6d, 0x52, 0x09, 0x61, 0x6c, 0x67, 0x6f, 0x72, 0x69, 0x74, 0x68, 0x6d, 0x12, 0x16, 0x0a, 0x06, 0x64, 0x69, 0x67, 0x65, 0x73, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x06, 0x64, 0x69, 0x67, 0x65, 0x73, 0x74, 0x22, 0x80, 0x01, 0x0a, 0x10, 0x4d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x53, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x12, 0x49, 0x0a, 0x0e, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x5f, 0x64, 0x69, 0x67, 0x65, 0x73, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x64, 0x65, 0x76, 0x2e, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x76, 0x31, 0x2e, 0x48, 0x61, 0x73, 0x68, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x52, 0x0d, 0x6d, 0x65, 0x73, 0x73, 0x61, 0x67, 0x65, 0x44, 0x69, 0x67, 0x65, 0x73, 0x74, 0x12, 0x21, 0x0a, 0x09, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x03, 0xe0, 0x41, 0x02, 0x52, 0x09, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x22, 0x23, 0x0a, 0x05, 0x4c, 0x6f, 0x67, 0x49, 0x64, 0x12, 0x1a, 0x0a, 0x06, 0x6b, 0x65, 0x79, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x03, 0xe0, 0x41, 0x02, 0x52, 0x05, 0x6b, 0x65, 0x79, 0x49, 0x64, 0x22, 0x48, 0x0a, 0x16, 0x52, 0x46, 0x43, 0x33, 0x31, 0x36, 0x31, 0x53, 0x69, 0x67, 0x6e, 0x65, 0x64, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x12, 0x2e, 0x0a, 0x10, 0x73, 0x69, 0x67, 0x6e, 0x65, 0x64, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x03, 0xe0, 0x41, 0x02, 0x52, 0x0f, 0x73, 0x69, 0x67, 0x6e, 0x65, 0x64, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x22, 0xd9, 0x01, 0x0a, 0x09, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x4b, 0x65, 0x79, 0x12, 0x20, 0x0a, 0x09, 0x72, 0x61, 0x77, 0x5f, 0x62, 0x79, 0x74, 0x65, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x48, 0x00, 0x52, 0x08, 0x72, 0x61, 0x77, 0x42, 0x79, 0x74, 0x65, 0x73, 0x88, 0x01, 0x01, 0x12, 0x49, 0x0a, 0x0b, 0x6b, 0x65, 0x79, 0x5f, 0x64, 0x65, 0x74, 0x61, 
0x69, 0x6c, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x28, 0x2e, 0x64, 0x65, 0x76, 0x2e, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x76, 0x31, 0x2e, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x4b, 0x65, 0x79, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x52, 0x0a, 0x6b, 0x65, 0x79, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x12, 0x43, 0x0a, 0x09, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x5f, 0x66, 0x6f, 0x72, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x64, 0x65, 0x76, 0x2e, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x76, 0x31, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x48, 0x01, 0x52, 0x08, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x46, 0x6f, 0x72, 0x88, 0x01, 0x01, 0x42, 0x0c, 0x0a, 0x0a, 0x5f, 0x72, 0x61, 0x77, 0x5f, 0x62, 0x79, 0x74, 0x65, 0x73, 0x42, 0x0c, 0x0a, 0x0a, 0x5f, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x5f, 0x66, 0x6f, 0x72, 0x22, 0x29, 0x0a, 0x13, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x4b, 0x65, 0x79, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x12, 0x12, 0x0a, 0x04, 0x68, 0x69, 0x6e, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x68, 0x69, 0x6e, 0x74, 0x22, 0x27, 0x0a, 0x10, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x12, 0x13, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x03, 0x28, 0x05, 0x42, 0x03, 0xe0, 0x41, 0x02, 0x52, 0x02, 0x69, 0x64, 0x22, 0x6d, 0x0a, 0x19, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x50, 0x61, 0x69, 0x72, 0x12, 0x3a, 0x0a, 0x03, 0x6f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x28, 0x2e, 0x64, 0x65, 0x76, 0x2e, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x76, 0x31, 0x2e, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x52, 0x03, 0x6f, 0x69, 0x64, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x22, 0x58, 0x0a, 0x11, 0x44, 0x69, 0x73, 0x74, 0x69, 0x6e, 0x67, 0x75, 0x69, 0x73, 0x68, 0x65, 0x64, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x22, 0x0a, 0x0c, 0x6f, 0x72, 0x67, 0x61, 0x6e, 0x69, 0x7a, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0c, 0x6f, 0x72, 0x67, 0x61, 0x6e, 0x69, 0x7a, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x1f, 0x0a, 0x0b, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x4e, 0x61, 0x6d, 0x65, 0x22, 0x33, 0x0a, 0x0f, 0x58, 0x35, 0x30, 0x39, 0x43, 0x65, 0x72, 0x74, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x65, 0x12, 0x20, 0x0a, 0x09, 0x72, 0x61, 0x77, 0x5f, 0x62, 0x79, 0x74, 0x65, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x03, 0xe0, 0x41, 0x02, 0x52, 0x08, 0x72, 0x61, 0x77, 0x42, 0x79, 0x74, 0x65, 0x73, 0x22, 0x9e, 0x01, 0x0a, 0x16, 0x53, 0x75, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x41, 0x6c, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x74, 0x69, 0x76, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x46, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x32, 0x2e, 0x64, 0x65, 0x76, 0x2e, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x75, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x41, 0x6c, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x74, 0x69, 0x76, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x54, 0x79, 0x70, 0x65, 0x52, 
0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x18, 0x0a, 0x06, 0x72, 0x65, 0x67, 0x65, 0x78, 0x70, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x06, 0x72, 0x65, 0x67, 0x65, 0x78, 0x70, 0x12, 0x16, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x42, 0x0a, 0x0a, 0x08, 0x69, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x22, 0x63, 0x0a, 0x14, 0x58, 0x35, 0x30, 0x39, 0x43, 0x65, 0x72, 0x74, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x65, 0x43, 0x68, 0x61, 0x69, 0x6e, 0x12, 0x4b, 0x0a, 0x0c, 0x63, 0x65, 0x72, 0x74, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x64, 0x65, 0x76, 0x2e, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x76, 0x31, 0x2e, 0x58, 0x35, 0x30, 0x39, 0x43, 0x65, 0x72, 0x74, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x65, 0x52, 0x0c, 0x63, 0x65, 0x72, 0x74, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x65, 0x73, 0x22, 0x78, 0x0a, 0x09, 0x54, 0x69, 0x6d, 0x65, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x12, 0x30, 0x0a, 0x05, 0x73, 0x74, 0x61, 0x72, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x05, 0x73, 0x74, 0x61, 0x72, 0x74, 0x12, 0x31, 0x0a, 0x03, 0x65, 0x6e, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x48, 0x00, 0x52, 0x03, 0x65, 0x6e, 0x64, 0x88, 0x01, 0x01, 0x42, 0x06, 0x0a, 0x04, 0x5f, 0x65, 0x6e, 0x64, 0x2a, 0x75, 0x0a, 0x0d, 0x48, 0x61, 0x73, 0x68, 0x41, 0x6c, 0x67, 0x6f, 0x72, 0x69, 0x74, 0x68, 0x6d, 0x12, 0x1e, 0x0a, 0x1a, 0x48, 0x41, 0x53, 0x48, 0x5f, 0x41, 0x4c, 0x47, 0x4f, 0x52, 0x49, 0x54, 0x48, 0x4d, 0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x0c, 0x0a, 0x08, 0x53, 0x48, 0x41, 0x32, 0x5f, 0x32, 0x35, 0x36, 0x10, 0x01, 0x12, 0x0c, 0x0a, 0x08, 0x53, 0x48, 0x41, 0x32, 0x5f, 0x33, 0x38, 0x34, 0x10, 0x02, 0x12, 0x0c, 0x0a, 0x08, 0x53, 0x48, 0x41, 0x32, 0x5f, 0x35, 0x31, 0x32, 0x10, 0x03, 0x12, 0x0c, 0x0a, 0x08, 0x53, 0x48, 0x41, 0x33, 0x5f, 0x32, 0x35, 0x36, 0x10, 0x04, 0x12, 0x0c, 0x0a, 0x08, 0x53, 0x48, 0x41, 0x33, 0x5f, 0x33, 0x38, 0x34, 0x10, 0x05, 0x2a, 0xe9, 0x04, 0x0a, 0x10, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x4b, 0x65, 0x79, 0x44, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x12, 0x22, 0x0a, 0x1e, 0x50, 0x55, 0x42, 0x4c, 0x49, 0x43, 0x5f, 0x4b, 0x45, 0x59, 0x5f, 0x44, 0x45, 0x54, 0x41, 0x49, 0x4c, 0x53, 0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x19, 0x0a, 0x11, 0x50, 0x4b, 0x43, 0x53, 0x31, 0x5f, 0x52, 0x53, 0x41, 0x5f, 0x50, 0x4b, 0x43, 0x53, 0x31, 0x56, 0x35, 0x10, 0x01, 0x1a, 0x02, 0x08, 0x01, 0x12, 0x15, 0x0a, 0x0d, 0x50, 0x4b, 0x43, 0x53, 0x31, 0x5f, 0x52, 0x53, 0x41, 0x5f, 0x50, 0x53, 0x53, 0x10, 0x02, 0x1a, 0x02, 0x08, 0x01, 0x12, 0x18, 0x0a, 0x10, 0x50, 0x4b, 0x49, 0x58, 0x5f, 0x52, 0x53, 0x41, 0x5f, 0x50, 0x4b, 0x43, 0x53, 0x31, 0x56, 0x35, 0x10, 0x03, 0x1a, 0x02, 0x08, 0x01, 0x12, 0x14, 0x0a, 0x0c, 0x50, 0x4b, 0x49, 0x58, 0x5f, 0x52, 0x53, 0x41, 0x5f, 0x50, 0x53, 0x53, 0x10, 0x04, 0x1a, 0x02, 0x08, 0x01, 0x12, 0x21, 0x0a, 0x1d, 0x50, 0x4b, 0x49, 0x58, 0x5f, 0x52, 0x53, 0x41, 0x5f, 0x50, 0x4b, 0x43, 0x53, 0x31, 0x56, 0x31, 0x35, 0x5f, 0x32, 0x30, 0x34, 0x38, 0x5f, 0x53, 
0x48, 0x41, 0x32, 0x35, 0x36, 0x10, 0x09, 0x12, 0x21, 0x0a, 0x1d, 0x50, 0x4b, 0x49, 0x58, 0x5f, 0x52, 0x53, 0x41, 0x5f, 0x50, 0x4b, 0x43, 0x53, 0x31, 0x56, 0x31, 0x35, 0x5f, 0x33, 0x30, 0x37, 0x32, 0x5f, 0x53, 0x48, 0x41, 0x32, 0x35, 0x36, 0x10, 0x0a, 0x12, 0x21, 0x0a, 0x1d, 0x50, 0x4b, 0x49, 0x58, 0x5f, 0x52, 0x53, 0x41, 0x5f, 0x50, 0x4b, 0x43, 0x53, 0x31, 0x56, 0x31, 0x35, 0x5f, 0x34, 0x30, 0x39, 0x36, 0x5f, 0x53, 0x48, 0x41, 0x32, 0x35, 0x36, 0x10, 0x0b, 0x12, 0x1c, 0x0a, 0x18, 0x50, 0x4b, 0x49, 0x58, 0x5f, 0x52, 0x53, 0x41, 0x5f, 0x50, 0x53, 0x53, 0x5f, 0x32, 0x30, 0x34, 0x38, 0x5f, 0x53, 0x48, 0x41, 0x32, 0x35, 0x36, 0x10, 0x10, 0x12, 0x1c, 0x0a, 0x18, 0x50, 0x4b, 0x49, 0x58, 0x5f, 0x52, 0x53, 0x41, 0x5f, 0x50, 0x53, 0x53, 0x5f, 0x33, 0x30, 0x37, 0x32, 0x5f, 0x53, 0x48, 0x41, 0x32, 0x35, 0x36, 0x10, 0x11, 0x12, 0x1c, 0x0a, 0x18, 0x50, 0x4b, 0x49, 0x58, 0x5f, 0x52, 0x53, 0x41, 0x5f, 0x50, 0x53, 0x53, 0x5f, 0x34, 0x30, 0x39, 0x36, 0x5f, 0x53, 0x48, 0x41, 0x32, 0x35, 0x36, 0x10, 0x12, 0x12, 0x24, 0x0a, 0x1c, 0x50, 0x4b, 0x49, 0x58, 0x5f, 0x45, 0x43, 0x44, 0x53, 0x41, 0x5f, 0x50, 0x32, 0x35, 0x36, 0x5f, 0x48, 0x4d, 0x41, 0x43, 0x5f, 0x53, 0x48, 0x41, 0x5f, 0x32, 0x35, 0x36, 0x10, 0x06, 0x1a, 0x02, 0x08, 0x01, 0x12, 0x1b, 0x0a, 0x17, 0x50, 0x4b, 0x49, 0x58, 0x5f, 0x45, 0x43, 0x44, 0x53, 0x41, 0x5f, 0x50, 0x32, 0x35, 0x36, 0x5f, 0x53, 0x48, 0x41, 0x5f, 0x32, 0x35, 0x36, 0x10, 0x05, 0x12, 0x1b, 0x0a, 0x17, 0x50, 0x4b, 0x49, 0x58, 0x5f, 0x45, 0x43, 0x44, 0x53, 0x41, 0x5f, 0x50, 0x33, 0x38, 0x34, 0x5f, 0x53, 0x48, 0x41, 0x5f, 0x33, 0x38, 0x34, 0x10, 0x0c, 0x12, 0x1b, 0x0a, 0x17, 0x50, 0x4b, 0x49, 0x58, 0x5f, 0x45, 0x43, 0x44, 0x53, 0x41, 0x5f, 0x50, 0x35, 0x32, 0x31, 0x5f, 0x53, 0x48, 0x41, 0x5f, 0x35, 0x31, 0x32, 0x10, 0x0d, 0x12, 0x10, 0x0a, 0x0c, 0x50, 0x4b, 0x49, 0x58, 0x5f, 0x45, 0x44, 0x32, 0x35, 0x35, 0x31, 0x39, 0x10, 0x07, 0x12, 0x13, 0x0a, 0x0f, 0x50, 0x4b, 0x49, 0x58, 0x5f, 0x45, 0x44, 0x32, 0x35, 0x35, 0x31, 0x39, 0x5f, 0x50, 0x48, 0x10, 0x08, 0x12, 0x1f, 0x0a, 0x17, 0x50, 0x4b, 0x49, 0x58, 0x5f, 0x45, 0x43, 0x44, 0x53, 0x41, 0x5f, 0x50, 0x33, 0x38, 0x34, 0x5f, 0x53, 0x48, 0x41, 0x5f, 0x32, 0x35, 0x36, 0x10, 0x13, 0x1a, 0x02, 0x08, 0x01, 0x12, 0x1f, 0x0a, 0x17, 0x50, 0x4b, 0x49, 0x58, 0x5f, 0x45, 0x43, 0x44, 0x53, 0x41, 0x5f, 0x50, 0x35, 0x32, 0x31, 0x5f, 0x53, 0x48, 0x41, 0x5f, 0x32, 0x35, 0x36, 0x10, 0x14, 0x1a, 0x02, 0x08, 0x01, 0x12, 0x0e, 0x0a, 0x0a, 0x4c, 0x4d, 0x53, 0x5f, 0x53, 0x48, 0x41, 0x32, 0x35, 0x36, 0x10, 0x0e, 0x12, 0x10, 0x0a, 0x0c, 0x4c, 0x4d, 0x4f, 0x54, 0x53, 0x5f, 0x53, 0x48, 0x41, 0x32, 0x35, 0x36, 0x10, 0x0f, 0x22, 0x04, 0x08, 0x15, 0x10, 0x32, 0x2a, 0x6f, 0x0a, 0x1a, 0x53, 0x75, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x41, 0x6c, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x74, 0x69, 0x76, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x54, 0x79, 0x70, 0x65, 0x12, 0x2d, 0x0a, 0x29, 0x53, 0x55, 0x42, 0x4a, 0x45, 0x43, 0x54, 0x5f, 0x41, 0x4c, 0x54, 0x45, 0x52, 0x4e, 0x41, 0x54, 0x49, 0x56, 0x45, 0x5f, 0x4e, 0x41, 0x4d, 0x45, 0x5f, 0x54, 0x59, 0x50, 0x45, 0x5f, 0x55, 0x4e, 0x53, 0x50, 0x45, 0x43, 0x49, 0x46, 0x49, 0x45, 0x44, 0x10, 0x00, 0x12, 0x09, 0x0a, 0x05, 0x45, 0x4d, 0x41, 0x49, 0x4c, 0x10, 0x01, 0x12, 0x07, 0x0a, 0x03, 0x55, 0x52, 0x49, 0x10, 0x02, 0x12, 0x0e, 0x0a, 0x0a, 0x4f, 0x54, 0x48, 0x45, 0x52, 0x5f, 0x4e, 0x41, 0x4d, 0x45, 0x10, 0x03, 0x42, 0x7c, 0x0a, 0x1c, 0x64, 0x65, 0x76, 0x2e, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x76, 0x31, 0x42, 0x0b, 0x43, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x50, 0x72, 
0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x36, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2d, 0x73, 0x70, 0x65, 0x63, 0x73, 0x2f, 0x67, 0x65, 0x6e, 0x2f, 0x70, 0x62, 0x2d, 0x67, 0x6f, 0x2f, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2f, 0x76, 0x31, 0xea, 0x02, 0x14, 0x53, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x3a, 0x3a, 0x43, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x3a, 0x3a, 0x56, 0x31, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, }) var ( file_sigstore_common_proto_rawDescOnce sync.Once file_sigstore_common_proto_rawDescData []byte ) func file_sigstore_common_proto_rawDescGZIP() []byte { file_sigstore_common_proto_rawDescOnce.Do(func() { file_sigstore_common_proto_rawDescData = protoimpl.X.CompressGZIP(unsafe.Slice(unsafe.StringData(file_sigstore_common_proto_rawDesc), len(file_sigstore_common_proto_rawDesc))) }) return file_sigstore_common_proto_rawDescData } var file_sigstore_common_proto_enumTypes = make([]protoimpl.EnumInfo, 3) var file_sigstore_common_proto_msgTypes = make([]protoimpl.MessageInfo, 13) var file_sigstore_common_proto_goTypes = []any{ (HashAlgorithm)(0), // 0: dev.sigstore.common.v1.HashAlgorithm (PublicKeyDetails)(0), // 1: dev.sigstore.common.v1.PublicKeyDetails (SubjectAlternativeNameType)(0), // 2: dev.sigstore.common.v1.SubjectAlternativeNameType (*HashOutput)(nil), // 3: dev.sigstore.common.v1.HashOutput (*MessageSignature)(nil), // 4: dev.sigstore.common.v1.MessageSignature (*LogId)(nil), // 5: dev.sigstore.common.v1.LogId (*RFC3161SignedTimestamp)(nil), // 6: dev.sigstore.common.v1.RFC3161SignedTimestamp (*PublicKey)(nil), // 7: dev.sigstore.common.v1.PublicKey (*PublicKeyIdentifier)(nil), // 8: dev.sigstore.common.v1.PublicKeyIdentifier (*ObjectIdentifier)(nil), // 9: dev.sigstore.common.v1.ObjectIdentifier (*ObjectIdentifierValuePair)(nil), // 10: dev.sigstore.common.v1.ObjectIdentifierValuePair (*DistinguishedName)(nil), // 11: dev.sigstore.common.v1.DistinguishedName (*X509Certificate)(nil), // 12: dev.sigstore.common.v1.X509Certificate (*SubjectAlternativeName)(nil), // 13: dev.sigstore.common.v1.SubjectAlternativeName (*X509CertificateChain)(nil), // 14: dev.sigstore.common.v1.X509CertificateChain (*TimeRange)(nil), // 15: dev.sigstore.common.v1.TimeRange (*timestamppb.Timestamp)(nil), // 16: google.protobuf.Timestamp } var file_sigstore_common_proto_depIdxs = []int32{ 0, // 0: dev.sigstore.common.v1.HashOutput.algorithm:type_name -> dev.sigstore.common.v1.HashAlgorithm 3, // 1: dev.sigstore.common.v1.MessageSignature.message_digest:type_name -> dev.sigstore.common.v1.HashOutput 1, // 2: dev.sigstore.common.v1.PublicKey.key_details:type_name -> dev.sigstore.common.v1.PublicKeyDetails 15, // 3: dev.sigstore.common.v1.PublicKey.valid_for:type_name -> dev.sigstore.common.v1.TimeRange 9, // 4: dev.sigstore.common.v1.ObjectIdentifierValuePair.oid:type_name -> dev.sigstore.common.v1.ObjectIdentifier 2, // 5: dev.sigstore.common.v1.SubjectAlternativeName.type:type_name -> dev.sigstore.common.v1.SubjectAlternativeNameType 12, // 6: dev.sigstore.common.v1.X509CertificateChain.certificates:type_name -> dev.sigstore.common.v1.X509Certificate 16, // 7: dev.sigstore.common.v1.TimeRange.start:type_name -> google.protobuf.Timestamp 16, // 8: dev.sigstore.common.v1.TimeRange.end:type_name -> google.protobuf.Timestamp 9, // [9:9] is the sub-list for method output_type 9, // [9:9] is the sub-list for method input_type 9, // [9:9] is the sub-list for extension 
type_name 9, // [9:9] is the sub-list for extension extendee 0, // [0:9] is the sub-list for field type_name } func init() { file_sigstore_common_proto_init() } func file_sigstore_common_proto_init() { if File_sigstore_common_proto != nil { return } file_sigstore_common_proto_msgTypes[4].OneofWrappers = []any{} file_sigstore_common_proto_msgTypes[10].OneofWrappers = []any{ (*SubjectAlternativeName_Regexp)(nil), (*SubjectAlternativeName_Value)(nil), } file_sigstore_common_proto_msgTypes[12].OneofWrappers = []any{} type x struct{} out := protoimpl.TypeBuilder{ File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: unsafe.Slice(unsafe.StringData(file_sigstore_common_proto_rawDesc), len(file_sigstore_common_proto_rawDesc)), NumEnums: 3, NumMessages: 13, NumExtensions: 0, NumServices: 0, }, GoTypes: file_sigstore_common_proto_goTypes, DependencyIndexes: file_sigstore_common_proto_depIdxs, EnumInfos: file_sigstore_common_proto_enumTypes, MessageInfos: file_sigstore_common_proto_msgTypes, }.Build() File_sigstore_common_proto = out.File file_sigstore_common_proto_goTypes = nil file_sigstore_common_proto_depIdxs = nil } protobuf-specs-0.4.1/gen/pb-go/dsse/000077500000000000000000000000001477352757300172265ustar00rootroot00000000000000protobuf-specs-0.4.1/gen/pb-go/dsse/envelope.pb.go000066400000000000000000000201021477352757300217650ustar00rootroot00000000000000// https://raw.githubusercontent.com/secure-systems-lab/dsse/9c813476bd36de70a5738c72e784f123ecea16af/envelope.proto // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // Code generated by protoc-gen-go. DO NOT EDIT. // versions: // protoc-gen-go v1.36.5 // protoc v5.29.4 // source: envelope.proto package dsse import ( protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" reflect "reflect" sync "sync" unsafe "unsafe" ) const ( // Verify that this generated code is sufficiently up-to-date. _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) // Verify that runtime/protoimpl is sufficiently up-to-date. _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) ) // An authenticated message of arbitrary type. type Envelope struct { state protoimpl.MessageState `protogen:"open.v1"` // Message to be signed. (In JSON, this is encoded as base64.) // REQUIRED. Payload []byte `protobuf:"bytes,1,opt,name=payload,proto3" json:"payload,omitempty"` // String unambiguously identifying how to interpret payload. // REQUIRED. PayloadType string `protobuf:"bytes,2,opt,name=payloadType,proto3" json:"payloadType,omitempty"` // Signature over: // // PAE(type, payload) // // Where PAE is defined as: // PAE(type, payload) = "DSSEv1" + SP + LEN(type) + SP + type + SP + LEN(payload) + SP + payload // + = concatenation // SP = ASCII space [0x20] // "DSSEv1" = ASCII [0x44, 0x53, 0x53, 0x45, 0x76, 0x31] // LEN(s) = ASCII decimal encoding of the byte length of s, with no leading zeros // REQUIRED (length >= 1). 
Signatures []*Signature `protobuf:"bytes,3,rep,name=signatures,proto3" json:"signatures,omitempty"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } func (x *Envelope) Reset() { *x = Envelope{} mi := &file_envelope_proto_msgTypes[0] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } func (x *Envelope) String() string { return protoimpl.X.MessageStringOf(x) } func (*Envelope) ProtoMessage() {} func (x *Envelope) ProtoReflect() protoreflect.Message { mi := &file_envelope_proto_msgTypes[0] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use Envelope.ProtoReflect.Descriptor instead. func (*Envelope) Descriptor() ([]byte, []int) { return file_envelope_proto_rawDescGZIP(), []int{0} } func (x *Envelope) GetPayload() []byte { if x != nil { return x.Payload } return nil } func (x *Envelope) GetPayloadType() string { if x != nil { return x.PayloadType } return "" } func (x *Envelope) GetSignatures() []*Signature { if x != nil { return x.Signatures } return nil } type Signature struct { state protoimpl.MessageState `protogen:"open.v1"` // Signature itself. (In JSON, this is encoded as base64.) // REQUIRED. Sig []byte `protobuf:"bytes,1,opt,name=sig,proto3" json:"sig,omitempty"` // *Unauthenticated* hint identifying which public key was used. // OPTIONAL. Keyid string `protobuf:"bytes,2,opt,name=keyid,proto3" json:"keyid,omitempty"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } func (x *Signature) Reset() { *x = Signature{} mi := &file_envelope_proto_msgTypes[1] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } func (x *Signature) String() string { return protoimpl.X.MessageStringOf(x) } func (*Signature) ProtoMessage() {} func (x *Signature) ProtoReflect() protoreflect.Message { mi := &file_envelope_proto_msgTypes[1] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use Signature.ProtoReflect.Descriptor instead. 
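// Illustrative sketch, not part of the generated code: building the DSSE
// pre-authentication encoding (PAE) described in the comment on
// Envelope.Signatures above. Each Signature.Sig is expected to be a raw
// signature over exactly these bytes. The function name is an assumption;
// assumes additional import: fmt.
func examplePAE(payloadType string, payload []byte) []byte {
	// LEN(s) is the ASCII decimal byte length, which %d produces with no
	// leading zeros; the separators are single ASCII spaces.
	return []byte(fmt.Sprintf("DSSEv1 %d %s %d %s",
		len(payloadType), payloadType, len(payload), payload))
}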
func (*Signature) Descriptor() ([]byte, []int) { return file_envelope_proto_rawDescGZIP(), []int{1} } func (x *Signature) GetSig() []byte { if x != nil { return x.Sig } return nil } func (x *Signature) GetKeyid() string { if x != nil { return x.Keyid } return "" } var File_envelope_proto protoreflect.FileDescriptor var file_envelope_proto_rawDesc = string([]byte{ 0x0a, 0x0e, 0x65, 0x6e, 0x76, 0x65, 0x6c, 0x6f, 0x70, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x09, 0x69, 0x6f, 0x2e, 0x69, 0x6e, 0x74, 0x6f, 0x74, 0x6f, 0x22, 0x7c, 0x0a, 0x08, 0x45, 0x6e, 0x76, 0x65, 0x6c, 0x6f, 0x70, 0x65, 0x12, 0x18, 0x0a, 0x07, 0x70, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x07, 0x70, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x12, 0x20, 0x0a, 0x0b, 0x70, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x54, 0x79, 0x70, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x70, 0x61, 0x79, 0x6c, 0x6f, 0x61, 0x64, 0x54, 0x79, 0x70, 0x65, 0x12, 0x34, 0x0a, 0x0a, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x69, 0x6f, 0x2e, 0x69, 0x6e, 0x74, 0x6f, 0x74, 0x6f, 0x2e, 0x53, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x52, 0x0a, 0x73, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x73, 0x22, 0x33, 0x0a, 0x09, 0x53, 0x69, 0x67, 0x6e, 0x61, 0x74, 0x75, 0x72, 0x65, 0x12, 0x10, 0x0a, 0x03, 0x73, 0x69, 0x67, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x03, 0x73, 0x69, 0x67, 0x12, 0x14, 0x0a, 0x05, 0x6b, 0x65, 0x79, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x6b, 0x65, 0x79, 0x69, 0x64, 0x42, 0x44, 0x5a, 0x31, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2d, 0x73, 0x70, 0x65, 0x63, 0x73, 0x2f, 0x67, 0x65, 0x6e, 0x2f, 0x70, 0x62, 0x2d, 0x67, 0x6f, 0x2f, 0x64, 0x73, 0x73, 0x65, 0xea, 0x02, 0x0e, 0x53, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x3a, 0x3a, 0x44, 0x53, 0x53, 0x45, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, }) var ( file_envelope_proto_rawDescOnce sync.Once file_envelope_proto_rawDescData []byte ) func file_envelope_proto_rawDescGZIP() []byte { file_envelope_proto_rawDescOnce.Do(func() { file_envelope_proto_rawDescData = protoimpl.X.CompressGZIP(unsafe.Slice(unsafe.StringData(file_envelope_proto_rawDesc), len(file_envelope_proto_rawDesc))) }) return file_envelope_proto_rawDescData } var file_envelope_proto_msgTypes = make([]protoimpl.MessageInfo, 2) var file_envelope_proto_goTypes = []any{ (*Envelope)(nil), // 0: io.intoto.Envelope (*Signature)(nil), // 1: io.intoto.Signature } var file_envelope_proto_depIdxs = []int32{ 1, // 0: io.intoto.Envelope.signatures:type_name -> io.intoto.Signature 1, // [1:1] is the sub-list for method output_type 1, // [1:1] is the sub-list for method input_type 1, // [1:1] is the sub-list for extension type_name 1, // [1:1] is the sub-list for extension extendee 0, // [0:1] is the sub-list for field type_name } func init() { file_envelope_proto_init() } func file_envelope_proto_init() { if File_envelope_proto != nil { return } type x struct{} out := protoimpl.TypeBuilder{ File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: unsafe.Slice(unsafe.StringData(file_envelope_proto_rawDesc), len(file_envelope_proto_rawDesc)), NumEnums: 0, NumMessages: 2, NumExtensions: 0, NumServices: 0, }, GoTypes: file_envelope_proto_goTypes, DependencyIndexes: file_envelope_proto_depIdxs, MessageInfos: 
file_envelope_proto_msgTypes, }.Build() File_envelope_proto = out.File file_envelope_proto_goTypes = nil file_envelope_proto_depIdxs = nil } protobuf-specs-0.4.1/gen/pb-go/events/000077500000000000000000000000001477352757300175745ustar00rootroot00000000000000protobuf-specs-0.4.1/gen/pb-go/events/v1/000077500000000000000000000000001477352757300201225ustar00rootroot00000000000000protobuf-specs-0.4.1/gen/pb-go/events/v1/events.pb.go000066400000000000000000000502411477352757300223570ustar00rootroot00000000000000// https://github.com/cloudevents/spec/blob/v1.0.2/cloudevents/formats/cloudevents.proto // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. //* // CloudEvent Protobuf Format // // - Required context attributes are explicity represented. // - Optional and Extension context attributes are carried in a map structure. // - Data may be represented as binary, text, or protobuf messages. // Code generated by protoc-gen-go. DO NOT EDIT. // versions: // protoc-gen-go v1.36.5 // protoc v5.29.4 // source: events.proto package v1 import ( protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" anypb "google.golang.org/protobuf/types/known/anypb" timestamppb "google.golang.org/protobuf/types/known/timestamppb" reflect "reflect" sync "sync" unsafe "unsafe" ) const ( // Verify that this generated code is sufficiently up-to-date. _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) // Verify that runtime/protoimpl is sufficiently up-to-date. 
_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) ) type CloudEvent struct { state protoimpl.MessageState `protogen:"open.v1"` // Required Attributes Id string `protobuf:"bytes,1,opt,name=id,proto3" json:"id,omitempty"` Source string `protobuf:"bytes,2,opt,name=source,proto3" json:"source,omitempty"` // URI-reference SpecVersion string `protobuf:"bytes,3,opt,name=spec_version,json=specVersion,proto3" json:"spec_version,omitempty"` Type string `protobuf:"bytes,4,opt,name=type,proto3" json:"type,omitempty"` // Optional & Extension Attributes Attributes map[string]*CloudEvent_CloudEventAttributeValue `protobuf:"bytes,5,rep,name=attributes,proto3" json:"attributes,omitempty" protobuf_key:"bytes,1,opt,name=key" protobuf_val:"bytes,2,opt,name=value"` // -- CloudEvent Data (Bytes, Text, or Proto) // // Types that are valid to be assigned to Data: // // *CloudEvent_BinaryData // *CloudEvent_TextData // *CloudEvent_ProtoData Data isCloudEvent_Data `protobuf_oneof:"data"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } func (x *CloudEvent) Reset() { *x = CloudEvent{} mi := &file_events_proto_msgTypes[0] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } func (x *CloudEvent) String() string { return protoimpl.X.MessageStringOf(x) } func (*CloudEvent) ProtoMessage() {} func (x *CloudEvent) ProtoReflect() protoreflect.Message { mi := &file_events_proto_msgTypes[0] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use CloudEvent.ProtoReflect.Descriptor instead. func (*CloudEvent) Descriptor() ([]byte, []int) { return file_events_proto_rawDescGZIP(), []int{0} } func (x *CloudEvent) GetId() string { if x != nil { return x.Id } return "" } func (x *CloudEvent) GetSource() string { if x != nil { return x.Source } return "" } func (x *CloudEvent) GetSpecVersion() string { if x != nil { return x.SpecVersion } return "" } func (x *CloudEvent) GetType() string { if x != nil { return x.Type } return "" } func (x *CloudEvent) GetAttributes() map[string]*CloudEvent_CloudEventAttributeValue { if x != nil { return x.Attributes } return nil } func (x *CloudEvent) GetData() isCloudEvent_Data { if x != nil { return x.Data } return nil } func (x *CloudEvent) GetBinaryData() []byte { if x != nil { if x, ok := x.Data.(*CloudEvent_BinaryData); ok { return x.BinaryData } } return nil } func (x *CloudEvent) GetTextData() string { if x != nil { if x, ok := x.Data.(*CloudEvent_TextData); ok { return x.TextData } } return "" } func (x *CloudEvent) GetProtoData() *anypb.Any { if x != nil { if x, ok := x.Data.(*CloudEvent_ProtoData); ok { return x.ProtoData } } return nil } type isCloudEvent_Data interface { isCloudEvent_Data() } type CloudEvent_BinaryData struct { BinaryData []byte `protobuf:"bytes,6,opt,name=binary_data,json=binaryData,proto3,oneof"` } type CloudEvent_TextData struct { TextData string `protobuf:"bytes,7,opt,name=text_data,json=textData,proto3,oneof"` } type CloudEvent_ProtoData struct { ProtoData *anypb.Any `protobuf:"bytes,8,opt,name=proto_data,json=protoData,proto3,oneof"` } func (*CloudEvent_BinaryData) isCloudEvent_Data() {} func (*CloudEvent_TextData) isCloudEvent_Data() {} func (*CloudEvent_ProtoData) isCloudEvent_Data() {} type CloudEventBatch struct { state protoimpl.MessageState `protogen:"open.v1"` Events []*CloudEvent `protobuf:"bytes,1,rep,name=events,proto3" json:"events,omitempty"` unknownFields 
protoimpl.UnknownFields sizeCache protoimpl.SizeCache } func (x *CloudEventBatch) Reset() { *x = CloudEventBatch{} mi := &file_events_proto_msgTypes[1] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } func (x *CloudEventBatch) String() string { return protoimpl.X.MessageStringOf(x) } func (*CloudEventBatch) ProtoMessage() {} func (x *CloudEventBatch) ProtoReflect() protoreflect.Message { mi := &file_events_proto_msgTypes[1] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use CloudEventBatch.ProtoReflect.Descriptor instead. func (*CloudEventBatch) Descriptor() ([]byte, []int) { return file_events_proto_rawDescGZIP(), []int{1} } func (x *CloudEventBatch) GetEvents() []*CloudEvent { if x != nil { return x.Events } return nil } type CloudEvent_CloudEventAttributeValue struct { state protoimpl.MessageState `protogen:"open.v1"` // Types that are valid to be assigned to Attr: // // *CloudEvent_CloudEventAttributeValue_CeBoolean // *CloudEvent_CloudEventAttributeValue_CeInteger // *CloudEvent_CloudEventAttributeValue_CeString // *CloudEvent_CloudEventAttributeValue_CeBytes // *CloudEvent_CloudEventAttributeValue_CeUri // *CloudEvent_CloudEventAttributeValue_CeUriRef // *CloudEvent_CloudEventAttributeValue_CeTimestamp Attr isCloudEvent_CloudEventAttributeValue_Attr `protobuf_oneof:"attr"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } func (x *CloudEvent_CloudEventAttributeValue) Reset() { *x = CloudEvent_CloudEventAttributeValue{} mi := &file_events_proto_msgTypes[3] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } func (x *CloudEvent_CloudEventAttributeValue) String() string { return protoimpl.X.MessageStringOf(x) } func (*CloudEvent_CloudEventAttributeValue) ProtoMessage() {} func (x *CloudEvent_CloudEventAttributeValue) ProtoReflect() protoreflect.Message { mi := &file_events_proto_msgTypes[3] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use CloudEvent_CloudEventAttributeValue.ProtoReflect.Descriptor instead. 
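// Illustrative sketch, not part of the generated code: populating a
// CloudEvent using the generated oneof wrapper types for attributes and
// data. The attribute key "subject", the spec version string and the
// function name are example values chosen for this sketch, not anything
// mandated by the schema.
func exampleNewCloudEvent(id, source, eventType string, body []byte) *CloudEvent {
	return &CloudEvent{
		Id:          id,
		Source:      source,
		SpecVersion: "1.0",
		Type:        eventType,
		Attributes: map[string]*CloudEvent_CloudEventAttributeValue{
			"subject": {
				Attr: &CloudEvent_CloudEventAttributeValue_CeString{CeString: "example"},
			},
		},
		Data: &CloudEvent_BinaryData{BinaryData: body},
	}
}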
func (*CloudEvent_CloudEventAttributeValue) Descriptor() ([]byte, []int) { return file_events_proto_rawDescGZIP(), []int{0, 1} } func (x *CloudEvent_CloudEventAttributeValue) GetAttr() isCloudEvent_CloudEventAttributeValue_Attr { if x != nil { return x.Attr } return nil } func (x *CloudEvent_CloudEventAttributeValue) GetCeBoolean() bool { if x != nil { if x, ok := x.Attr.(*CloudEvent_CloudEventAttributeValue_CeBoolean); ok { return x.CeBoolean } } return false } func (x *CloudEvent_CloudEventAttributeValue) GetCeInteger() int32 { if x != nil { if x, ok := x.Attr.(*CloudEvent_CloudEventAttributeValue_CeInteger); ok { return x.CeInteger } } return 0 } func (x *CloudEvent_CloudEventAttributeValue) GetCeString() string { if x != nil { if x, ok := x.Attr.(*CloudEvent_CloudEventAttributeValue_CeString); ok { return x.CeString } } return "" } func (x *CloudEvent_CloudEventAttributeValue) GetCeBytes() []byte { if x != nil { if x, ok := x.Attr.(*CloudEvent_CloudEventAttributeValue_CeBytes); ok { return x.CeBytes } } return nil } func (x *CloudEvent_CloudEventAttributeValue) GetCeUri() string { if x != nil { if x, ok := x.Attr.(*CloudEvent_CloudEventAttributeValue_CeUri); ok { return x.CeUri } } return "" } func (x *CloudEvent_CloudEventAttributeValue) GetCeUriRef() string { if x != nil { if x, ok := x.Attr.(*CloudEvent_CloudEventAttributeValue_CeUriRef); ok { return x.CeUriRef } } return "" } func (x *CloudEvent_CloudEventAttributeValue) GetCeTimestamp() *timestamppb.Timestamp { if x != nil { if x, ok := x.Attr.(*CloudEvent_CloudEventAttributeValue_CeTimestamp); ok { return x.CeTimestamp } } return nil } type isCloudEvent_CloudEventAttributeValue_Attr interface { isCloudEvent_CloudEventAttributeValue_Attr() } type CloudEvent_CloudEventAttributeValue_CeBoolean struct { CeBoolean bool `protobuf:"varint,1,opt,name=ce_boolean,json=ceBoolean,proto3,oneof"` } type CloudEvent_CloudEventAttributeValue_CeInteger struct { CeInteger int32 `protobuf:"varint,2,opt,name=ce_integer,json=ceInteger,proto3,oneof"` } type CloudEvent_CloudEventAttributeValue_CeString struct { CeString string `protobuf:"bytes,3,opt,name=ce_string,json=ceString,proto3,oneof"` } type CloudEvent_CloudEventAttributeValue_CeBytes struct { CeBytes []byte `protobuf:"bytes,4,opt,name=ce_bytes,json=ceBytes,proto3,oneof"` } type CloudEvent_CloudEventAttributeValue_CeUri struct { CeUri string `protobuf:"bytes,5,opt,name=ce_uri,json=ceUri,proto3,oneof"` } type CloudEvent_CloudEventAttributeValue_CeUriRef struct { CeUriRef string `protobuf:"bytes,6,opt,name=ce_uri_ref,json=ceUriRef,proto3,oneof"` } type CloudEvent_CloudEventAttributeValue_CeTimestamp struct { CeTimestamp *timestamppb.Timestamp `protobuf:"bytes,7,opt,name=ce_timestamp,json=ceTimestamp,proto3,oneof"` } func (*CloudEvent_CloudEventAttributeValue_CeBoolean) isCloudEvent_CloudEventAttributeValue_Attr() {} func (*CloudEvent_CloudEventAttributeValue_CeInteger) isCloudEvent_CloudEventAttributeValue_Attr() {} func (*CloudEvent_CloudEventAttributeValue_CeString) isCloudEvent_CloudEventAttributeValue_Attr() {} func (*CloudEvent_CloudEventAttributeValue_CeBytes) isCloudEvent_CloudEventAttributeValue_Attr() {} func (*CloudEvent_CloudEventAttributeValue_CeUri) isCloudEvent_CloudEventAttributeValue_Attr() {} func (*CloudEvent_CloudEventAttributeValue_CeUriRef) isCloudEvent_CloudEventAttributeValue_Attr() {} func (*CloudEvent_CloudEventAttributeValue_CeTimestamp) isCloudEvent_CloudEventAttributeValue_Attr() { } var File_events_proto protoreflect.FileDescriptor var file_events_proto_rawDesc = 
string([]byte{ 0x0a, 0x0c, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x16, 0x64, 0x65, 0x76, 0x2e, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x73, 0x2e, 0x76, 0x31, 0x1a, 0x19, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x61, 0x6e, 0x79, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0xd9, 0x05, 0x0a, 0x0a, 0x43, 0x6c, 0x6f, 0x75, 0x64, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x12, 0x0e, 0x0a, 0x02, 0x69, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x02, 0x69, 0x64, 0x12, 0x16, 0x0a, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x12, 0x21, 0x0a, 0x0c, 0x73, 0x70, 0x65, 0x63, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0b, 0x73, 0x70, 0x65, 0x63, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x12, 0x0a, 0x04, 0x74, 0x79, 0x70, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x04, 0x74, 0x79, 0x70, 0x65, 0x12, 0x52, 0x0a, 0x0a, 0x61, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x32, 0x2e, 0x64, 0x65, 0x76, 0x2e, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x73, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x6c, 0x6f, 0x75, 0x64, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x2e, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0a, 0x61, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x73, 0x12, 0x21, 0x0a, 0x0b, 0x62, 0x69, 0x6e, 0x61, 0x72, 0x79, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0c, 0x48, 0x00, 0x52, 0x0a, 0x62, 0x69, 0x6e, 0x61, 0x72, 0x79, 0x44, 0x61, 0x74, 0x61, 0x12, 0x1d, 0x0a, 0x09, 0x74, 0x65, 0x78, 0x74, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x18, 0x07, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x08, 0x74, 0x65, 0x78, 0x74, 0x44, 0x61, 0x74, 0x61, 0x12, 0x35, 0x0a, 0x0a, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x5f, 0x64, 0x61, 0x74, 0x61, 0x18, 0x08, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x41, 0x6e, 0x79, 0x48, 0x00, 0x52, 0x09, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x44, 0x61, 0x74, 0x61, 0x1a, 0x7a, 0x0a, 0x0f, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x51, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x3b, 0x2e, 0x64, 0x65, 0x76, 0x2e, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x73, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x6c, 0x6f, 0x75, 0x64, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x2e, 0x43, 0x6c, 0x6f, 0x75, 0x64, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x1a, 0x9a, 0x02, 0x0a, 0x18, 0x43, 0x6c, 0x6f, 0x75, 0x64, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x41, 0x74, 0x74, 0x72, 0x69, 0x62, 0x75, 0x74, 0x65, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x12, 0x1f, 0x0a, 0x0a, 0x63, 0x65, 0x5f, 0x62, 0x6f, 0x6f, 0x6c, 0x65, 0x61, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x08, 0x48, 0x00, 0x52, 0x09, 
0x63, 0x65, 0x42, 0x6f, 0x6f, 0x6c, 0x65, 0x61, 0x6e, 0x12, 0x1f, 0x0a, 0x0a, 0x63, 0x65, 0x5f, 0x69, 0x6e, 0x74, 0x65, 0x67, 0x65, 0x72, 0x18, 0x02, 0x20, 0x01, 0x28, 0x05, 0x48, 0x00, 0x52, 0x09, 0x63, 0x65, 0x49, 0x6e, 0x74, 0x65, 0x67, 0x65, 0x72, 0x12, 0x1d, 0x0a, 0x09, 0x63, 0x65, 0x5f, 0x73, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x18, 0x03, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x08, 0x63, 0x65, 0x53, 0x74, 0x72, 0x69, 0x6e, 0x67, 0x12, 0x1b, 0x0a, 0x08, 0x63, 0x65, 0x5f, 0x62, 0x79, 0x74, 0x65, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0c, 0x48, 0x00, 0x52, 0x07, 0x63, 0x65, 0x42, 0x79, 0x74, 0x65, 0x73, 0x12, 0x17, 0x0a, 0x06, 0x63, 0x65, 0x5f, 0x75, 0x72, 0x69, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x05, 0x63, 0x65, 0x55, 0x72, 0x69, 0x12, 0x1e, 0x0a, 0x0a, 0x63, 0x65, 0x5f, 0x75, 0x72, 0x69, 0x5f, 0x72, 0x65, 0x66, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x08, 0x63, 0x65, 0x55, 0x72, 0x69, 0x52, 0x65, 0x66, 0x12, 0x3f, 0x0a, 0x0c, 0x63, 0x65, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x48, 0x00, 0x52, 0x0b, 0x63, 0x65, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x42, 0x06, 0x0a, 0x04, 0x61, 0x74, 0x74, 0x72, 0x42, 0x06, 0x0a, 0x04, 0x64, 0x61, 0x74, 0x61, 0x22, 0x4d, 0x0a, 0x0f, 0x43, 0x6c, 0x6f, 0x75, 0x64, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x42, 0x61, 0x74, 0x63, 0x68, 0x12, 0x3a, 0x0a, 0x06, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x64, 0x65, 0x76, 0x2e, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x73, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x6c, 0x6f, 0x75, 0x64, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x52, 0x06, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x73, 0x42, 0x6b, 0x0a, 0x1c, 0x64, 0x65, 0x76, 0x2e, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x73, 0x2e, 0x76, 0x31, 0x50, 0x01, 0x5a, 0x36, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2d, 0x73, 0x70, 0x65, 0x63, 0x73, 0x2f, 0x67, 0x65, 0x6e, 0x2f, 0x70, 0x62, 0x2d, 0x67, 0x6f, 0x2f, 0x65, 0x76, 0x65, 0x6e, 0x74, 0x73, 0x2f, 0x76, 0x31, 0xea, 0x02, 0x10, 0x53, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x3a, 0x3a, 0x45, 0x76, 0x65, 0x6e, 0x74, 0x73, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, }) var ( file_events_proto_rawDescOnce sync.Once file_events_proto_rawDescData []byte ) func file_events_proto_rawDescGZIP() []byte { file_events_proto_rawDescOnce.Do(func() { file_events_proto_rawDescData = protoimpl.X.CompressGZIP(unsafe.Slice(unsafe.StringData(file_events_proto_rawDesc), len(file_events_proto_rawDesc))) }) return file_events_proto_rawDescData } var file_events_proto_msgTypes = make([]protoimpl.MessageInfo, 4) var file_events_proto_goTypes = []any{ (*CloudEvent)(nil), // 0: dev.sigstore.events.v1.CloudEvent (*CloudEventBatch)(nil), // 1: dev.sigstore.events.v1.CloudEventBatch nil, // 2: dev.sigstore.events.v1.CloudEvent.AttributesEntry (*CloudEvent_CloudEventAttributeValue)(nil), // 3: dev.sigstore.events.v1.CloudEvent.CloudEventAttributeValue (*anypb.Any)(nil), // 4: google.protobuf.Any (*timestamppb.Timestamp)(nil), // 5: google.protobuf.Timestamp } var file_events_proto_depIdxs = []int32{ 2, // 0: 
dev.sigstore.events.v1.CloudEvent.attributes:type_name -> dev.sigstore.events.v1.CloudEvent.AttributesEntry 4, // 1: dev.sigstore.events.v1.CloudEvent.proto_data:type_name -> google.protobuf.Any 0, // 2: dev.sigstore.events.v1.CloudEventBatch.events:type_name -> dev.sigstore.events.v1.CloudEvent 3, // 3: dev.sigstore.events.v1.CloudEvent.AttributesEntry.value:type_name -> dev.sigstore.events.v1.CloudEvent.CloudEventAttributeValue 5, // 4: dev.sigstore.events.v1.CloudEvent.CloudEventAttributeValue.ce_timestamp:type_name -> google.protobuf.Timestamp 5, // [5:5] is the sub-list for method output_type 5, // [5:5] is the sub-list for method input_type 5, // [5:5] is the sub-list for extension type_name 5, // [5:5] is the sub-list for extension extendee 0, // [0:5] is the sub-list for field type_name } func init() { file_events_proto_init() } func file_events_proto_init() { if File_events_proto != nil { return } file_events_proto_msgTypes[0].OneofWrappers = []any{ (*CloudEvent_BinaryData)(nil), (*CloudEvent_TextData)(nil), (*CloudEvent_ProtoData)(nil), } file_events_proto_msgTypes[3].OneofWrappers = []any{ (*CloudEvent_CloudEventAttributeValue_CeBoolean)(nil), (*CloudEvent_CloudEventAttributeValue_CeInteger)(nil), (*CloudEvent_CloudEventAttributeValue_CeString)(nil), (*CloudEvent_CloudEventAttributeValue_CeBytes)(nil), (*CloudEvent_CloudEventAttributeValue_CeUri)(nil), (*CloudEvent_CloudEventAttributeValue_CeUriRef)(nil), (*CloudEvent_CloudEventAttributeValue_CeTimestamp)(nil), } type x struct{} out := protoimpl.TypeBuilder{ File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: unsafe.Slice(unsafe.StringData(file_events_proto_rawDesc), len(file_events_proto_rawDesc)), NumEnums: 0, NumMessages: 4, NumExtensions: 0, NumServices: 0, }, GoTypes: file_events_proto_goTypes, DependencyIndexes: file_events_proto_depIdxs, MessageInfos: file_events_proto_msgTypes, }.Build() File_events_proto = out.File file_events_proto_goTypes = nil file_events_proto_depIdxs = nil } protobuf-specs-0.4.1/gen/pb-go/rekor/000077500000000000000000000000001477352757300174125ustar00rootroot00000000000000protobuf-specs-0.4.1/gen/pb-go/rekor/v1/000077500000000000000000000000001477352757300177405ustar00rootroot00000000000000protobuf-specs-0.4.1/gen/pb-go/rekor/v1/sigstore_rekor.pb.go000066400000000000000000000571401477352757300237370ustar00rootroot00000000000000// Copyright 2022 The Sigstore Authors. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // Code generated by protoc-gen-go. DO NOT EDIT. // versions: // protoc-gen-go v1.36.5 // protoc v5.29.4 // source: sigstore_rekor.proto package v1 import ( v1 "github.com/sigstore/protobuf-specs/gen/pb-go/common/v1" _ "google.golang.org/genproto/googleapis/api/annotations" protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" reflect "reflect" sync "sync" unsafe "unsafe" ) const ( // Verify that this generated code is sufficiently up-to-date. 
_ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) // Verify that runtime/protoimpl is sufficiently up-to-date. _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) ) // KindVersion contains the entry's kind and api version. type KindVersion struct { state protoimpl.MessageState `protogen:"open.v1"` // Kind is the type of entry being stored in the log. // See here for a list: https://github.com/sigstore/rekor/tree/main/pkg/types Kind string `protobuf:"bytes,1,opt,name=kind,proto3" json:"kind,omitempty"` // The specific api version of the type. Version string `protobuf:"bytes,2,opt,name=version,proto3" json:"version,omitempty"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } func (x *KindVersion) Reset() { *x = KindVersion{} mi := &file_sigstore_rekor_proto_msgTypes[0] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } func (x *KindVersion) String() string { return protoimpl.X.MessageStringOf(x) } func (*KindVersion) ProtoMessage() {} func (x *KindVersion) ProtoReflect() protoreflect.Message { mi := &file_sigstore_rekor_proto_msgTypes[0] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use KindVersion.ProtoReflect.Descriptor instead. func (*KindVersion) Descriptor() ([]byte, []int) { return file_sigstore_rekor_proto_rawDescGZIP(), []int{0} } func (x *KindVersion) GetKind() string { if x != nil { return x.Kind } return "" } func (x *KindVersion) GetVersion() string { if x != nil { return x.Version } return "" } // The checkpoint MUST contain an origin string as a unique log identifier, // the tree size, and the root hash. It MAY also be followed by optional data, // and clients MUST NOT assume optional data. The checkpoint MUST also contain // a signature over the root hash (tree head). The checkpoint MAY contain additional // signatures, but the first SHOULD be the signature from the log. Checkpoint contents // are concatenated with newlines into a single string. // The checkpoint format is described in // https://github.com/transparency-dev/formats/blob/main/log/README.md // and https://github.com/C2SP/C2SP/blob/main/tlog-checkpoint.md. // An example implementation can be found in https://github.com/sigstore/rekor/blob/main/pkg/util/signed_note.go type Checkpoint struct { state protoimpl.MessageState `protogen:"open.v1"` Envelope string `protobuf:"bytes,1,opt,name=envelope,proto3" json:"envelope,omitempty"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } func (x *Checkpoint) Reset() { *x = Checkpoint{} mi := &file_sigstore_rekor_proto_msgTypes[1] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } func (x *Checkpoint) String() string { return protoimpl.X.MessageStringOf(x) } func (*Checkpoint) ProtoMessage() {} func (x *Checkpoint) ProtoReflect() protoreflect.Message { mi := &file_sigstore_rekor_proto_msgTypes[1] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use Checkpoint.ProtoReflect.Descriptor instead. func (*Checkpoint) Descriptor() ([]byte, []int) { return file_sigstore_rekor_proto_rawDescGZIP(), []int{1} } func (x *Checkpoint) GetEnvelope() string { if x != nil { return x.Envelope } return "" } // InclusionProof is the proof returned from the transparency log. 
Can // be used for offline or online verification against the log. type InclusionProof struct { state protoimpl.MessageState `protogen:"open.v1"` // The index of the entry in the tree it was written to. LogIndex int64 `protobuf:"varint,1,opt,name=log_index,json=logIndex,proto3" json:"log_index,omitempty"` // The hash digest stored at the root of the merkle tree at the time // the proof was generated. RootHash []byte `protobuf:"bytes,2,opt,name=root_hash,json=rootHash,proto3" json:"root_hash,omitempty"` // The size of the merkle tree at the time the proof was generated. TreeSize int64 `protobuf:"varint,3,opt,name=tree_size,json=treeSize,proto3" json:"tree_size,omitempty"` // A list of hashes required to compute the inclusion proof, sorted // in order from leaf to root. // Note that leaf and root hashes are not included. // The root hash is available separately in this message, and the // leaf hash should be calculated by the client. Hashes [][]byte `protobuf:"bytes,4,rep,name=hashes,proto3" json:"hashes,omitempty"` // Signature of the tree head, as of the time of this proof was // generated. See above info on 'Checkpoint' for more details. Checkpoint *Checkpoint `protobuf:"bytes,5,opt,name=checkpoint,proto3" json:"checkpoint,omitempty"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } func (x *InclusionProof) Reset() { *x = InclusionProof{} mi := &file_sigstore_rekor_proto_msgTypes[2] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } func (x *InclusionProof) String() string { return protoimpl.X.MessageStringOf(x) } func (*InclusionProof) ProtoMessage() {} func (x *InclusionProof) ProtoReflect() protoreflect.Message { mi := &file_sigstore_rekor_proto_msgTypes[2] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use InclusionProof.ProtoReflect.Descriptor instead. func (*InclusionProof) Descriptor() ([]byte, []int) { return file_sigstore_rekor_proto_rawDescGZIP(), []int{2} } func (x *InclusionProof) GetLogIndex() int64 { if x != nil { return x.LogIndex } return 0 } func (x *InclusionProof) GetRootHash() []byte { if x != nil { return x.RootHash } return nil } func (x *InclusionProof) GetTreeSize() int64 { if x != nil { return x.TreeSize } return 0 } func (x *InclusionProof) GetHashes() [][]byte { if x != nil { return x.Hashes } return nil } func (x *InclusionProof) GetCheckpoint() *Checkpoint { if x != nil { return x.Checkpoint } return nil } // The inclusion promise is calculated by Rekor. It's calculated as a // signature over a canonical JSON serialization of the persisted entry, the // log ID, log index and the integration timestamp. // See https://github.com/sigstore/rekor/blob/a6e58f72b6b18cc06cefe61808efd562b9726330/pkg/api/entries.go#L54 // The format of the signature depends on the transparency log's public key. // If the signature algorithm requires a hash function and/or a signature // scheme (e.g. RSA) those has to be retrieved out-of-band from the log's // operators, together with the public key. // This is used to verify the integration timestamp's value and that the log // has promised to include the entry. 
type InclusionPromise struct { state protoimpl.MessageState `protogen:"open.v1"` SignedEntryTimestamp []byte `protobuf:"bytes,1,opt,name=signed_entry_timestamp,json=signedEntryTimestamp,proto3" json:"signed_entry_timestamp,omitempty"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } func (x *InclusionPromise) Reset() { *x = InclusionPromise{} mi := &file_sigstore_rekor_proto_msgTypes[3] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } func (x *InclusionPromise) String() string { return protoimpl.X.MessageStringOf(x) } func (*InclusionPromise) ProtoMessage() {} func (x *InclusionPromise) ProtoReflect() protoreflect.Message { mi := &file_sigstore_rekor_proto_msgTypes[3] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use InclusionPromise.ProtoReflect.Descriptor instead. func (*InclusionPromise) Descriptor() ([]byte, []int) { return file_sigstore_rekor_proto_rawDescGZIP(), []int{3} } func (x *InclusionPromise) GetSignedEntryTimestamp() []byte { if x != nil { return x.SignedEntryTimestamp } return nil } // TransparencyLogEntry captures all the details required from Rekor to // reconstruct an entry, given that the payload is provided via other means. // This type can easily be created from the existing response from Rekor. // Future iterations could rely on Rekor returning the minimal set of // attributes (excluding the payload) that are required for verifying the // inclusion promise. The inclusion promise (called SignedEntryTimestamp in // the response from Rekor) is similar to a Signed Certificate Timestamp // as described here https://www.rfc-editor.org/rfc/rfc6962.html#section-3.2. type TransparencyLogEntry struct { state protoimpl.MessageState `protogen:"open.v1"` // The global index of the entry, used when querying the log by index. LogIndex int64 `protobuf:"varint,1,opt,name=log_index,json=logIndex,proto3" json:"log_index,omitempty"` // The unique identifier of the log. LogId *v1.LogId `protobuf:"bytes,2,opt,name=log_id,json=logId,proto3" json:"log_id,omitempty"` // The kind (type) and version of the object associated with this // entry. These values are required to construct the entry during // verification. KindVersion *KindVersion `protobuf:"bytes,3,opt,name=kind_version,json=kindVersion,proto3" json:"kind_version,omitempty"` // The UNIX timestamp from the log when the entry was persisted. // The integration time MUST NOT be trusted if inclusion_promise // is omitted. IntegratedTime int64 `protobuf:"varint,4,opt,name=integrated_time,json=integratedTime,proto3" json:"integrated_time,omitempty"` // The inclusion promise/signed entry timestamp from the log. // Required for v0.1 bundles, and MUST be verified. // Optional for >= v0.2 bundles if another suitable source of // time is present (such as another source of signed time, // or the current system time for long-lived certificates). // MUST be verified if no other suitable source of time is present, // and SHOULD be verified otherwise. InclusionPromise *InclusionPromise `protobuf:"bytes,5,opt,name=inclusion_promise,json=inclusionPromise,proto3" json:"inclusion_promise,omitempty"` // The inclusion proof can be used for offline or online verification // that the entry was appended to the log, and that the log has not been // altered. 
InclusionProof *InclusionProof `protobuf:"bytes,6,opt,name=inclusion_proof,json=inclusionProof,proto3" json:"inclusion_proof,omitempty"` // Optional. The canonicalized transparency log entry, used to // reconstruct the Signed Entry Timestamp (SET) during verification. // The contents of this field are the same as the `body` field in // a Rekor response, meaning that it does **not** include the "full" // canonicalized form (of log index, ID, etc.) which are // exposed as separate fields. The verifier is responsible for // combining the `canonicalized_body`, `log_index`, `log_id`, // and `integrated_time` into the payload that the SET's signature // is generated over. // This field is intended to be used in cases where the SET cannot be // produced determinisitically (e.g. inconsistent JSON field ordering, // differing whitespace, etc). // // If set, clients MUST verify that the signature referenced in the // `canonicalized_body` matches the signature provided in the // `Bundle.content`. // If not set, clients are responsible for constructing an equivalent // payload from other sources to verify the signature. CanonicalizedBody []byte `protobuf:"bytes,7,opt,name=canonicalized_body,json=canonicalizedBody,proto3" json:"canonicalized_body,omitempty"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } func (x *TransparencyLogEntry) Reset() { *x = TransparencyLogEntry{} mi := &file_sigstore_rekor_proto_msgTypes[4] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } func (x *TransparencyLogEntry) String() string { return protoimpl.X.MessageStringOf(x) } func (*TransparencyLogEntry) ProtoMessage() {} func (x *TransparencyLogEntry) ProtoReflect() protoreflect.Message { mi := &file_sigstore_rekor_proto_msgTypes[4] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use TransparencyLogEntry.ProtoReflect.Descriptor instead. 
func (*TransparencyLogEntry) Descriptor() ([]byte, []int) { return file_sigstore_rekor_proto_rawDescGZIP(), []int{4} } func (x *TransparencyLogEntry) GetLogIndex() int64 { if x != nil { return x.LogIndex } return 0 } func (x *TransparencyLogEntry) GetLogId() *v1.LogId { if x != nil { return x.LogId } return nil } func (x *TransparencyLogEntry) GetKindVersion() *KindVersion { if x != nil { return x.KindVersion } return nil } func (x *TransparencyLogEntry) GetIntegratedTime() int64 { if x != nil { return x.IntegratedTime } return 0 } func (x *TransparencyLogEntry) GetInclusionPromise() *InclusionPromise { if x != nil { return x.InclusionPromise } return nil } func (x *TransparencyLogEntry) GetInclusionProof() *InclusionProof { if x != nil { return x.InclusionProof } return nil } func (x *TransparencyLogEntry) GetCanonicalizedBody() []byte { if x != nil { return x.CanonicalizedBody } return nil } var File_sigstore_rekor_proto protoreflect.FileDescriptor var file_sigstore_rekor_proto_rawDesc = string([]byte{ 0x0a, 0x14, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x5f, 0x72, 0x65, 0x6b, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x15, 0x64, 0x65, 0x76, 0x2e, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x72, 0x65, 0x6b, 0x6f, 0x72, 0x2e, 0x76, 0x31, 0x1a, 0x1f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x5f, 0x62, 0x65, 0x68, 0x61, 0x76, 0x69, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x15, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0x45, 0x0a, 0x0b, 0x4b, 0x69, 0x6e, 0x64, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x17, 0x0a, 0x04, 0x6b, 0x69, 0x6e, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x42, 0x03, 0xe0, 0x41, 0x02, 0x52, 0x04, 0x6b, 0x69, 0x6e, 0x64, 0x12, 0x1d, 0x0a, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x42, 0x03, 0xe0, 0x41, 0x02, 0x52, 0x07, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x22, 0x2d, 0x0a, 0x0a, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x12, 0x1f, 0x0a, 0x08, 0x65, 0x6e, 0x76, 0x65, 0x6c, 0x6f, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x42, 0x03, 0xe0, 0x41, 0x02, 0x52, 0x08, 0x65, 0x6e, 0x76, 0x65, 0x6c, 0x6f, 0x70, 0x65, 0x22, 0xdb, 0x01, 0x0a, 0x0e, 0x49, 0x6e, 0x63, 0x6c, 0x75, 0x73, 0x69, 0x6f, 0x6e, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x12, 0x20, 0x0a, 0x09, 0x6c, 0x6f, 0x67, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 0x42, 0x03, 0xe0, 0x41, 0x02, 0x52, 0x08, 0x6c, 0x6f, 0x67, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x12, 0x20, 0x0a, 0x09, 0x72, 0x6f, 0x6f, 0x74, 0x5f, 0x68, 0x61, 0x73, 0x68, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x03, 0xe0, 0x41, 0x02, 0x52, 0x08, 0x72, 0x6f, 0x6f, 0x74, 0x48, 0x61, 0x73, 0x68, 0x12, 0x20, 0x0a, 0x09, 0x74, 0x72, 0x65, 0x65, 0x5f, 0x73, 0x69, 0x7a, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x03, 0x42, 0x03, 0xe0, 0x41, 0x02, 0x52, 0x08, 0x74, 0x72, 0x65, 0x65, 0x53, 0x69, 0x7a, 0x65, 0x12, 0x1b, 0x0a, 0x06, 0x68, 0x61, 0x73, 0x68, 0x65, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0c, 0x42, 0x03, 0xe0, 0x41, 0x02, 0x52, 0x06, 0x68, 0x61, 0x73, 0x68, 0x65, 0x73, 0x12, 0x46, 0x0a, 0x0a, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x64, 0x65, 0x76, 0x2e, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x72, 0x65, 0x6b, 0x6f, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 
0x6e, 0x74, 0x42, 0x03, 0xe0, 0x41, 0x02, 0x52, 0x0a, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x22, 0x4d, 0x0a, 0x10, 0x49, 0x6e, 0x63, 0x6c, 0x75, 0x73, 0x69, 0x6f, 0x6e, 0x50, 0x72, 0x6f, 0x6d, 0x69, 0x73, 0x65, 0x12, 0x39, 0x0a, 0x16, 0x73, 0x69, 0x67, 0x6e, 0x65, 0x64, 0x5f, 0x65, 0x6e, 0x74, 0x72, 0x79, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x42, 0x03, 0xe0, 0x41, 0x02, 0x52, 0x14, 0x73, 0x69, 0x67, 0x6e, 0x65, 0x64, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x22, 0xc7, 0x03, 0x0a, 0x14, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x63, 0x79, 0x4c, 0x6f, 0x67, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x20, 0x0a, 0x09, 0x6c, 0x6f, 0x67, 0x5f, 0x69, 0x6e, 0x64, 0x65, 0x78, 0x18, 0x01, 0x20, 0x01, 0x28, 0x03, 0x42, 0x03, 0xe0, 0x41, 0x02, 0x52, 0x08, 0x6c, 0x6f, 0x67, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x12, 0x39, 0x0a, 0x06, 0x6c, 0x6f, 0x67, 0x5f, 0x69, 0x64, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x64, 0x65, 0x76, 0x2e, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x76, 0x31, 0x2e, 0x4c, 0x6f, 0x67, 0x49, 0x64, 0x42, 0x03, 0xe0, 0x41, 0x02, 0x52, 0x05, 0x6c, 0x6f, 0x67, 0x49, 0x64, 0x12, 0x4a, 0x0a, 0x0c, 0x6b, 0x69, 0x6e, 0x64, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x64, 0x65, 0x76, 0x2e, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x72, 0x65, 0x6b, 0x6f, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x4b, 0x69, 0x6e, 0x64, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x42, 0x03, 0xe0, 0x41, 0x02, 0x52, 0x0b, 0x6b, 0x69, 0x6e, 0x64, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x2c, 0x0a, 0x0f, 0x69, 0x6e, 0x74, 0x65, 0x67, 0x72, 0x61, 0x74, 0x65, 0x64, 0x5f, 0x74, 0x69, 0x6d, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x03, 0x42, 0x03, 0xe0, 0x41, 0x02, 0x52, 0x0e, 0x69, 0x6e, 0x74, 0x65, 0x67, 0x72, 0x61, 0x74, 0x65, 0x64, 0x54, 0x69, 0x6d, 0x65, 0x12, 0x54, 0x0a, 0x11, 0x69, 0x6e, 0x63, 0x6c, 0x75, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x70, 0x72, 0x6f, 0x6d, 0x69, 0x73, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x27, 0x2e, 0x64, 0x65, 0x76, 0x2e, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x72, 0x65, 0x6b, 0x6f, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x49, 0x6e, 0x63, 0x6c, 0x75, 0x73, 0x69, 0x6f, 0x6e, 0x50, 0x72, 0x6f, 0x6d, 0x69, 0x73, 0x65, 0x52, 0x10, 0x69, 0x6e, 0x63, 0x6c, 0x75, 0x73, 0x69, 0x6f, 0x6e, 0x50, 0x72, 0x6f, 0x6d, 0x69, 0x73, 0x65, 0x12, 0x53, 0x0a, 0x0f, 0x69, 0x6e, 0x63, 0x6c, 0x75, 0x73, 0x69, 0x6f, 0x6e, 0x5f, 0x70, 0x72, 0x6f, 0x6f, 0x66, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x25, 0x2e, 0x64, 0x65, 0x76, 0x2e, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x72, 0x65, 0x6b, 0x6f, 0x72, 0x2e, 0x76, 0x31, 0x2e, 0x49, 0x6e, 0x63, 0x6c, 0x75, 0x73, 0x69, 0x6f, 0x6e, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x42, 0x03, 0xe0, 0x41, 0x02, 0x52, 0x0e, 0x69, 0x6e, 0x63, 0x6c, 0x75, 0x73, 0x69, 0x6f, 0x6e, 0x50, 0x72, 0x6f, 0x6f, 0x66, 0x12, 0x2d, 0x0a, 0x12, 0x63, 0x61, 0x6e, 0x6f, 0x6e, 0x69, 0x63, 0x61, 0x6c, 0x69, 0x7a, 0x65, 0x64, 0x5f, 0x62, 0x6f, 0x64, 0x79, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x11, 0x63, 0x61, 0x6e, 0x6f, 0x6e, 0x69, 0x63, 0x61, 0x6c, 0x69, 0x7a, 0x65, 0x64, 0x42, 0x6f, 0x64, 0x79, 0x42, 0x78, 0x0a, 0x1b, 0x64, 0x65, 0x76, 0x2e, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x72, 0x65, 0x6b, 0x6f, 0x72, 0x2e, 0x76, 0x31, 0x42, 0x0a, 0x52, 0x65, 0x6b, 
0x6f, 0x72, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x35, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2d, 0x73, 0x70, 0x65, 0x63, 0x73, 0x2f, 0x67, 0x65, 0x6e, 0x2f, 0x70, 0x62, 0x2d, 0x67, 0x6f, 0x2f, 0x72, 0x65, 0x6b, 0x6f, 0x72, 0x2f, 0x76, 0x31, 0xea, 0x02, 0x13, 0x53, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x3a, 0x3a, 0x52, 0x65, 0x6b, 0x6f, 0x72, 0x3a, 0x3a, 0x56, 0x31, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, }) var ( file_sigstore_rekor_proto_rawDescOnce sync.Once file_sigstore_rekor_proto_rawDescData []byte ) func file_sigstore_rekor_proto_rawDescGZIP() []byte { file_sigstore_rekor_proto_rawDescOnce.Do(func() { file_sigstore_rekor_proto_rawDescData = protoimpl.X.CompressGZIP(unsafe.Slice(unsafe.StringData(file_sigstore_rekor_proto_rawDesc), len(file_sigstore_rekor_proto_rawDesc))) }) return file_sigstore_rekor_proto_rawDescData } var file_sigstore_rekor_proto_msgTypes = make([]protoimpl.MessageInfo, 5) var file_sigstore_rekor_proto_goTypes = []any{ (*KindVersion)(nil), // 0: dev.sigstore.rekor.v1.KindVersion (*Checkpoint)(nil), // 1: dev.sigstore.rekor.v1.Checkpoint (*InclusionProof)(nil), // 2: dev.sigstore.rekor.v1.InclusionProof (*InclusionPromise)(nil), // 3: dev.sigstore.rekor.v1.InclusionPromise (*TransparencyLogEntry)(nil), // 4: dev.sigstore.rekor.v1.TransparencyLogEntry (*v1.LogId)(nil), // 5: dev.sigstore.common.v1.LogId } var file_sigstore_rekor_proto_depIdxs = []int32{ 1, // 0: dev.sigstore.rekor.v1.InclusionProof.checkpoint:type_name -> dev.sigstore.rekor.v1.Checkpoint 5, // 1: dev.sigstore.rekor.v1.TransparencyLogEntry.log_id:type_name -> dev.sigstore.common.v1.LogId 0, // 2: dev.sigstore.rekor.v1.TransparencyLogEntry.kind_version:type_name -> dev.sigstore.rekor.v1.KindVersion 3, // 3: dev.sigstore.rekor.v1.TransparencyLogEntry.inclusion_promise:type_name -> dev.sigstore.rekor.v1.InclusionPromise 2, // 4: dev.sigstore.rekor.v1.TransparencyLogEntry.inclusion_proof:type_name -> dev.sigstore.rekor.v1.InclusionProof 5, // [5:5] is the sub-list for method output_type 5, // [5:5] is the sub-list for method input_type 5, // [5:5] is the sub-list for extension type_name 5, // [5:5] is the sub-list for extension extendee 0, // [0:5] is the sub-list for field type_name } func init() { file_sigstore_rekor_proto_init() } func file_sigstore_rekor_proto_init() { if File_sigstore_rekor_proto != nil { return } type x struct{} out := protoimpl.TypeBuilder{ File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: unsafe.Slice(unsafe.StringData(file_sigstore_rekor_proto_rawDesc), len(file_sigstore_rekor_proto_rawDesc)), NumEnums: 0, NumMessages: 5, NumExtensions: 0, NumServices: 0, }, GoTypes: file_sigstore_rekor_proto_goTypes, DependencyIndexes: file_sigstore_rekor_proto_depIdxs, MessageInfos: file_sigstore_rekor_proto_msgTypes, }.Build() File_sigstore_rekor_proto = out.File file_sigstore_rekor_proto_goTypes = nil file_sigstore_rekor_proto_depIdxs = nil } protobuf-specs-0.4.1/gen/pb-go/trustroot/000077500000000000000000000000001477352757300203555ustar00rootroot00000000000000protobuf-specs-0.4.1/gen/pb-go/trustroot/v1/000077500000000000000000000000001477352757300207035ustar00rootroot00000000000000protobuf-specs-0.4.1/gen/pb-go/trustroot/v1/sigstore_trustroot.pb.go000066400000000000000000001327251477352757300256500ustar00rootroot00000000000000// Copyright 2022 The Sigstore Authors. 
// // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // Code generated by protoc-gen-go. DO NOT EDIT. // versions: // protoc-gen-go v1.36.5 // protoc v5.29.4 // source: sigstore_trustroot.proto package v1 import ( v1 "github.com/sigstore/protobuf-specs/gen/pb-go/common/v1" _ "google.golang.org/genproto/googleapis/api/annotations" protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" reflect "reflect" sync "sync" unsafe "unsafe" ) const ( // Verify that this generated code is sufficiently up-to-date. _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) // Verify that runtime/protoimpl is sufficiently up-to-date. _ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) ) // ServiceSelector specifies how a client SHOULD select a set of // Services to connect to. A client SHOULD throw an error if // the value is SERVICE_SELECTOR_UNDEFINED. type ServiceSelector int32 const ( ServiceSelector_SERVICE_SELECTOR_UNDEFINED ServiceSelector = 0 // Clients SHOULD select all Services based on supported API version // and validity window. ServiceSelector_ALL ServiceSelector = 1 // Clients SHOULD select one Service based on supported API version // and validity window. It is up to the client implementation to // decide how to select the Service, e.g. random or round-robin. ServiceSelector_ANY ServiceSelector = 2 // Clients SHOULD select a specific number of Services based on // supported API version and validity window, using the provided // `count`. It is up to the client implementation to decide how to // select the Service, e.g. random or round-robin. ServiceSelector_EXACT ServiceSelector = 3 ) // Enum value maps for ServiceSelector. var ( ServiceSelector_name = map[int32]string{ 0: "SERVICE_SELECTOR_UNDEFINED", 1: "ALL", 2: "ANY", 3: "EXACT", } ServiceSelector_value = map[string]int32{ "SERVICE_SELECTOR_UNDEFINED": 0, "ALL": 1, "ANY": 2, "EXACT": 3, } ) func (x ServiceSelector) Enum() *ServiceSelector { p := new(ServiceSelector) *p = x return p } func (x ServiceSelector) String() string { return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x)) } func (ServiceSelector) Descriptor() protoreflect.EnumDescriptor { return file_sigstore_trustroot_proto_enumTypes[0].Descriptor() } func (ServiceSelector) Type() protoreflect.EnumType { return &file_sigstore_trustroot_proto_enumTypes[0] } func (x ServiceSelector) Number() protoreflect.EnumNumber { return protoreflect.EnumNumber(x) } // Deprecated: Use ServiceSelector.Descriptor instead. func (ServiceSelector) EnumDescriptor() ([]byte, []int) { return file_sigstore_trustroot_proto_rawDescGZIP(), []int{0} } // TransparencyLogInstance describes the immutable parameters from a // transparency log. // See https://www.rfc-editor.org/rfc/rfc9162.html#name-log-parameters // for more details. // The included parameters are the minimal set required to identify a log, // and verify an inclusion proof/promise. 
type TransparencyLogInstance struct { state protoimpl.MessageState `protogen:"open.v1"` // The base URL at which can be used to URLs for the client. BaseUrl string `protobuf:"bytes,1,opt,name=base_url,json=baseUrl,proto3" json:"base_url,omitempty"` // The hash algorithm used for the Merkle Tree. HashAlgorithm v1.HashAlgorithm `protobuf:"varint,2,opt,name=hash_algorithm,json=hashAlgorithm,proto3,enum=dev.sigstore.common.v1.HashAlgorithm" json:"hash_algorithm,omitempty"` // The public key used to verify signatures generated by the log. // This attribute contains the signature algorithm used by the log. PublicKey *v1.PublicKey `protobuf:"bytes,3,opt,name=public_key,json=publicKey,proto3" json:"public_key,omitempty"` // The unique identifier for this transparency log. // Represented as the SHA-256 hash of the log's public key, // calculated over the DER encoding of the key represented as // SubjectPublicKeyInfo. // See https://www.rfc-editor.org/rfc/rfc6962#section-3.2 LogId *v1.LogId `protobuf:"bytes,4,opt,name=log_id,json=logId,proto3" json:"log_id,omitempty"` // The checkpoint key identifier for the log used in a checkpoint. // Optional, not provided for logs that do not generate checkpoints. // For logs that do generate checkpoints, if not set, assume // log_id equals checkpoint_key_id. // Follows the specification described here // for ECDSA and Ed25519 signatures: // https://github.com/C2SP/C2SP/blob/main/signed-note.md#signatures // For RSA signatures, the key ID will match the ECDSA format, the // hashed DER-encoded SPKI public key. Publicly witnessed logs MUST NOT // use RSA-signed checkpoints, since witnesses do not support // RSA signatures. // This is provided for convenience. Clients can also calculate the // checkpoint key ID given the log's public key. // SHOULD be set for logs generating Ed25519 signatures. // SHOULD be 4 bytes long, as a truncated hash. CheckpointKeyId *v1.LogId `protobuf:"bytes,5,opt,name=checkpoint_key_id,json=checkpointKeyId,proto3" json:"checkpoint_key_id,omitempty"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } func (x *TransparencyLogInstance) Reset() { *x = TransparencyLogInstance{} mi := &file_sigstore_trustroot_proto_msgTypes[0] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } func (x *TransparencyLogInstance) String() string { return protoimpl.X.MessageStringOf(x) } func (*TransparencyLogInstance) ProtoMessage() {} func (x *TransparencyLogInstance) ProtoReflect() protoreflect.Message { mi := &file_sigstore_trustroot_proto_msgTypes[0] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use TransparencyLogInstance.ProtoReflect.Descriptor instead. 
func (*TransparencyLogInstance) Descriptor() ([]byte, []int) { return file_sigstore_trustroot_proto_rawDescGZIP(), []int{0} } func (x *TransparencyLogInstance) GetBaseUrl() string { if x != nil { return x.BaseUrl } return "" } func (x *TransparencyLogInstance) GetHashAlgorithm() v1.HashAlgorithm { if x != nil { return x.HashAlgorithm } return v1.HashAlgorithm(0) } func (x *TransparencyLogInstance) GetPublicKey() *v1.PublicKey { if x != nil { return x.PublicKey } return nil } func (x *TransparencyLogInstance) GetLogId() *v1.LogId { if x != nil { return x.LogId } return nil } func (x *TransparencyLogInstance) GetCheckpointKeyId() *v1.LogId { if x != nil { return x.CheckpointKeyId } return nil } // CertificateAuthority enlists the information required to identify which // CA to use and perform signature verification. type CertificateAuthority struct { state protoimpl.MessageState `protogen:"open.v1"` // The root certificate MUST be self-signed, and so the subject and // issuer are the same. Subject *v1.DistinguishedName `protobuf:"bytes,1,opt,name=subject,proto3" json:"subject,omitempty"` // The URI identifies the certificate authority. // // It is RECOMMENDED that the URI is the base URL for the certificate // authority, that can be provided to any SDK/client provided // by the certificate authority to interact with the certificate // authority. Uri string `protobuf:"bytes,2,opt,name=uri,proto3" json:"uri,omitempty"` // The certificate chain for this CA. The last certificate in the chain // MUST be the trust anchor. The trust anchor MAY be a self-signed root // CA certificate or MAY be an intermediate CA certificate. CertChain *v1.X509CertificateChain `protobuf:"bytes,3,opt,name=cert_chain,json=certChain,proto3" json:"cert_chain,omitempty"` // The time the *entire* chain was valid. This is at max the // longest interval when *all* certificates in the chain were valid, // but it MAY be shorter. Clients MUST check timestamps against *both* // the `valid_for` time range *and* the entire certificate chain. // // The TimeRange should be considered valid *inclusive* of the // endpoints. ValidFor *v1.TimeRange `protobuf:"bytes,4,opt,name=valid_for,json=validFor,proto3" json:"valid_for,omitempty"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } func (x *CertificateAuthority) Reset() { *x = CertificateAuthority{} mi := &file_sigstore_trustroot_proto_msgTypes[1] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } func (x *CertificateAuthority) String() string { return protoimpl.X.MessageStringOf(x) } func (*CertificateAuthority) ProtoMessage() {} func (x *CertificateAuthority) ProtoReflect() protoreflect.Message { mi := &file_sigstore_trustroot_proto_msgTypes[1] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use CertificateAuthority.ProtoReflect.Descriptor instead. 
func (*CertificateAuthority) Descriptor() ([]byte, []int) { return file_sigstore_trustroot_proto_rawDescGZIP(), []int{1} } func (x *CertificateAuthority) GetSubject() *v1.DistinguishedName { if x != nil { return x.Subject } return nil } func (x *CertificateAuthority) GetUri() string { if x != nil { return x.Uri } return "" } func (x *CertificateAuthority) GetCertChain() *v1.X509CertificateChain { if x != nil { return x.CertChain } return nil } func (x *CertificateAuthority) GetValidFor() *v1.TimeRange { if x != nil { return x.ValidFor } return nil } // TrustedRoot describes the client's complete set of trusted entities. // How the TrustedRoot is populated is not specified, but can be a // combination of many sources such as TUF repositories, files on disk etc. // // The TrustedRoot is not meant to be used for any artifact verification, only // to capture the complete/global set of trusted verification materials. // When verifying an artifact, based on the artifact and policies, a selection // of keys/authorities are expected to be extracted and provided to the // verification function. This way the set of keys/authorities can be kept to // a minimal set by the policy to gain better control over what signatures // that are allowed. // // The embedded transparency logs, CT logs, CAs and TSAs MUST include any // previously used instance -- otherwise signatures made in the past cannot // be verified. // // All the listed instances SHOULD be sorted by the 'valid_for' in ascending // order, that is, the oldest instance first. Only the last instance is // allowed to have their 'end' timestamp unset. All previous instances MUST // have a closed interval of validity. The last instance MAY have a closed // interval. Clients MUST accept instances that overlaps in time, if not // clients may experience problems during rotations of verification // materials. // // To be able to manage planned rotations of either transparency logs or // certificate authorities, clienst MUST accept lists of instances where // the last instance have a 'valid_for' that belongs to the future. // This should not be a problem as clients SHOULD first seek the trust root // for a suitable instance before creating a per artifact trust root (that // is, a sub-set of the complete trust root) that is used for verification. type TrustedRoot struct { state protoimpl.MessageState `protogen:"open.v1"` // MUST be application/vnd.dev.sigstore.trustedroot.v0.1+json // when encoded as JSON. // Clients MUST be able to process and parse content with the media // type defined in the old format: // application/vnd.dev.sigstore.trustedroot+json;version=0.1 MediaType string `protobuf:"bytes,1,opt,name=media_type,json=mediaType,proto3" json:"media_type,omitempty"` // A set of trusted Rekor servers. Tlogs []*TransparencyLogInstance `protobuf:"bytes,2,rep,name=tlogs,proto3" json:"tlogs,omitempty"` // A set of trusted certificate authorities (e.g Fulcio), and any // intermediate certificates they provide. // If a CA is issuing multiple intermediate certificate, each // combination shall be represented as separate chain. I.e, a single // root cert may appear in multiple chains but with different // intermediate and/or leaf certificates. // The certificates are intended to be used for verifying artifact // signatures. CertificateAuthorities []*CertificateAuthority `protobuf:"bytes,3,rep,name=certificate_authorities,json=certificateAuthorities,proto3" json:"certificate_authorities,omitempty"` // A set of trusted certificate transparency logs. 
Ctlogs []*TransparencyLogInstance `protobuf:"bytes,4,rep,name=ctlogs,proto3" json:"ctlogs,omitempty"` // A set of trusted timestamping authorities. TimestampAuthorities []*CertificateAuthority `protobuf:"bytes,5,rep,name=timestamp_authorities,json=timestampAuthorities,proto3" json:"timestamp_authorities,omitempty"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } func (x *TrustedRoot) Reset() { *x = TrustedRoot{} mi := &file_sigstore_trustroot_proto_msgTypes[2] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } func (x *TrustedRoot) String() string { return protoimpl.X.MessageStringOf(x) } func (*TrustedRoot) ProtoMessage() {} func (x *TrustedRoot) ProtoReflect() protoreflect.Message { mi := &file_sigstore_trustroot_proto_msgTypes[2] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use TrustedRoot.ProtoReflect.Descriptor instead. func (*TrustedRoot) Descriptor() ([]byte, []int) { return file_sigstore_trustroot_proto_rawDescGZIP(), []int{2} } func (x *TrustedRoot) GetMediaType() string { if x != nil { return x.MediaType } return "" } func (x *TrustedRoot) GetTlogs() []*TransparencyLogInstance { if x != nil { return x.Tlogs } return nil } func (x *TrustedRoot) GetCertificateAuthorities() []*CertificateAuthority { if x != nil { return x.CertificateAuthorities } return nil } func (x *TrustedRoot) GetCtlogs() []*TransparencyLogInstance { if x != nil { return x.Ctlogs } return nil } func (x *TrustedRoot) GetTimestampAuthorities() []*CertificateAuthority { if x != nil { return x.TimestampAuthorities } return nil } // SigningConfig represents the trusted entities/state needed by Sigstore // signing. In particular, it primarily contains service URLs that a Sigstore // signer may need to connect to for the online aspects of signing. type SigningConfig struct { state protoimpl.MessageState `protogen:"open.v1"` // MUST be application/vnd.dev.sigstore.signingconfig.v0.2+json // Clients MAY choose to also support // application/vnd.dev.sigstore.signingconfig.v0.1+json MediaType string `protobuf:"bytes,5,opt,name=media_type,json=mediaType,proto3" json:"media_type,omitempty"` // URLs to Fulcio-compatible CAs, capable of receiving // Certificate Signing Requests (CSRs) and responding with // issued certificates. // // These URLs MUST be the "base" URL for the CAs, which clients // should construct an appropriate CSR endpoint on top of. // For example, if a CA URL is `https://example.com/ca`, then // the client MAY construct the CSR endpoint as // `https://example.com/ca/api/v2/signingCert`. // // Clients MUST select only one Service with the highest API version // that the client is compatible with, that is within its // validity period, and has the newest validity start date. // Client SHOULD select the first Service that meets this requirement. // All listed Services SHOULD be sorted by the `valid_for` window in // descending order, with the newest instance first. CaUrls []*Service `protobuf:"bytes,6,rep,name=ca_urls,json=caUrls,proto3" json:"ca_urls,omitempty"` // URLs to OpenID Connect identity providers. // // These URLs MUST be the "base" URLs for the OIDC IdPs, which clients // should perform well-known OpenID Connect discovery against. 
// // Clients MUST select only one Service with the highest API version // that the client is compatible with, that is within its // validity period, and has the newest validity start date. // Client SHOULD select the first Service that meets this requirement. // All listed Services SHOULD be sorted by the `valid_for` window in // descending order, with the newest instance first. OidcUrls []*Service `protobuf:"bytes,7,rep,name=oidc_urls,json=oidcUrls,proto3" json:"oidc_urls,omitempty"` // URLs to Rekor transparency logs. // // These URL MUST be the "base" URLs for the transparency logs, // which clients should construct appropriate API endpoints on top of. // // Clients MUST select Services with the highest API version // that the client is compatible with, that are within its // validity period, and have the newest validity start dates. // All listed Services SHOULD be sorted by the `valid_for` window in // descending order, with the newest instance first. // // Clients MUST select Services based on the selector value of // `rekor_tlog_config`. RekorTlogUrls []*Service `protobuf:"bytes,8,rep,name=rekor_tlog_urls,json=rekorTlogUrls,proto3" json:"rekor_tlog_urls,omitempty"` // Specifies how a client should select the set of Rekor transparency // logs to write to. RekorTlogConfig *ServiceConfiguration `protobuf:"bytes,9,opt,name=rekor_tlog_config,json=rekorTlogConfig,proto3" json:"rekor_tlog_config,omitempty"` // URLs to RFC 3161 Time Stamping Authorities (TSA). // // These URLs MUST be the *full* URL for the TSA, meaning that it // should be suitable for submitting Time Stamp Requests (TSRs) to // via HTTP, per RFC 3161. // // Clients MUST select Services with the highest API version // that the client is compatible with, that are within its // validity period, and have the newest validity start dates. // All listed Services SHOULD be sorted by the `valid_for` window in // descending order, with the newest instance first. // // Clients MUST select Services based on the selector value of // `tsa_config`. TsaUrls []*Service `protobuf:"bytes,10,rep,name=tsa_urls,json=tsaUrls,proto3" json:"tsa_urls,omitempty"` // Specifies how a client should select the set of TSAs to request // signed timestamps from. TsaConfig *ServiceConfiguration `protobuf:"bytes,11,opt,name=tsa_config,json=tsaConfig,proto3" json:"tsa_config,omitempty"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } func (x *SigningConfig) Reset() { *x = SigningConfig{} mi := &file_sigstore_trustroot_proto_msgTypes[3] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } func (x *SigningConfig) String() string { return protoimpl.X.MessageStringOf(x) } func (*SigningConfig) ProtoMessage() {} func (x *SigningConfig) ProtoReflect() protoreflect.Message { mi := &file_sigstore_trustroot_proto_msgTypes[3] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use SigningConfig.ProtoReflect.Descriptor instead. 
func (*SigningConfig) Descriptor() ([]byte, []int) { return file_sigstore_trustroot_proto_rawDescGZIP(), []int{3} } func (x *SigningConfig) GetMediaType() string { if x != nil { return x.MediaType } return "" } func (x *SigningConfig) GetCaUrls() []*Service { if x != nil { return x.CaUrls } return nil } func (x *SigningConfig) GetOidcUrls() []*Service { if x != nil { return x.OidcUrls } return nil } func (x *SigningConfig) GetRekorTlogUrls() []*Service { if x != nil { return x.RekorTlogUrls } return nil } func (x *SigningConfig) GetRekorTlogConfig() *ServiceConfiguration { if x != nil { return x.RekorTlogConfig } return nil } func (x *SigningConfig) GetTsaUrls() []*Service { if x != nil { return x.TsaUrls } return nil } func (x *SigningConfig) GetTsaConfig() *ServiceConfiguration { if x != nil { return x.TsaConfig } return nil } // Service represents an instance of a service that is a part of Sigstore infrastructure. // Clients MUST use the API version hint to determine the service with the // highest API version that the client is compatible with. Clients MUST also // only connect to services within the specified validity period and that has the // newest validity start date. type Service struct { state protoimpl.MessageState `protogen:"open.v1"` // URL of the service. MUST include scheme and authority. MAY include path. Url string `protobuf:"bytes,1,opt,name=url,proto3" json:"url,omitempty"` // Specifies the major API version. A value of 0 represents a service that // has not yet been released. MajorApiVersion uint32 `protobuf:"varint,2,opt,name=major_api_version,json=majorApiVersion,proto3" json:"major_api_version,omitempty"` // Validity period of a service. A service that has only a start date // SHOULD be considered the most recent instance of that service, but // the client MUST NOT assume there is only one valid instance. // The TimeRange MUST be considered valid *inclusive* of the // endpoints. ValidFor *v1.TimeRange `protobuf:"bytes,3,opt,name=valid_for,json=validFor,proto3" json:"valid_for,omitempty"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } func (x *Service) Reset() { *x = Service{} mi := &file_sigstore_trustroot_proto_msgTypes[4] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } func (x *Service) String() string { return protoimpl.X.MessageStringOf(x) } func (*Service) ProtoMessage() {} func (x *Service) ProtoReflect() protoreflect.Message { mi := &file_sigstore_trustroot_proto_msgTypes[4] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use Service.ProtoReflect.Descriptor instead. func (*Service) Descriptor() ([]byte, []int) { return file_sigstore_trustroot_proto_rawDescGZIP(), []int{4} } func (x *Service) GetUrl() string { if x != nil { return x.Url } return "" } func (x *Service) GetMajorApiVersion() uint32 { if x != nil { return x.MajorApiVersion } return 0 } func (x *Service) GetValidFor() *v1.TimeRange { if x != nil { return x.ValidFor } return nil } // ServiceConfiguration specifies how a client should select a set of // Services to connect to, along with a count when a specific number // of Services is requested. type ServiceConfiguration struct { state protoimpl.MessageState `protogen:"open.v1"` // How a client should select a set of Services to connect to. 
Selector ServiceSelector `protobuf:"varint,1,opt,name=selector,proto3,enum=dev.sigstore.trustroot.v1.ServiceSelector" json:"selector,omitempty"` // count specifies the number of Services the client should use. // Only used when selector is set to EXACT, and count MUST be greater // than 0. count MUST be less than or equal to the number of Services. Count uint32 `protobuf:"varint,2,opt,name=count,proto3" json:"count,omitempty"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } func (x *ServiceConfiguration) Reset() { *x = ServiceConfiguration{} mi := &file_sigstore_trustroot_proto_msgTypes[5] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } func (x *ServiceConfiguration) String() string { return protoimpl.X.MessageStringOf(x) } func (*ServiceConfiguration) ProtoMessage() {} func (x *ServiceConfiguration) ProtoReflect() protoreflect.Message { mi := &file_sigstore_trustroot_proto_msgTypes[5] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use ServiceConfiguration.ProtoReflect.Descriptor instead. func (*ServiceConfiguration) Descriptor() ([]byte, []int) { return file_sigstore_trustroot_proto_rawDescGZIP(), []int{5} } func (x *ServiceConfiguration) GetSelector() ServiceSelector { if x != nil { return x.Selector } return ServiceSelector_SERVICE_SELECTOR_UNDEFINED } func (x *ServiceConfiguration) GetCount() uint32 { if x != nil { return x.Count } return 0 } // ClientTrustConfig describes the complete state needed by a client // to perform both signing and verification operations against a particular // instance of Sigstore. type ClientTrustConfig struct { state protoimpl.MessageState `protogen:"open.v1"` // MUST be application/vnd.dev.sigstore.clienttrustconfig.v0.1+json MediaType string `protobuf:"bytes,1,opt,name=media_type,json=mediaType,proto3" json:"media_type,omitempty"` // The root of trust, which MUST be present. TrustedRoot *TrustedRoot `protobuf:"bytes,2,opt,name=trusted_root,json=trustedRoot,proto3" json:"trusted_root,omitempty"` // Configuration for signing clients, which MUST be present. SigningConfig *SigningConfig `protobuf:"bytes,3,opt,name=signing_config,json=signingConfig,proto3" json:"signing_config,omitempty"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } func (x *ClientTrustConfig) Reset() { *x = ClientTrustConfig{} mi := &file_sigstore_trustroot_proto_msgTypes[6] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } func (x *ClientTrustConfig) String() string { return protoimpl.X.MessageStringOf(x) } func (*ClientTrustConfig) ProtoMessage() {} func (x *ClientTrustConfig) ProtoReflect() protoreflect.Message { mi := &file_sigstore_trustroot_proto_msgTypes[6] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use ClientTrustConfig.ProtoReflect.Descriptor instead. 
func (*ClientTrustConfig) Descriptor() ([]byte, []int) { return file_sigstore_trustroot_proto_rawDescGZIP(), []int{6} } func (x *ClientTrustConfig) GetMediaType() string { if x != nil { return x.MediaType } return "" } func (x *ClientTrustConfig) GetTrustedRoot() *TrustedRoot { if x != nil { return x.TrustedRoot } return nil } func (x *ClientTrustConfig) GetSigningConfig() *SigningConfig { if x != nil { return x.SigningConfig } return nil } var File_sigstore_trustroot_proto protoreflect.FileDescriptor var file_sigstore_trustroot_proto_rawDesc = string([]byte{ 0x0a, 0x18, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x5f, 0x74, 0x72, 0x75, 0x73, 0x74, 0x72, 0x6f, 0x6f, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x19, 0x64, 0x65, 0x76, 0x2e, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x74, 0x72, 0x75, 0x73, 0x74, 0x72, 0x6f, 0x6f, 0x74, 0x2e, 0x76, 0x31, 0x1a, 0x1f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x61, 0x70, 0x69, 0x2f, 0x66, 0x69, 0x65, 0x6c, 0x64, 0x5f, 0x62, 0x65, 0x68, 0x61, 0x76, 0x69, 0x6f, 0x72, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x15, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0xc5, 0x02, 0x0a, 0x17, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x63, 0x79, 0x4c, 0x6f, 0x67, 0x49, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, 0x12, 0x19, 0x0a, 0x08, 0x62, 0x61, 0x73, 0x65, 0x5f, 0x75, 0x72, 0x6c, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07, 0x62, 0x61, 0x73, 0x65, 0x55, 0x72, 0x6c, 0x12, 0x4c, 0x0a, 0x0e, 0x68, 0x61, 0x73, 0x68, 0x5f, 0x61, 0x6c, 0x67, 0x6f, 0x72, 0x69, 0x74, 0x68, 0x6d, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x25, 0x2e, 0x64, 0x65, 0x76, 0x2e, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x76, 0x31, 0x2e, 0x48, 0x61, 0x73, 0x68, 0x41, 0x6c, 0x67, 0x6f, 0x72, 0x69, 0x74, 0x68, 0x6d, 0x52, 0x0d, 0x68, 0x61, 0x73, 0x68, 0x41, 0x6c, 0x67, 0x6f, 0x72, 0x69, 0x74, 0x68, 0x6d, 0x12, 0x40, 0x0a, 0x0a, 0x70, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x5f, 0x6b, 0x65, 0x79, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x64, 0x65, 0x76, 0x2e, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x76, 0x31, 0x2e, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x4b, 0x65, 0x79, 0x52, 0x09, 0x70, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x4b, 0x65, 0x79, 0x12, 0x34, 0x0a, 0x06, 0x6c, 0x6f, 0x67, 0x5f, 0x69, 0x64, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x64, 0x65, 0x76, 0x2e, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x76, 0x31, 0x2e, 0x4c, 0x6f, 0x67, 0x49, 0x64, 0x52, 0x05, 0x6c, 0x6f, 0x67, 0x49, 0x64, 0x12, 0x49, 0x0a, 0x11, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x5f, 0x6b, 0x65, 0x79, 0x5f, 0x69, 0x64, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1d, 0x2e, 0x64, 0x65, 0x76, 0x2e, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x76, 0x31, 0x2e, 0x4c, 0x6f, 0x67, 0x49, 0x64, 0x52, 0x0f, 0x63, 0x68, 0x65, 0x63, 0x6b, 0x70, 0x6f, 0x69, 0x6e, 0x74, 0x4b, 0x65, 0x79, 0x49, 0x64, 0x22, 0xfa, 0x01, 0x0a, 0x14, 0x43, 0x65, 0x72, 0x74, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x65, 0x41, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x69, 0x74, 0x79, 0x12, 0x43, 0x0a, 0x07, 0x73, 0x75, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x29, 0x2e, 0x64, 0x65, 0x76, 0x2e, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x63, 0x6f, 0x6d, 
0x6d, 0x6f, 0x6e, 0x2e, 0x76, 0x31, 0x2e, 0x44, 0x69, 0x73, 0x74, 0x69, 0x6e, 0x67, 0x75, 0x69, 0x73, 0x68, 0x65, 0x64, 0x4e, 0x61, 0x6d, 0x65, 0x52, 0x07, 0x73, 0x75, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x12, 0x10, 0x0a, 0x03, 0x75, 0x72, 0x69, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x75, 0x72, 0x69, 0x12, 0x4b, 0x0a, 0x0a, 0x63, 0x65, 0x72, 0x74, 0x5f, 0x63, 0x68, 0x61, 0x69, 0x6e, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2c, 0x2e, 0x64, 0x65, 0x76, 0x2e, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x76, 0x31, 0x2e, 0x58, 0x35, 0x30, 0x39, 0x43, 0x65, 0x72, 0x74, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x65, 0x43, 0x68, 0x61, 0x69, 0x6e, 0x52, 0x09, 0x63, 0x65, 0x72, 0x74, 0x43, 0x68, 0x61, 0x69, 0x6e, 0x12, 0x3e, 0x0a, 0x09, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x5f, 0x66, 0x6f, 0x72, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x64, 0x65, 0x76, 0x2e, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x76, 0x31, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x52, 0x08, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x46, 0x6f, 0x72, 0x22, 0x92, 0x03, 0x0a, 0x0b, 0x54, 0x72, 0x75, 0x73, 0x74, 0x65, 0x64, 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x1d, 0x0a, 0x0a, 0x6d, 0x65, 0x64, 0x69, 0x61, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6d, 0x65, 0x64, 0x69, 0x61, 0x54, 0x79, 0x70, 0x65, 0x12, 0x48, 0x0a, 0x05, 0x74, 0x6c, 0x6f, 0x67, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x32, 0x2e, 0x64, 0x65, 0x76, 0x2e, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x74, 0x72, 0x75, 0x73, 0x74, 0x72, 0x6f, 0x6f, 0x74, 0x2e, 0x76, 0x31, 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x63, 0x79, 0x4c, 0x6f, 0x67, 0x49, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, 0x52, 0x05, 0x74, 0x6c, 0x6f, 0x67, 0x73, 0x12, 0x68, 0x0a, 0x17, 0x63, 0x65, 0x72, 0x74, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x65, 0x5f, 0x61, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x69, 0x74, 0x69, 0x65, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2f, 0x2e, 0x64, 0x65, 0x76, 0x2e, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x74, 0x72, 0x75, 0x73, 0x74, 0x72, 0x6f, 0x6f, 0x74, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x65, 0x72, 0x74, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x65, 0x41, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x69, 0x74, 0x79, 0x52, 0x16, 0x63, 0x65, 0x72, 0x74, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x65, 0x41, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x69, 0x74, 0x69, 0x65, 0x73, 0x12, 0x4a, 0x0a, 0x06, 0x63, 0x74, 0x6c, 0x6f, 0x67, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x32, 0x2e, 0x64, 0x65, 0x76, 0x2e, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x74, 0x72, 0x75, 0x73, 0x74, 0x72, 0x6f, 0x6f, 0x74, 0x2e, 0x76, 0x31, 0x2e, 0x54, 0x72, 0x61, 0x6e, 0x73, 0x70, 0x61, 0x72, 0x65, 0x6e, 0x63, 0x79, 0x4c, 0x6f, 0x67, 0x49, 0x6e, 0x73, 0x74, 0x61, 0x6e, 0x63, 0x65, 0x52, 0x06, 0x63, 0x74, 0x6c, 0x6f, 0x67, 0x73, 0x12, 0x64, 0x0a, 0x15, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x5f, 0x61, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x69, 0x74, 0x69, 0x65, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x2f, 0x2e, 0x64, 0x65, 0x76, 0x2e, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x74, 0x72, 0x75, 0x73, 0x74, 0x72, 0x6f, 0x6f, 0x74, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x65, 0x72, 0x74, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x65, 0x41, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x69, 0x74, 0x79, 0x52, 0x14, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x41, 0x75, 0x74, 0x68, 0x6f, 0x72, 
0x69, 0x74, 0x69, 0x65, 0x73, 0x22, 0xea, 0x03, 0x0a, 0x0d, 0x53, 0x69, 0x67, 0x6e, 0x69, 0x6e, 0x67, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x1d, 0x0a, 0x0a, 0x6d, 0x65, 0x64, 0x69, 0x61, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x05, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6d, 0x65, 0x64, 0x69, 0x61, 0x54, 0x79, 0x70, 0x65, 0x12, 0x3b, 0x0a, 0x07, 0x63, 0x61, 0x5f, 0x75, 0x72, 0x6c, 0x73, 0x18, 0x06, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x64, 0x65, 0x76, 0x2e, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x74, 0x72, 0x75, 0x73, 0x74, 0x72, 0x6f, 0x6f, 0x74, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x52, 0x06, 0x63, 0x61, 0x55, 0x72, 0x6c, 0x73, 0x12, 0x3f, 0x0a, 0x09, 0x6f, 0x69, 0x64, 0x63, 0x5f, 0x75, 0x72, 0x6c, 0x73, 0x18, 0x07, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x64, 0x65, 0x76, 0x2e, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x74, 0x72, 0x75, 0x73, 0x74, 0x72, 0x6f, 0x6f, 0x74, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x52, 0x08, 0x6f, 0x69, 0x64, 0x63, 0x55, 0x72, 0x6c, 0x73, 0x12, 0x4a, 0x0a, 0x0f, 0x72, 0x65, 0x6b, 0x6f, 0x72, 0x5f, 0x74, 0x6c, 0x6f, 0x67, 0x5f, 0x75, 0x72, 0x6c, 0x73, 0x18, 0x08, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x64, 0x65, 0x76, 0x2e, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x74, 0x72, 0x75, 0x73, 0x74, 0x72, 0x6f, 0x6f, 0x74, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x52, 0x0d, 0x72, 0x65, 0x6b, 0x6f, 0x72, 0x54, 0x6c, 0x6f, 0x67, 0x55, 0x72, 0x6c, 0x73, 0x12, 0x5b, 0x0a, 0x11, 0x72, 0x65, 0x6b, 0x6f, 0x72, 0x5f, 0x74, 0x6c, 0x6f, 0x67, 0x5f, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x09, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2f, 0x2e, 0x64, 0x65, 0x76, 0x2e, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x74, 0x72, 0x75, 0x73, 0x74, 0x72, 0x6f, 0x6f, 0x74, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x0f, 0x72, 0x65, 0x6b, 0x6f, 0x72, 0x54, 0x6c, 0x6f, 0x67, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x3d, 0x0a, 0x08, 0x74, 0x73, 0x61, 0x5f, 0x75, 0x72, 0x6c, 0x73, 0x18, 0x0a, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x64, 0x65, 0x76, 0x2e, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x74, 0x72, 0x75, 0x73, 0x74, 0x72, 0x6f, 0x6f, 0x74, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x52, 0x07, 0x74, 0x73, 0x61, 0x55, 0x72, 0x6c, 0x73, 0x12, 0x4e, 0x0a, 0x0a, 0x74, 0x73, 0x61, 0x5f, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x0b, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2f, 0x2e, 0x64, 0x65, 0x76, 0x2e, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x74, 0x72, 0x75, 0x73, 0x74, 0x72, 0x6f, 0x6f, 0x74, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x52, 0x09, 0x74, 0x73, 0x61, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x4a, 0x04, 0x08, 0x01, 0x10, 0x05, 0x22, 0x87, 0x01, 0x0a, 0x07, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x12, 0x10, 0x0a, 0x03, 0x75, 0x72, 0x6c, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x75, 0x72, 0x6c, 0x12, 0x2a, 0x0a, 0x11, 0x6d, 0x61, 0x6a, 0x6f, 0x72, 0x5f, 0x61, 0x70, 0x69, 0x5f, 0x76, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x0f, 0x6d, 0x61, 0x6a, 0x6f, 0x72, 0x41, 0x70, 0x69, 0x56, 0x65, 0x72, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x3e, 0x0a, 0x09, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x5f, 0x66, 0x6f, 0x72, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 
0x21, 0x2e, 0x64, 0x65, 0x76, 0x2e, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x76, 0x31, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x52, 0x61, 0x6e, 0x67, 0x65, 0x52, 0x08, 0x76, 0x61, 0x6c, 0x69, 0x64, 0x46, 0x6f, 0x72, 0x22, 0x74, 0x0a, 0x14, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x75, 0x72, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x46, 0x0a, 0x08, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0e, 0x32, 0x2a, 0x2e, 0x64, 0x65, 0x76, 0x2e, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x74, 0x72, 0x75, 0x73, 0x74, 0x72, 0x6f, 0x6f, 0x74, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x53, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x52, 0x08, 0x73, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x12, 0x14, 0x0a, 0x05, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0d, 0x52, 0x05, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x22, 0xd8, 0x01, 0x0a, 0x11, 0x43, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x54, 0x72, 0x75, 0x73, 0x74, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x12, 0x1d, 0x0a, 0x0a, 0x6d, 0x65, 0x64, 0x69, 0x61, 0x5f, 0x74, 0x79, 0x70, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x09, 0x6d, 0x65, 0x64, 0x69, 0x61, 0x54, 0x79, 0x70, 0x65, 0x12, 0x4e, 0x0a, 0x0c, 0x74, 0x72, 0x75, 0x73, 0x74, 0x65, 0x64, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x26, 0x2e, 0x64, 0x65, 0x76, 0x2e, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x74, 0x72, 0x75, 0x73, 0x74, 0x72, 0x6f, 0x6f, 0x74, 0x2e, 0x76, 0x31, 0x2e, 0x54, 0x72, 0x75, 0x73, 0x74, 0x65, 0x64, 0x52, 0x6f, 0x6f, 0x74, 0x42, 0x03, 0xe0, 0x41, 0x02, 0x52, 0x0b, 0x74, 0x72, 0x75, 0x73, 0x74, 0x65, 0x64, 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x54, 0x0a, 0x0e, 0x73, 0x69, 0x67, 0x6e, 0x69, 0x6e, 0x67, 0x5f, 0x63, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x28, 0x2e, 0x64, 0x65, 0x76, 0x2e, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x74, 0x72, 0x75, 0x73, 0x74, 0x72, 0x6f, 0x6f, 0x74, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x69, 0x67, 0x6e, 0x69, 0x6e, 0x67, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x42, 0x03, 0xe0, 0x41, 0x02, 0x52, 0x0d, 0x73, 0x69, 0x67, 0x6e, 0x69, 0x6e, 0x67, 0x43, 0x6f, 0x6e, 0x66, 0x69, 0x67, 0x2a, 0x4e, 0x0a, 0x0f, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x53, 0x65, 0x6c, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x12, 0x1e, 0x0a, 0x1a, 0x53, 0x45, 0x52, 0x56, 0x49, 0x43, 0x45, 0x5f, 0x53, 0x45, 0x4c, 0x45, 0x43, 0x54, 0x4f, 0x52, 0x5f, 0x55, 0x4e, 0x44, 0x45, 0x46, 0x49, 0x4e, 0x45, 0x44, 0x10, 0x00, 0x12, 0x07, 0x0a, 0x03, 0x41, 0x4c, 0x4c, 0x10, 0x01, 0x12, 0x07, 0x0a, 0x03, 0x41, 0x4e, 0x59, 0x10, 0x02, 0x12, 0x09, 0x0a, 0x05, 0x45, 0x58, 0x41, 0x43, 0x54, 0x10, 0x03, 0x42, 0x88, 0x01, 0x0a, 0x1f, 0x64, 0x65, 0x76, 0x2e, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x74, 0x72, 0x75, 0x73, 0x74, 0x72, 0x6f, 0x6f, 0x74, 0x2e, 0x76, 0x31, 0x42, 0x0e, 0x54, 0x72, 0x75, 0x73, 0x74, 0x52, 0x6f, 0x6f, 0x74, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x39, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2d, 0x73, 0x70, 0x65, 0x63, 0x73, 0x2f, 0x67, 0x65, 0x6e, 0x2f, 0x70, 0x62, 0x2d, 0x67, 0x6f, 0x2f, 0x74, 0x72, 0x75, 0x73, 0x74, 0x72, 0x6f, 0x6f, 0x74, 0x2f, 0x76, 0x31, 0xea, 0x02, 0x17, 0x53, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x3a, 0x3a, 0x54, 0x72, 0x75, 0x73, 0x74, 
0x52, 0x6f, 0x6f, 0x74, 0x3a, 0x3a, 0x56, 0x31, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33, }) var ( file_sigstore_trustroot_proto_rawDescOnce sync.Once file_sigstore_trustroot_proto_rawDescData []byte ) func file_sigstore_trustroot_proto_rawDescGZIP() []byte { file_sigstore_trustroot_proto_rawDescOnce.Do(func() { file_sigstore_trustroot_proto_rawDescData = protoimpl.X.CompressGZIP(unsafe.Slice(unsafe.StringData(file_sigstore_trustroot_proto_rawDesc), len(file_sigstore_trustroot_proto_rawDesc))) }) return file_sigstore_trustroot_proto_rawDescData } var file_sigstore_trustroot_proto_enumTypes = make([]protoimpl.EnumInfo, 1) var file_sigstore_trustroot_proto_msgTypes = make([]protoimpl.MessageInfo, 7) var file_sigstore_trustroot_proto_goTypes = []any{ (ServiceSelector)(0), // 0: dev.sigstore.trustroot.v1.ServiceSelector (*TransparencyLogInstance)(nil), // 1: dev.sigstore.trustroot.v1.TransparencyLogInstance (*CertificateAuthority)(nil), // 2: dev.sigstore.trustroot.v1.CertificateAuthority (*TrustedRoot)(nil), // 3: dev.sigstore.trustroot.v1.TrustedRoot (*SigningConfig)(nil), // 4: dev.sigstore.trustroot.v1.SigningConfig (*Service)(nil), // 5: dev.sigstore.trustroot.v1.Service (*ServiceConfiguration)(nil), // 6: dev.sigstore.trustroot.v1.ServiceConfiguration (*ClientTrustConfig)(nil), // 7: dev.sigstore.trustroot.v1.ClientTrustConfig (v1.HashAlgorithm)(0), // 8: dev.sigstore.common.v1.HashAlgorithm (*v1.PublicKey)(nil), // 9: dev.sigstore.common.v1.PublicKey (*v1.LogId)(nil), // 10: dev.sigstore.common.v1.LogId (*v1.DistinguishedName)(nil), // 11: dev.sigstore.common.v1.DistinguishedName (*v1.X509CertificateChain)(nil), // 12: dev.sigstore.common.v1.X509CertificateChain (*v1.TimeRange)(nil), // 13: dev.sigstore.common.v1.TimeRange } var file_sigstore_trustroot_proto_depIdxs = []int32{ 8, // 0: dev.sigstore.trustroot.v1.TransparencyLogInstance.hash_algorithm:type_name -> dev.sigstore.common.v1.HashAlgorithm 9, // 1: dev.sigstore.trustroot.v1.TransparencyLogInstance.public_key:type_name -> dev.sigstore.common.v1.PublicKey 10, // 2: dev.sigstore.trustroot.v1.TransparencyLogInstance.log_id:type_name -> dev.sigstore.common.v1.LogId 10, // 3: dev.sigstore.trustroot.v1.TransparencyLogInstance.checkpoint_key_id:type_name -> dev.sigstore.common.v1.LogId 11, // 4: dev.sigstore.trustroot.v1.CertificateAuthority.subject:type_name -> dev.sigstore.common.v1.DistinguishedName 12, // 5: dev.sigstore.trustroot.v1.CertificateAuthority.cert_chain:type_name -> dev.sigstore.common.v1.X509CertificateChain 13, // 6: dev.sigstore.trustroot.v1.CertificateAuthority.valid_for:type_name -> dev.sigstore.common.v1.TimeRange 1, // 7: dev.sigstore.trustroot.v1.TrustedRoot.tlogs:type_name -> dev.sigstore.trustroot.v1.TransparencyLogInstance 2, // 8: dev.sigstore.trustroot.v1.TrustedRoot.certificate_authorities:type_name -> dev.sigstore.trustroot.v1.CertificateAuthority 1, // 9: dev.sigstore.trustroot.v1.TrustedRoot.ctlogs:type_name -> dev.sigstore.trustroot.v1.TransparencyLogInstance 2, // 10: dev.sigstore.trustroot.v1.TrustedRoot.timestamp_authorities:type_name -> dev.sigstore.trustroot.v1.CertificateAuthority 5, // 11: dev.sigstore.trustroot.v1.SigningConfig.ca_urls:type_name -> dev.sigstore.trustroot.v1.Service 5, // 12: dev.sigstore.trustroot.v1.SigningConfig.oidc_urls:type_name -> dev.sigstore.trustroot.v1.Service 5, // 13: dev.sigstore.trustroot.v1.SigningConfig.rekor_tlog_urls:type_name -> dev.sigstore.trustroot.v1.Service 6, // 14: dev.sigstore.trustroot.v1.SigningConfig.rekor_tlog_config:type_name -> 
dev.sigstore.trustroot.v1.ServiceConfiguration 5, // 15: dev.sigstore.trustroot.v1.SigningConfig.tsa_urls:type_name -> dev.sigstore.trustroot.v1.Service 6, // 16: dev.sigstore.trustroot.v1.SigningConfig.tsa_config:type_name -> dev.sigstore.trustroot.v1.ServiceConfiguration 13, // 17: dev.sigstore.trustroot.v1.Service.valid_for:type_name -> dev.sigstore.common.v1.TimeRange 0, // 18: dev.sigstore.trustroot.v1.ServiceConfiguration.selector:type_name -> dev.sigstore.trustroot.v1.ServiceSelector 3, // 19: dev.sigstore.trustroot.v1.ClientTrustConfig.trusted_root:type_name -> dev.sigstore.trustroot.v1.TrustedRoot 4, // 20: dev.sigstore.trustroot.v1.ClientTrustConfig.signing_config:type_name -> dev.sigstore.trustroot.v1.SigningConfig 21, // [21:21] is the sub-list for method output_type 21, // [21:21] is the sub-list for method input_type 21, // [21:21] is the sub-list for extension type_name 21, // [21:21] is the sub-list for extension extendee 0, // [0:21] is the sub-list for field type_name } func init() { file_sigstore_trustroot_proto_init() } func file_sigstore_trustroot_proto_init() { if File_sigstore_trustroot_proto != nil { return } type x struct{} out := protoimpl.TypeBuilder{ File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: unsafe.Slice(unsafe.StringData(file_sigstore_trustroot_proto_rawDesc), len(file_sigstore_trustroot_proto_rawDesc)), NumEnums: 1, NumMessages: 7, NumExtensions: 0, NumServices: 0, }, GoTypes: file_sigstore_trustroot_proto_goTypes, DependencyIndexes: file_sigstore_trustroot_proto_depIdxs, EnumInfos: file_sigstore_trustroot_proto_enumTypes, MessageInfos: file_sigstore_trustroot_proto_msgTypes, }.Build() File_sigstore_trustroot_proto = out.File file_sigstore_trustroot_proto_goTypes = nil file_sigstore_trustroot_proto_depIdxs = nil } protobuf-specs-0.4.1/gen/pb-go/verification/000077500000000000000000000000001477352757300207525ustar00rootroot00000000000000protobuf-specs-0.4.1/gen/pb-go/verification/v1/000077500000000000000000000000001477352757300213005ustar00rootroot00000000000000protobuf-specs-0.4.1/gen/pb-go/verification/v1/sigstore_verification.pb.go000066400000000000000000001404351477352757300266370ustar00rootroot00000000000000// Copyright 2022 The Sigstore Authors. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // Code generated by protoc-gen-go. DO NOT EDIT. // versions: // protoc-gen-go v1.36.5 // protoc v5.29.4 // source: sigstore_verification.proto package v1 import ( v12 "github.com/sigstore/protobuf-specs/gen/pb-go/bundle/v1" v1 "github.com/sigstore/protobuf-specs/gen/pb-go/common/v1" v11 "github.com/sigstore/protobuf-specs/gen/pb-go/trustroot/v1" protoreflect "google.golang.org/protobuf/reflect/protoreflect" protoimpl "google.golang.org/protobuf/runtime/protoimpl" reflect "reflect" sync "sync" unsafe "unsafe" ) const ( // Verify that this generated code is sufficiently up-to-date. _ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion) // Verify that runtime/protoimpl is sufficiently up-to-date. 
_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20) ) // The identity of a X.509 Certificate signer. type CertificateIdentity struct { state protoimpl.MessageState `protogen:"open.v1"` // The X.509v3 issuer extension (OID 1.3.6.1.4.1.57264.1.1) Issuer string `protobuf:"bytes,1,opt,name=issuer,proto3" json:"issuer,omitempty"` San *v1.SubjectAlternativeName `protobuf:"bytes,2,opt,name=san,proto3" json:"san,omitempty"` // An unordered list of OIDs that must be verified. // All OID/values provided in this list MUST exactly match against // the values in the certificate for verification to be successful. Oids []*v1.ObjectIdentifierValuePair `protobuf:"bytes,3,rep,name=oids,proto3" json:"oids,omitempty"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } func (x *CertificateIdentity) Reset() { *x = CertificateIdentity{} mi := &file_sigstore_verification_proto_msgTypes[0] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } func (x *CertificateIdentity) String() string { return protoimpl.X.MessageStringOf(x) } func (*CertificateIdentity) ProtoMessage() {} func (x *CertificateIdentity) ProtoReflect() protoreflect.Message { mi := &file_sigstore_verification_proto_msgTypes[0] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use CertificateIdentity.ProtoReflect.Descriptor instead. func (*CertificateIdentity) Descriptor() ([]byte, []int) { return file_sigstore_verification_proto_rawDescGZIP(), []int{0} } func (x *CertificateIdentity) GetIssuer() string { if x != nil { return x.Issuer } return "" } func (x *CertificateIdentity) GetSan() *v1.SubjectAlternativeName { if x != nil { return x.San } return nil } func (x *CertificateIdentity) GetOids() []*v1.ObjectIdentifierValuePair { if x != nil { return x.Oids } return nil } type CertificateIdentities struct { state protoimpl.MessageState `protogen:"open.v1"` Identities []*CertificateIdentity `protobuf:"bytes,1,rep,name=identities,proto3" json:"identities,omitempty"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } func (x *CertificateIdentities) Reset() { *x = CertificateIdentities{} mi := &file_sigstore_verification_proto_msgTypes[1] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } func (x *CertificateIdentities) String() string { return protoimpl.X.MessageStringOf(x) } func (*CertificateIdentities) ProtoMessage() {} func (x *CertificateIdentities) ProtoReflect() protoreflect.Message { mi := &file_sigstore_verification_proto_msgTypes[1] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use CertificateIdentities.ProtoReflect.Descriptor instead. 
func (*CertificateIdentities) Descriptor() ([]byte, []int) { return file_sigstore_verification_proto_rawDescGZIP(), []int{1} } func (x *CertificateIdentities) GetIdentities() []*CertificateIdentity { if x != nil { return x.Identities } return nil } type PublicKeyIdentities struct { state protoimpl.MessageState `protogen:"open.v1"` PublicKeys []*v1.PublicKey `protobuf:"bytes,1,rep,name=public_keys,json=publicKeys,proto3" json:"public_keys,omitempty"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } func (x *PublicKeyIdentities) Reset() { *x = PublicKeyIdentities{} mi := &file_sigstore_verification_proto_msgTypes[2] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } func (x *PublicKeyIdentities) String() string { return protoimpl.X.MessageStringOf(x) } func (*PublicKeyIdentities) ProtoMessage() {} func (x *PublicKeyIdentities) ProtoReflect() protoreflect.Message { mi := &file_sigstore_verification_proto_msgTypes[2] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use PublicKeyIdentities.ProtoReflect.Descriptor instead. func (*PublicKeyIdentities) Descriptor() ([]byte, []int) { return file_sigstore_verification_proto_rawDescGZIP(), []int{2} } func (x *PublicKeyIdentities) GetPublicKeys() []*v1.PublicKey { if x != nil { return x.PublicKeys } return nil } // A light-weight set of options/policies for identifying trusted signers, // used during verification of a single artifact. type ArtifactVerificationOptions struct { state protoimpl.MessageState `protogen:"open.v1"` // At least one identity MUST be provided. Providing zero identities // is an error. If at least one provided identity is found as a // signer, the verification is considered successful. // // Types that are valid to be assigned to Signers: // // *ArtifactVerificationOptions_CertificateIdentities // *ArtifactVerificationOptions_PublicKeys Signers isArtifactVerificationOptions_Signers `protobuf_oneof:"signers"` // Optional options for artifact transparency log verification. // If none is provided, the default verification options are: // Threshold: 1 // Online verification: false // Disable: false TlogOptions *ArtifactVerificationOptions_TlogOptions `protobuf:"bytes,3,opt,name=tlog_options,json=tlogOptions,proto3,oneof" json:"tlog_options,omitempty"` // Optional options for certificate transparency log verification. // If none is provided, the default verification options are: // Threshold: 1 // Disable: false CtlogOptions *ArtifactVerificationOptions_CtlogOptions `protobuf:"bytes,4,opt,name=ctlog_options,json=ctlogOptions,proto3,oneof" json:"ctlog_options,omitempty"` // Optional options for certificate signed timestamp verification. // If none is provided, the default verification options are: // Threshold: 0 // Disable: true TsaOptions *ArtifactVerificationOptions_TimestampAuthorityOptions `protobuf:"bytes,5,opt,name=tsa_options,json=tsaOptions,proto3,oneof" json:"tsa_options,omitempty"` // Optional options for integrated timestamp verification. // If none is provided, the default verification options are: // Threshold: 0 // Disable: true IntegratedTsOptions *ArtifactVerificationOptions_TlogIntegratedTimestampOptions `protobuf:"bytes,6,opt,name=integrated_ts_options,json=integratedTsOptions,proto3,oneof" json:"integrated_ts_options,omitempty"` // Optional options for observed timestamp verification. 
// If none is provided, the default verification options are: // Threshold 1 // Disable: false ObserverOptions *ArtifactVerificationOptions_ObserverTimestampOptions `protobuf:"bytes,7,opt,name=observer_options,json=observerOptions,proto3,oneof" json:"observer_options,omitempty"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } func (x *ArtifactVerificationOptions) Reset() { *x = ArtifactVerificationOptions{} mi := &file_sigstore_verification_proto_msgTypes[3] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } func (x *ArtifactVerificationOptions) String() string { return protoimpl.X.MessageStringOf(x) } func (*ArtifactVerificationOptions) ProtoMessage() {} func (x *ArtifactVerificationOptions) ProtoReflect() protoreflect.Message { mi := &file_sigstore_verification_proto_msgTypes[3] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use ArtifactVerificationOptions.ProtoReflect.Descriptor instead. func (*ArtifactVerificationOptions) Descriptor() ([]byte, []int) { return file_sigstore_verification_proto_rawDescGZIP(), []int{3} } func (x *ArtifactVerificationOptions) GetSigners() isArtifactVerificationOptions_Signers { if x != nil { return x.Signers } return nil } func (x *ArtifactVerificationOptions) GetCertificateIdentities() *CertificateIdentities { if x != nil { if x, ok := x.Signers.(*ArtifactVerificationOptions_CertificateIdentities); ok { return x.CertificateIdentities } } return nil } func (x *ArtifactVerificationOptions) GetPublicKeys() *PublicKeyIdentities { if x != nil { if x, ok := x.Signers.(*ArtifactVerificationOptions_PublicKeys); ok { return x.PublicKeys } } return nil } func (x *ArtifactVerificationOptions) GetTlogOptions() *ArtifactVerificationOptions_TlogOptions { if x != nil { return x.TlogOptions } return nil } func (x *ArtifactVerificationOptions) GetCtlogOptions() *ArtifactVerificationOptions_CtlogOptions { if x != nil { return x.CtlogOptions } return nil } func (x *ArtifactVerificationOptions) GetTsaOptions() *ArtifactVerificationOptions_TimestampAuthorityOptions { if x != nil { return x.TsaOptions } return nil } func (x *ArtifactVerificationOptions) GetIntegratedTsOptions() *ArtifactVerificationOptions_TlogIntegratedTimestampOptions { if x != nil { return x.IntegratedTsOptions } return nil } func (x *ArtifactVerificationOptions) GetObserverOptions() *ArtifactVerificationOptions_ObserverTimestampOptions { if x != nil { return x.ObserverOptions } return nil } type isArtifactVerificationOptions_Signers interface { isArtifactVerificationOptions_Signers() } type ArtifactVerificationOptions_CertificateIdentities struct { CertificateIdentities *CertificateIdentities `protobuf:"bytes,1,opt,name=certificate_identities,json=certificateIdentities,proto3,oneof"` } type ArtifactVerificationOptions_PublicKeys struct { // To simplify verification implementation, the logic for // bundle verification should be implemented as a // higher-order function, where one of argument should be an // interface over the set of trusted public keys, like this: // `Verify(bytes artifact, bytes signature, string key_id)`. // This way the caller is in full control of mapping the // identified (or hinted) key in the bundle to one of the // trusted keys, as this process is inherently application // specific. 
PublicKeys *PublicKeyIdentities `protobuf:"bytes,2,opt,name=public_keys,json=publicKeys,proto3,oneof"` } func (*ArtifactVerificationOptions_CertificateIdentities) isArtifactVerificationOptions_Signers() {} func (*ArtifactVerificationOptions_PublicKeys) isArtifactVerificationOptions_Signers() {} type Artifact struct { state protoimpl.MessageState `protogen:"open.v1"` // Types that are valid to be assigned to Data: // // *Artifact_ArtifactUri // *Artifact_Artifact // *Artifact_ArtifactDigest Data isArtifact_Data `protobuf_oneof:"data"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } func (x *Artifact) Reset() { *x = Artifact{} mi := &file_sigstore_verification_proto_msgTypes[4] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } func (x *Artifact) String() string { return protoimpl.X.MessageStringOf(x) } func (*Artifact) ProtoMessage() {} func (x *Artifact) ProtoReflect() protoreflect.Message { mi := &file_sigstore_verification_proto_msgTypes[4] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use Artifact.ProtoReflect.Descriptor instead. func (*Artifact) Descriptor() ([]byte, []int) { return file_sigstore_verification_proto_rawDescGZIP(), []int{4} } func (x *Artifact) GetData() isArtifact_Data { if x != nil { return x.Data } return nil } func (x *Artifact) GetArtifactUri() string { if x != nil { if x, ok := x.Data.(*Artifact_ArtifactUri); ok { return x.ArtifactUri } } return "" } func (x *Artifact) GetArtifact() []byte { if x != nil { if x, ok := x.Data.(*Artifact_Artifact); ok { return x.Artifact } } return nil } func (x *Artifact) GetArtifactDigest() *v1.HashOutput { if x != nil { if x, ok := x.Data.(*Artifact_ArtifactDigest); ok { return x.ArtifactDigest } } return nil } type isArtifact_Data interface { isArtifact_Data() } type Artifact_ArtifactUri struct { // Location of the artifact ArtifactUri string `protobuf:"bytes,1,opt,name=artifact_uri,json=artifactUri,proto3,oneof"` } type Artifact_Artifact struct { // The raw bytes of the artifact Artifact []byte `protobuf:"bytes,2,opt,name=artifact,proto3,oneof"` } type Artifact_ArtifactDigest struct { // Digest of the artifact. SHOULD NOT be used when verifying an // in-toto attestation as the subject digest cannot be // reconstructed. This option will not work with Ed25519 // signatures, use Ed25519Ph or another algorithm instead. ArtifactDigest *v1.HashOutput `protobuf:"bytes,3,opt,name=artifact_digest,json=artifactDigest,proto3,oneof"` } func (*Artifact_ArtifactUri) isArtifact_Data() {} func (*Artifact_Artifact) isArtifact_Data() {} func (*Artifact_ArtifactDigest) isArtifact_Data() {} // Input captures all that is needed to call the bundle verification method, // to verify a single artifact referenced by the bundle. type Input struct { state protoimpl.MessageState `protogen:"open.v1"` // The verification materials provided during a bundle verification. // The running process is usually preloaded with a "global" // dev.sigstore.trustroot.TrustedRoot.v1 instance. Prior to // verifying an artifact (i.e. a bundle), and/or based on current // policy, some selection is expected to happen, to filter out the // exact certificate authority to use, which transparency logs are // relevant etc. The result should be captured in the // `artifact_trust_root`. 
ArtifactTrustRoot *v11.TrustedRoot `protobuf:"bytes,1,opt,name=artifact_trust_root,json=artifactTrustRoot,proto3" json:"artifact_trust_root,omitempty"` ArtifactVerificationOptions *ArtifactVerificationOptions `protobuf:"bytes,2,opt,name=artifact_verification_options,json=artifactVerificationOptions,proto3" json:"artifact_verification_options,omitempty"` Bundle *v12.Bundle `protobuf:"bytes,3,opt,name=bundle,proto3" json:"bundle,omitempty"` // If the bundle contains a message signature, the artifact must be // provided. Artifact *Artifact `protobuf:"bytes,4,opt,name=artifact,proto3,oneof" json:"artifact,omitempty"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } func (x *Input) Reset() { *x = Input{} mi := &file_sigstore_verification_proto_msgTypes[5] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } func (x *Input) String() string { return protoimpl.X.MessageStringOf(x) } func (*Input) ProtoMessage() {} func (x *Input) ProtoReflect() protoreflect.Message { mi := &file_sigstore_verification_proto_msgTypes[5] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use Input.ProtoReflect.Descriptor instead. func (*Input) Descriptor() ([]byte, []int) { return file_sigstore_verification_proto_rawDescGZIP(), []int{5} } func (x *Input) GetArtifactTrustRoot() *v11.TrustedRoot { if x != nil { return x.ArtifactTrustRoot } return nil } func (x *Input) GetArtifactVerificationOptions() *ArtifactVerificationOptions { if x != nil { return x.ArtifactVerificationOptions } return nil } func (x *Input) GetBundle() *v12.Bundle { if x != nil { return x.Bundle } return nil } func (x *Input) GetArtifact() *Artifact { if x != nil { return x.Artifact } return nil } type ArtifactVerificationOptions_TlogOptions struct { state protoimpl.MessageState `protogen:"open.v1"` // Number of transparency logs the entry must appear on. Threshold int32 `protobuf:"varint,1,opt,name=threshold,proto3" json:"threshold,omitempty"` // Perform an online inclusion proof. PerformOnlineVerification bool `protobuf:"varint,2,opt,name=perform_online_verification,json=performOnlineVerification,proto3" json:"perform_online_verification,omitempty"` // Disable verification for transparency logs. Disable bool `protobuf:"varint,3,opt,name=disable,proto3" json:"disable,omitempty"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } func (x *ArtifactVerificationOptions_TlogOptions) Reset() { *x = ArtifactVerificationOptions_TlogOptions{} mi := &file_sigstore_verification_proto_msgTypes[6] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } func (x *ArtifactVerificationOptions_TlogOptions) String() string { return protoimpl.X.MessageStringOf(x) } func (*ArtifactVerificationOptions_TlogOptions) ProtoMessage() {} func (x *ArtifactVerificationOptions_TlogOptions) ProtoReflect() protoreflect.Message { mi := &file_sigstore_verification_proto_msgTypes[6] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use ArtifactVerificationOptions_TlogOptions.ProtoReflect.Descriptor instead. 
func (*ArtifactVerificationOptions_TlogOptions) Descriptor() ([]byte, []int) { return file_sigstore_verification_proto_rawDescGZIP(), []int{3, 0} } func (x *ArtifactVerificationOptions_TlogOptions) GetThreshold() int32 { if x != nil { return x.Threshold } return 0 } func (x *ArtifactVerificationOptions_TlogOptions) GetPerformOnlineVerification() bool { if x != nil { return x.PerformOnlineVerification } return false } func (x *ArtifactVerificationOptions_TlogOptions) GetDisable() bool { if x != nil { return x.Disable } return false } type ArtifactVerificationOptions_CtlogOptions struct { state protoimpl.MessageState `protogen:"open.v1"` // The number of ct transparency logs the certificate must // appear on. Threshold int32 `protobuf:"varint,1,opt,name=threshold,proto3" json:"threshold,omitempty"` // Disable ct transparency log verification Disable bool `protobuf:"varint,3,opt,name=disable,proto3" json:"disable,omitempty"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } func (x *ArtifactVerificationOptions_CtlogOptions) Reset() { *x = ArtifactVerificationOptions_CtlogOptions{} mi := &file_sigstore_verification_proto_msgTypes[7] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } func (x *ArtifactVerificationOptions_CtlogOptions) String() string { return protoimpl.X.MessageStringOf(x) } func (*ArtifactVerificationOptions_CtlogOptions) ProtoMessage() {} func (x *ArtifactVerificationOptions_CtlogOptions) ProtoReflect() protoreflect.Message { mi := &file_sigstore_verification_proto_msgTypes[7] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use ArtifactVerificationOptions_CtlogOptions.ProtoReflect.Descriptor instead. func (*ArtifactVerificationOptions_CtlogOptions) Descriptor() ([]byte, []int) { return file_sigstore_verification_proto_rawDescGZIP(), []int{3, 1} } func (x *ArtifactVerificationOptions_CtlogOptions) GetThreshold() int32 { if x != nil { return x.Threshold } return 0 } func (x *ArtifactVerificationOptions_CtlogOptions) GetDisable() bool { if x != nil { return x.Disable } return false } type ArtifactVerificationOptions_TimestampAuthorityOptions struct { state protoimpl.MessageState `protogen:"open.v1"` // The number of signed timestamps that are expected. Threshold int32 `protobuf:"varint,1,opt,name=threshold,proto3" json:"threshold,omitempty"` // Disable signed timestamp verification. 
Disable bool `protobuf:"varint,2,opt,name=disable,proto3" json:"disable,omitempty"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } func (x *ArtifactVerificationOptions_TimestampAuthorityOptions) Reset() { *x = ArtifactVerificationOptions_TimestampAuthorityOptions{} mi := &file_sigstore_verification_proto_msgTypes[8] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } func (x *ArtifactVerificationOptions_TimestampAuthorityOptions) String() string { return protoimpl.X.MessageStringOf(x) } func (*ArtifactVerificationOptions_TimestampAuthorityOptions) ProtoMessage() {} func (x *ArtifactVerificationOptions_TimestampAuthorityOptions) ProtoReflect() protoreflect.Message { mi := &file_sigstore_verification_proto_msgTypes[8] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use ArtifactVerificationOptions_TimestampAuthorityOptions.ProtoReflect.Descriptor instead. func (*ArtifactVerificationOptions_TimestampAuthorityOptions) Descriptor() ([]byte, []int) { return file_sigstore_verification_proto_rawDescGZIP(), []int{3, 2} } func (x *ArtifactVerificationOptions_TimestampAuthorityOptions) GetThreshold() int32 { if x != nil { return x.Threshold } return 0 } func (x *ArtifactVerificationOptions_TimestampAuthorityOptions) GetDisable() bool { if x != nil { return x.Disable } return false } type ArtifactVerificationOptions_TlogIntegratedTimestampOptions struct { state protoimpl.MessageState `protogen:"open.v1"` // The number of integrated timestamps that are expected. Threshold int32 `protobuf:"varint,1,opt,name=threshold,proto3" json:"threshold,omitempty"` // Disable integrated timestamp verification. Disable bool `protobuf:"varint,2,opt,name=disable,proto3" json:"disable,omitempty"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } func (x *ArtifactVerificationOptions_TlogIntegratedTimestampOptions) Reset() { *x = ArtifactVerificationOptions_TlogIntegratedTimestampOptions{} mi := &file_sigstore_verification_proto_msgTypes[9] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } func (x *ArtifactVerificationOptions_TlogIntegratedTimestampOptions) String() string { return protoimpl.X.MessageStringOf(x) } func (*ArtifactVerificationOptions_TlogIntegratedTimestampOptions) ProtoMessage() {} func (x *ArtifactVerificationOptions_TlogIntegratedTimestampOptions) ProtoReflect() protoreflect.Message { mi := &file_sigstore_verification_proto_msgTypes[9] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use ArtifactVerificationOptions_TlogIntegratedTimestampOptions.ProtoReflect.Descriptor instead. func (*ArtifactVerificationOptions_TlogIntegratedTimestampOptions) Descriptor() ([]byte, []int) { return file_sigstore_verification_proto_rawDescGZIP(), []int{3, 3} } func (x *ArtifactVerificationOptions_TlogIntegratedTimestampOptions) GetThreshold() int32 { if x != nil { return x.Threshold } return 0 } func (x *ArtifactVerificationOptions_TlogIntegratedTimestampOptions) GetDisable() bool { if x != nil { return x.Disable } return false } type ArtifactVerificationOptions_ObserverTimestampOptions struct { state protoimpl.MessageState `protogen:"open.v1"` // The number of external observers of the timestamp. 
// This is a union of RFC3161 signed timestamps, and // integrated timestamps from a transparency log, that // could include additional timestamp sources in the // future. Threshold int32 `protobuf:"varint,1,opt,name=threshold,proto3" json:"threshold,omitempty"` // Disable observer timestamp verification. Disable bool `protobuf:"varint,2,opt,name=disable,proto3" json:"disable,omitempty"` unknownFields protoimpl.UnknownFields sizeCache protoimpl.SizeCache } func (x *ArtifactVerificationOptions_ObserverTimestampOptions) Reset() { *x = ArtifactVerificationOptions_ObserverTimestampOptions{} mi := &file_sigstore_verification_proto_msgTypes[10] ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) ms.StoreMessageInfo(mi) } func (x *ArtifactVerificationOptions_ObserverTimestampOptions) String() string { return protoimpl.X.MessageStringOf(x) } func (*ArtifactVerificationOptions_ObserverTimestampOptions) ProtoMessage() {} func (x *ArtifactVerificationOptions_ObserverTimestampOptions) ProtoReflect() protoreflect.Message { mi := &file_sigstore_verification_proto_msgTypes[10] if x != nil { ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x)) if ms.LoadMessageInfo() == nil { ms.StoreMessageInfo(mi) } return ms } return mi.MessageOf(x) } // Deprecated: Use ArtifactVerificationOptions_ObserverTimestampOptions.ProtoReflect.Descriptor instead. func (*ArtifactVerificationOptions_ObserverTimestampOptions) Descriptor() ([]byte, []int) { return file_sigstore_verification_proto_rawDescGZIP(), []int{3, 4} } func (x *ArtifactVerificationOptions_ObserverTimestampOptions) GetThreshold() int32 { if x != nil { return x.Threshold } return 0 } func (x *ArtifactVerificationOptions_ObserverTimestampOptions) GetDisable() bool { if x != nil { return x.Disable } return false } var File_sigstore_verification_proto protoreflect.FileDescriptor var file_sigstore_verification_proto_rawDesc = string([]byte{ 0x0a, 0x1b, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x5f, 0x76, 0x65, 0x72, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x1c, 0x64, 0x65, 0x76, 0x2e, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x76, 0x65, 0x72, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x31, 0x1a, 0x15, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x5f, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x18, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x5f, 0x74, 0x72, 0x75, 0x73, 0x74, 0x72, 0x6f, 0x6f, 0x74, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x15, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x5f, 0x62, 0x75, 0x6e, 0x64, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x22, 0xb6, 0x01, 0x0a, 0x13, 0x43, 0x65, 0x72, 0x74, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x65, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x12, 0x16, 0x0a, 0x06, 0x69, 0x73, 0x73, 0x75, 0x65, 0x72, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x69, 0x73, 0x73, 0x75, 0x65, 0x72, 0x12, 0x40, 0x0a, 0x03, 0x73, 0x61, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2e, 0x2e, 0x64, 0x65, 0x76, 0x2e, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x76, 0x31, 0x2e, 0x53, 0x75, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x41, 0x6c, 0x74, 0x65, 0x72, 0x6e, 0x61, 0x74, 0x69, 0x76, 0x65, 0x4e, 0x61, 0x6d, 0x65, 0x52, 0x03, 0x73, 0x61, 0x6e, 0x12, 0x45, 0x0a, 0x04, 0x6f, 0x69, 0x64, 0x73, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x31, 0x2e, 0x64, 0x65, 0x76, 0x2e, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 
0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x76, 0x31, 0x2e, 0x4f, 0x62, 0x6a, 0x65, 0x63, 0x74, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x66, 0x69, 0x65, 0x72, 0x56, 0x61, 0x6c, 0x75, 0x65, 0x50, 0x61, 0x69, 0x72, 0x52, 0x04, 0x6f, 0x69, 0x64, 0x73, 0x22, 0x6a, 0x0a, 0x15, 0x43, 0x65, 0x72, 0x74, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x65, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x74, 0x69, 0x65, 0x73, 0x12, 0x51, 0x0a, 0x0a, 0x69, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x74, 0x69, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x31, 0x2e, 0x64, 0x65, 0x76, 0x2e, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x76, 0x65, 0x72, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x65, 0x72, 0x74, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x65, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x52, 0x0a, 0x69, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x74, 0x69, 0x65, 0x73, 0x22, 0x59, 0x0a, 0x13, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x4b, 0x65, 0x79, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x74, 0x69, 0x65, 0x73, 0x12, 0x42, 0x0a, 0x0b, 0x70, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x5f, 0x6b, 0x65, 0x79, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x21, 0x2e, 0x64, 0x65, 0x76, 0x2e, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x76, 0x31, 0x2e, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x4b, 0x65, 0x79, 0x52, 0x0a, 0x70, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x4b, 0x65, 0x79, 0x73, 0x22, 0x9c, 0x0b, 0x0a, 0x1b, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x56, 0x65, 0x72, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x6c, 0x0a, 0x16, 0x63, 0x65, 0x72, 0x74, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x65, 0x5f, 0x69, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x74, 0x69, 0x65, 0x73, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x33, 0x2e, 0x64, 0x65, 0x76, 0x2e, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x76, 0x65, 0x72, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x31, 0x2e, 0x43, 0x65, 0x72, 0x74, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x65, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x74, 0x69, 0x65, 0x73, 0x48, 0x00, 0x52, 0x15, 0x63, 0x65, 0x72, 0x74, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x65, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x74, 0x69, 0x65, 0x73, 0x12, 0x54, 0x0a, 0x0b, 0x70, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x5f, 0x6b, 0x65, 0x79, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x31, 0x2e, 0x64, 0x65, 0x76, 0x2e, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x76, 0x65, 0x72, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x31, 0x2e, 0x50, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x4b, 0x65, 0x79, 0x49, 0x64, 0x65, 0x6e, 0x74, 0x69, 0x74, 0x69, 0x65, 0x73, 0x48, 0x00, 0x52, 0x0a, 0x70, 0x75, 0x62, 0x6c, 0x69, 0x63, 0x4b, 0x65, 0x79, 0x73, 0x12, 0x6d, 0x0a, 0x0c, 0x74, 0x6c, 0x6f, 0x67, 0x5f, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x45, 0x2e, 0x64, 0x65, 0x76, 0x2e, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x76, 0x65, 0x72, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x31, 0x2e, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x56, 0x65, 0x72, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x54, 0x6c, 0x6f, 0x67, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x48, 0x01, 0x52, 0x0b, 0x74, 0x6c, 0x6f, 0x67, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x88, 0x01, 0x01, 0x12, 0x70, 0x0a, 0x0d, 0x63, 0x74, 0x6c, 0x6f, 0x67, 0x5f, 0x6f, 0x70, 0x74, 
0x69, 0x6f, 0x6e, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x46, 0x2e, 0x64, 0x65, 0x76, 0x2e, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x76, 0x65, 0x72, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x31, 0x2e, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x56, 0x65, 0x72, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x43, 0x74, 0x6c, 0x6f, 0x67, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x48, 0x02, 0x52, 0x0c, 0x63, 0x74, 0x6c, 0x6f, 0x67, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x88, 0x01, 0x01, 0x12, 0x79, 0x0a, 0x0b, 0x74, 0x73, 0x61, 0x5f, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x05, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x53, 0x2e, 0x64, 0x65, 0x76, 0x2e, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x76, 0x65, 0x72, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x31, 0x2e, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x56, 0x65, 0x72, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x41, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x69, 0x74, 0x79, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x48, 0x03, 0x52, 0x0a, 0x74, 0x73, 0x61, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x88, 0x01, 0x01, 0x12, 0x91, 0x01, 0x0a, 0x15, 0x69, 0x6e, 0x74, 0x65, 0x67, 0x72, 0x61, 0x74, 0x65, 0x64, 0x5f, 0x74, 0x73, 0x5f, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x06, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x58, 0x2e, 0x64, 0x65, 0x76, 0x2e, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x76, 0x65, 0x72, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x31, 0x2e, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x56, 0x65, 0x72, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x54, 0x6c, 0x6f, 0x67, 0x49, 0x6e, 0x74, 0x65, 0x67, 0x72, 0x61, 0x74, 0x65, 0x64, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x48, 0x04, 0x52, 0x13, 0x69, 0x6e, 0x74, 0x65, 0x67, 0x72, 0x61, 0x74, 0x65, 0x64, 0x54, 0x73, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x88, 0x01, 0x01, 0x12, 0x82, 0x01, 0x0a, 0x10, 0x6f, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x5f, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x07, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x52, 0x2e, 0x64, 0x65, 0x76, 0x2e, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x76, 0x65, 0x72, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x31, 0x2e, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x56, 0x65, 0x72, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x2e, 0x4f, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x48, 0x05, 0x52, 0x0f, 0x6f, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x88, 0x01, 0x01, 0x1a, 0x85, 0x01, 0x0a, 0x0b, 0x54, 0x6c, 0x6f, 0x67, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x1c, 0x0a, 0x09, 0x74, 0x68, 0x72, 0x65, 0x73, 0x68, 0x6f, 0x6c, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x05, 0x52, 0x09, 0x74, 0x68, 0x72, 0x65, 0x73, 0x68, 0x6f, 0x6c, 0x64, 0x12, 0x3e, 0x0a, 0x1b, 0x70, 0x65, 0x72, 0x66, 0x6f, 0x72, 0x6d, 0x5f, 0x6f, 0x6e, 0x6c, 0x69, 0x6e, 0x65, 0x5f, 0x76, 0x65, 0x72, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x18, 0x02, 0x20, 0x01, 0x28, 0x08, 0x52, 0x19, 0x70, 0x65, 
0x72, 0x66, 0x6f, 0x72, 0x6d, 0x4f, 0x6e, 0x6c, 0x69, 0x6e, 0x65, 0x56, 0x65, 0x72, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x18, 0x0a, 0x07, 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x08, 0x52, 0x07, 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x1a, 0x4c, 0x0a, 0x0c, 0x43, 0x74, 0x6c, 0x6f, 0x67, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x1c, 0x0a, 0x09, 0x74, 0x68, 0x72, 0x65, 0x73, 0x68, 0x6f, 0x6c, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x05, 0x52, 0x09, 0x74, 0x68, 0x72, 0x65, 0x73, 0x68, 0x6f, 0x6c, 0x64, 0x12, 0x18, 0x0a, 0x07, 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x08, 0x52, 0x07, 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x4a, 0x04, 0x08, 0x02, 0x10, 0x03, 0x1a, 0x53, 0x0a, 0x19, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x41, 0x75, 0x74, 0x68, 0x6f, 0x72, 0x69, 0x74, 0x79, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x1c, 0x0a, 0x09, 0x74, 0x68, 0x72, 0x65, 0x73, 0x68, 0x6f, 0x6c, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x05, 0x52, 0x09, 0x74, 0x68, 0x72, 0x65, 0x73, 0x68, 0x6f, 0x6c, 0x64, 0x12, 0x18, 0x0a, 0x07, 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x08, 0x52, 0x07, 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x1a, 0x58, 0x0a, 0x1e, 0x54, 0x6c, 0x6f, 0x67, 0x49, 0x6e, 0x74, 0x65, 0x67, 0x72, 0x61, 0x74, 0x65, 0x64, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x1c, 0x0a, 0x09, 0x74, 0x68, 0x72, 0x65, 0x73, 0x68, 0x6f, 0x6c, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x05, 0x52, 0x09, 0x74, 0x68, 0x72, 0x65, 0x73, 0x68, 0x6f, 0x6c, 0x64, 0x12, 0x18, 0x0a, 0x07, 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x08, 0x52, 0x07, 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x1a, 0x52, 0x0a, 0x18, 0x4f, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x1c, 0x0a, 0x09, 0x74, 0x68, 0x72, 0x65, 0x73, 0x68, 0x6f, 0x6c, 0x64, 0x18, 0x01, 0x20, 0x01, 0x28, 0x05, 0x52, 0x09, 0x74, 0x68, 0x72, 0x65, 0x73, 0x68, 0x6f, 0x6c, 0x64, 0x12, 0x18, 0x0a, 0x07, 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x08, 0x52, 0x07, 0x64, 0x69, 0x73, 0x61, 0x62, 0x6c, 0x65, 0x42, 0x09, 0x0a, 0x07, 0x73, 0x69, 0x67, 0x6e, 0x65, 0x72, 0x73, 0x42, 0x0f, 0x0a, 0x0d, 0x5f, 0x74, 0x6c, 0x6f, 0x67, 0x5f, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x42, 0x10, 0x0a, 0x0e, 0x5f, 0x63, 0x74, 0x6c, 0x6f, 0x67, 0x5f, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x42, 0x0e, 0x0a, 0x0c, 0x5f, 0x74, 0x73, 0x61, 0x5f, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x42, 0x18, 0x0a, 0x16, 0x5f, 0x69, 0x6e, 0x74, 0x65, 0x67, 0x72, 0x61, 0x74, 0x65, 0x64, 0x5f, 0x74, 0x73, 0x5f, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x42, 0x13, 0x0a, 0x11, 0x5f, 0x6f, 0x62, 0x73, 0x65, 0x72, 0x76, 0x65, 0x72, 0x5f, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x22, 0xa4, 0x01, 0x0a, 0x08, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x12, 0x23, 0x0a, 0x0c, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x5f, 0x75, 0x72, 0x69, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x48, 0x00, 0x52, 0x0b, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x55, 0x72, 0x69, 0x12, 0x1c, 0x0a, 0x08, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0c, 0x48, 0x00, 0x52, 0x08, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x12, 0x4d, 0x0a, 0x0f, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x5f, 0x64, 0x69, 0x67, 0x65, 
0x73, 0x74, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x22, 0x2e, 0x64, 0x65, 0x76, 0x2e, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x63, 0x6f, 0x6d, 0x6d, 0x6f, 0x6e, 0x2e, 0x76, 0x31, 0x2e, 0x48, 0x61, 0x73, 0x68, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x48, 0x00, 0x52, 0x0e, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x44, 0x69, 0x67, 0x65, 0x73, 0x74, 0x42, 0x06, 0x0a, 0x04, 0x64, 0x61, 0x74, 0x61, 0x22, 0xec, 0x02, 0x0a, 0x05, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x12, 0x56, 0x0a, 0x13, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x5f, 0x74, 0x72, 0x75, 0x73, 0x74, 0x5f, 0x72, 0x6f, 0x6f, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x26, 0x2e, 0x64, 0x65, 0x76, 0x2e, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x74, 0x72, 0x75, 0x73, 0x74, 0x72, 0x6f, 0x6f, 0x74, 0x2e, 0x76, 0x31, 0x2e, 0x54, 0x72, 0x75, 0x73, 0x74, 0x65, 0x64, 0x52, 0x6f, 0x6f, 0x74, 0x52, 0x11, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x54, 0x72, 0x75, 0x73, 0x74, 0x52, 0x6f, 0x6f, 0x74, 0x12, 0x7d, 0x0a, 0x1d, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x5f, 0x76, 0x65, 0x72, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x5f, 0x6f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x39, 0x2e, 0x64, 0x65, 0x76, 0x2e, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x76, 0x65, 0x72, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x31, 0x2e, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x56, 0x65, 0x72, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x52, 0x1b, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x56, 0x65, 0x72, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x4f, 0x70, 0x74, 0x69, 0x6f, 0x6e, 0x73, 0x12, 0x36, 0x0a, 0x06, 0x62, 0x75, 0x6e, 0x64, 0x6c, 0x65, 0x18, 0x03, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1e, 0x2e, 0x64, 0x65, 0x76, 0x2e, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x62, 0x75, 0x6e, 0x64, 0x6c, 0x65, 0x2e, 0x76, 0x31, 0x2e, 0x42, 0x75, 0x6e, 0x64, 0x6c, 0x65, 0x52, 0x06, 0x62, 0x75, 0x6e, 0x64, 0x6c, 0x65, 0x12, 0x47, 0x0a, 0x08, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x18, 0x04, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x26, 0x2e, 0x64, 0x65, 0x76, 0x2e, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x76, 0x65, 0x72, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x31, 0x2e, 0x41, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x48, 0x00, 0x52, 0x08, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x88, 0x01, 0x01, 0x42, 0x0b, 0x0a, 0x09, 0x5f, 0x61, 0x72, 0x74, 0x69, 0x66, 0x61, 0x63, 0x74, 0x42, 0x94, 0x01, 0x0a, 0x22, 0x64, 0x65, 0x76, 0x2e, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x76, 0x65, 0x72, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x76, 0x31, 0x42, 0x11, 0x56, 0x65, 0x72, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x50, 0x72, 0x6f, 0x74, 0x6f, 0x50, 0x01, 0x5a, 0x3c, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f, 0x73, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2d, 0x73, 0x70, 0x65, 0x63, 0x73, 0x2f, 0x67, 0x65, 0x6e, 0x2f, 0x70, 0x62, 0x2d, 0x67, 0x6f, 0x2f, 0x76, 0x65, 0x72, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2f, 0x76, 0x31, 0xea, 0x02, 0x1a, 0x53, 0x69, 0x67, 0x73, 0x74, 0x6f, 0x72, 0x65, 0x3a, 0x3a, 0x56, 0x65, 0x72, 0x69, 0x66, 0x69, 0x63, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x3a, 0x3a, 0x56, 0x31, 0x62, 0x06, 0x70, 0x72, 
0x6f, 0x74, 0x6f, 0x33, }) var ( file_sigstore_verification_proto_rawDescOnce sync.Once file_sigstore_verification_proto_rawDescData []byte ) func file_sigstore_verification_proto_rawDescGZIP() []byte { file_sigstore_verification_proto_rawDescOnce.Do(func() { file_sigstore_verification_proto_rawDescData = protoimpl.X.CompressGZIP(unsafe.Slice(unsafe.StringData(file_sigstore_verification_proto_rawDesc), len(file_sigstore_verification_proto_rawDesc))) }) return file_sigstore_verification_proto_rawDescData } var file_sigstore_verification_proto_msgTypes = make([]protoimpl.MessageInfo, 11) var file_sigstore_verification_proto_goTypes = []any{ (*CertificateIdentity)(nil), // 0: dev.sigstore.verification.v1.CertificateIdentity (*CertificateIdentities)(nil), // 1: dev.sigstore.verification.v1.CertificateIdentities (*PublicKeyIdentities)(nil), // 2: dev.sigstore.verification.v1.PublicKeyIdentities (*ArtifactVerificationOptions)(nil), // 3: dev.sigstore.verification.v1.ArtifactVerificationOptions (*Artifact)(nil), // 4: dev.sigstore.verification.v1.Artifact (*Input)(nil), // 5: dev.sigstore.verification.v1.Input (*ArtifactVerificationOptions_TlogOptions)(nil), // 6: dev.sigstore.verification.v1.ArtifactVerificationOptions.TlogOptions (*ArtifactVerificationOptions_CtlogOptions)(nil), // 7: dev.sigstore.verification.v1.ArtifactVerificationOptions.CtlogOptions (*ArtifactVerificationOptions_TimestampAuthorityOptions)(nil), // 8: dev.sigstore.verification.v1.ArtifactVerificationOptions.TimestampAuthorityOptions (*ArtifactVerificationOptions_TlogIntegratedTimestampOptions)(nil), // 9: dev.sigstore.verification.v1.ArtifactVerificationOptions.TlogIntegratedTimestampOptions (*ArtifactVerificationOptions_ObserverTimestampOptions)(nil), // 10: dev.sigstore.verification.v1.ArtifactVerificationOptions.ObserverTimestampOptions (*v1.SubjectAlternativeName)(nil), // 11: dev.sigstore.common.v1.SubjectAlternativeName (*v1.ObjectIdentifierValuePair)(nil), // 12: dev.sigstore.common.v1.ObjectIdentifierValuePair (*v1.PublicKey)(nil), // 13: dev.sigstore.common.v1.PublicKey (*v1.HashOutput)(nil), // 14: dev.sigstore.common.v1.HashOutput (*v11.TrustedRoot)(nil), // 15: dev.sigstore.trustroot.v1.TrustedRoot (*v12.Bundle)(nil), // 16: dev.sigstore.bundle.v1.Bundle } var file_sigstore_verification_proto_depIdxs = []int32{ 11, // 0: dev.sigstore.verification.v1.CertificateIdentity.san:type_name -> dev.sigstore.common.v1.SubjectAlternativeName 12, // 1: dev.sigstore.verification.v1.CertificateIdentity.oids:type_name -> dev.sigstore.common.v1.ObjectIdentifierValuePair 0, // 2: dev.sigstore.verification.v1.CertificateIdentities.identities:type_name -> dev.sigstore.verification.v1.CertificateIdentity 13, // 3: dev.sigstore.verification.v1.PublicKeyIdentities.public_keys:type_name -> dev.sigstore.common.v1.PublicKey 1, // 4: dev.sigstore.verification.v1.ArtifactVerificationOptions.certificate_identities:type_name -> dev.sigstore.verification.v1.CertificateIdentities 2, // 5: dev.sigstore.verification.v1.ArtifactVerificationOptions.public_keys:type_name -> dev.sigstore.verification.v1.PublicKeyIdentities 6, // 6: dev.sigstore.verification.v1.ArtifactVerificationOptions.tlog_options:type_name -> dev.sigstore.verification.v1.ArtifactVerificationOptions.TlogOptions 7, // 7: dev.sigstore.verification.v1.ArtifactVerificationOptions.ctlog_options:type_name -> dev.sigstore.verification.v1.ArtifactVerificationOptions.CtlogOptions 8, // 8: dev.sigstore.verification.v1.ArtifactVerificationOptions.tsa_options:type_name -> 
dev.sigstore.verification.v1.ArtifactVerificationOptions.TimestampAuthorityOptions 9, // 9: dev.sigstore.verification.v1.ArtifactVerificationOptions.integrated_ts_options:type_name -> dev.sigstore.verification.v1.ArtifactVerificationOptions.TlogIntegratedTimestampOptions 10, // 10: dev.sigstore.verification.v1.ArtifactVerificationOptions.observer_options:type_name -> dev.sigstore.verification.v1.ArtifactVerificationOptions.ObserverTimestampOptions 14, // 11: dev.sigstore.verification.v1.Artifact.artifact_digest:type_name -> dev.sigstore.common.v1.HashOutput 15, // 12: dev.sigstore.verification.v1.Input.artifact_trust_root:type_name -> dev.sigstore.trustroot.v1.TrustedRoot 3, // 13: dev.sigstore.verification.v1.Input.artifact_verification_options:type_name -> dev.sigstore.verification.v1.ArtifactVerificationOptions 16, // 14: dev.sigstore.verification.v1.Input.bundle:type_name -> dev.sigstore.bundle.v1.Bundle 4, // 15: dev.sigstore.verification.v1.Input.artifact:type_name -> dev.sigstore.verification.v1.Artifact 16, // [16:16] is the sub-list for method output_type 16, // [16:16] is the sub-list for method input_type 16, // [16:16] is the sub-list for extension type_name 16, // [16:16] is the sub-list for extension extendee 0, // [0:16] is the sub-list for field type_name } func init() { file_sigstore_verification_proto_init() } func file_sigstore_verification_proto_init() { if File_sigstore_verification_proto != nil { return } file_sigstore_verification_proto_msgTypes[3].OneofWrappers = []any{ (*ArtifactVerificationOptions_CertificateIdentities)(nil), (*ArtifactVerificationOptions_PublicKeys)(nil), } file_sigstore_verification_proto_msgTypes[4].OneofWrappers = []any{ (*Artifact_ArtifactUri)(nil), (*Artifact_Artifact)(nil), (*Artifact_ArtifactDigest)(nil), } file_sigstore_verification_proto_msgTypes[5].OneofWrappers = []any{} type x struct{} out := protoimpl.TypeBuilder{ File: protoimpl.DescBuilder{ GoPackagePath: reflect.TypeOf(x{}).PkgPath(), RawDescriptor: unsafe.Slice(unsafe.StringData(file_sigstore_verification_proto_rawDesc), len(file_sigstore_verification_proto_rawDesc)), NumEnums: 0, NumMessages: 11, NumExtensions: 0, NumServices: 0, }, GoTypes: file_sigstore_verification_proto_goTypes, DependencyIndexes: file_sigstore_verification_proto_depIdxs, MessageInfos: file_sigstore_verification_proto_msgTypes, }.Build() File_sigstore_verification_proto = out.File file_sigstore_verification_proto_goTypes = nil file_sigstore_verification_proto_depIdxs = nil } protobuf-specs-0.4.1/gen/pb-python/000077500000000000000000000000001477352757300172045ustar00rootroot00000000000000protobuf-specs-0.4.1/gen/pb-python/LICENSE000066400000000000000000000261361477352757300202210ustar00rootroot00000000000000 Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. 
For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. 
If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. 
You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. protobuf-specs-0.4.1/gen/pb-python/Makefile000066400000000000000000000005421477352757300206450ustar00rootroot00000000000000.PHONY: all all: @echo "Run my targets individually!" env/pyvenv.cfg: pyproject.toml # Create our Python 3 virtual environment rm -rf env python3 -m venv env ./env/bin/python -m pip install --upgrade pip ./env/bin/python -m pip install -e .[dev] .PHONY: dev dev: env/pyvenv.cfg .PHONY: package package: env/pyvenv.cfg ./env/bin/python -m build protobuf-specs-0.4.1/gen/pb-python/README.md000066400000000000000000000003301477352757300204570ustar00rootroot00000000000000sigstore-protobuf-specs ======================= These are the Python language bindings for Sigstore's protobuf specs. See the [repository's README](https://github.com/sigstore/protobuf-specs) for more information. 
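As a quick, purely illustrative sketch (not an official example; the `Bundle` type and the `from_json` helper come from betterproto, which these bindings are generated with, and the file path is hypothetical), deserializing a Sigstore bundle might look like this:

```python
from sigstore_protobuf_specs.dev.sigstore.bundle.v1 import Bundle

# Read a bundle from its JSON representation (e.g. a *.sigstore file
# produced by a Sigstore signing client) and inspect a few fields.
with open("example.sigstore.json") as f:  # hypothetical path
    bundle = Bundle().from_json(f.read())

print(bundle.media_type)
print(len(bundle.verification_material.tlog_entries))
```

Note that these bindings only (de)serialize messages; signature verification itself is out of scope and is handled by Sigstore clients such as sigstore-python.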
protobuf-specs-0.4.1/gen/pb-python/pyproject.toml000066400000000000000000000022751477352757300221260ustar00rootroot00000000000000[build-system] requires = ["flit_core >=3.2,<4"] build-backend = "flit_core.buildapi" [project] name = "sigstore-protobuf-specs" version = "0.4.0" description = "A library for serializing and deserializing Sigstore messages" readme = "README.md" license = { file = "LICENSE" } authors = [ { name = "Sigstore Authors", email = "sigstore-dev@googlegroups.com" }, ] classifiers = [ "License :: OSI Approved :: Apache Software License", "Programming Language :: Python :: 3 :: Only", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Development Status :: 4 - Beta", "Intended Audience :: Developers", "Topic :: Security", "Topic :: Security :: Cryptography", ] dependencies = ["betterproto==2.0.0b7", "pydantic >= 2, < 3"] requires-python = ">=3.8" [project.urls] Homepage = "https://pypi.org/project/sigstore-protobuf-specs/" Issues = "https://github.com/sigstore/protobuf-specs/issues" Source = "https://github.com/sigstore/protobuf-specs" [project.optional-dependencies] dev = ["build"] protobuf-specs-0.4.1/gen/pb-python/sigstore_protobuf_specs/000077500000000000000000000000001477352757300241605ustar00rootroot00000000000000protobuf-specs-0.4.1/gen/pb-python/sigstore_protobuf_specs/__init__.py000066400000000000000000000000001477352757300262570ustar00rootroot00000000000000protobuf-specs-0.4.1/gen/pb-python/sigstore_protobuf_specs/dev/000077500000000000000000000000001477352757300247365ustar00rootroot00000000000000protobuf-specs-0.4.1/gen/pb-python/sigstore_protobuf_specs/dev/__init__.py000066400000000000000000000000001477352757300270350ustar00rootroot00000000000000protobuf-specs-0.4.1/gen/pb-python/sigstore_protobuf_specs/dev/sigstore/000077500000000000000000000000001477352757300265755ustar00rootroot00000000000000protobuf-specs-0.4.1/gen/pb-python/sigstore_protobuf_specs/dev/sigstore/__init__.py000066400000000000000000000000001477352757300306740ustar00rootroot00000000000000protobuf-specs-0.4.1/gen/pb-python/sigstore_protobuf_specs/dev/sigstore/bundle/000077500000000000000000000000001477352757300300465ustar00rootroot00000000000000protobuf-specs-0.4.1/gen/pb-python/sigstore_protobuf_specs/dev/sigstore/bundle/__init__.py000066400000000000000000000000001477352757300321450ustar00rootroot00000000000000protobuf-specs-0.4.1/gen/pb-python/sigstore_protobuf_specs/dev/sigstore/bundle/v1/000077500000000000000000000000001477352757300303745ustar00rootroot00000000000000protobuf-specs-0.4.1/gen/pb-python/sigstore_protobuf_specs/dev/sigstore/bundle/v1/__init__.py000066400000000000000000000133321477352757300325070ustar00rootroot00000000000000# Generated by the protocol buffer compiler. DO NOT EDIT! 
# sources: sigstore_bundle.proto # plugin: python-betterproto # This file has been @generated from typing import TYPE_CHECKING if TYPE_CHECKING: from dataclasses import dataclass else: from pydantic.dataclasses import dataclass from typing import ( List, Optional, ) import betterproto from pydantic import model_validator from pydantic.dataclasses import rebuild_dataclass from .....io import intoto as ____io_intoto__ from ...common import v1 as __common_v1__ from ...rekor import v1 as __rekor_v1__ @dataclass(eq=False, repr=False) class TimestampVerificationData(betterproto.Message): """ Various timestamped counter signatures over the artifact's signature. Currently only RFC3161 signatures are provided. More formats may be added in the future. """ rfc3161_timestamps: List["__common_v1__.Rfc3161SignedTimestamp"] = ( betterproto.message_field(1) ) """ A list of RFC3161 signed timestamps provided by the user. This can be used when the entry has not been stored on a transparency log, or in conjunction for a stronger trust model. Clients MUST verify the hashed message in the message imprint against the signature in the bundle. """ @dataclass(eq=False, repr=False) class VerificationMaterial(betterproto.Message): """ VerificationMaterial captures details on the materials used to verify signatures. This message may be embedded in a DSSE envelope as a signature extension. Specifically, the `ext` field of the extension will expect this message when the signature extension is for Sigstore. This is identified by the `kind` field in the extension, which must be set to application/vnd.dev.sigstore.verificationmaterial;version=0.1 for Sigstore. When used as a DSSE extension, if the `public_key` field is used to indicate the key identifier, it MUST match the `keyid` field of the signature the extension is attached to. """ public_key: Optional["__common_v1__.PublicKeyIdentifier"] = ( betterproto.message_field(1, optional=True, group="content") ) x509_certificate_chain: Optional["__common_v1__.X509CertificateChain"] = ( betterproto.message_field(2, optional=True, group="content") ) certificate: Optional["__common_v1__.X509Certificate"] = betterproto.message_field( 5, optional=True, group="content" ) tlog_entries: List["__rekor_v1__.TransparencyLogEntry"] = betterproto.message_field( 3 ) """ An inclusion proof and an optional signed timestamp from the log. Client verification libraries MAY provide an option to support v0.1 bundles for backwards compatibility, which may contain an inclusion promise and not an inclusion proof. In this case, the client MUST validate the promise. Verifiers SHOULD NOT allow v0.1 bundles if they're used in an ecosystem which never produced them. """ timestamp_verification_data: "TimestampVerificationData" = ( betterproto.message_field(4) ) """ Timestamp may also come from tlog_entries.inclusion_promise.signed_entry_timestamp. """ @model_validator(mode="after") def check_oneof(cls, values): return cls._validate_field_groups(values) @dataclass(eq=False, repr=False) class Bundle(betterproto.Message): media_type: str = betterproto.string_field(1) """ MUST be application/vnd.dev.sigstore.bundle.v0.3+json when encoded as JSON.
Clients must to be able to accept media type using the previously defined formats: * application/vnd.dev.sigstore.bundle+json;version=0.1 * application/vnd.dev.sigstore.bundle+json;version=0.2 * application/vnd.dev.sigstore.bundle+json;version=0.3 """ verification_material: "VerificationMaterial" = betterproto.message_field(2) """ When a signer is identified by a X.509 certificate, a verifier MUST verify that the signature was computed at the time the certificate was valid as described in the Sigstore client spec: "Verification using a Bundle". If the verification material contains a public key identifier (key hint) and the `content` is a DSSE envelope, the key hints MUST be exactly the same in the verification material and in the DSSE envelope. """ message_signature: Optional["__common_v1__.MessageSignature"] = ( betterproto.message_field(3, optional=True, group="content") ) dsse_envelope: Optional["____io_intoto__.Envelope"] = betterproto.message_field( 4, optional=True, group="content" ) """ A DSSE envelope can contain arbitrary payloads. Verifiers must verify that the payload type is a supported and expected type. This is part of the DSSE protocol which is defined here: DSSE envelopes in a bundle MUST have exactly one signature. This is a limitation from the DSSE spec, as it can contain multiple signatures. There are two primary reasons: 1. It simplifies the verification logic and policy 2. The bundle (currently) can only contain a single instance of the required verification materials During verification a client MUST reject an envelope if the number of signatures is not equal to one. """ @model_validator(mode="after") def check_oneof(cls, values): return cls._validate_field_groups(values) rebuild_dataclass(TimestampVerificationData) # type: ignore rebuild_dataclass(VerificationMaterial) # type: ignore rebuild_dataclass(Bundle) # type: ignore protobuf-specs-0.4.1/gen/pb-python/sigstore_protobuf_specs/dev/sigstore/common/000077500000000000000000000000001477352757300300655ustar00rootroot00000000000000protobuf-specs-0.4.1/gen/pb-python/sigstore_protobuf_specs/dev/sigstore/common/__init__.py000066400000000000000000000000001477352757300321640ustar00rootroot00000000000000protobuf-specs-0.4.1/gen/pb-python/sigstore_protobuf_specs/dev/sigstore/common/v1/000077500000000000000000000000001477352757300304135ustar00rootroot00000000000000protobuf-specs-0.4.1/gen/pb-python/sigstore_protobuf_specs/dev/sigstore/common/v1/__init__.py000066400000000000000000000241421477352757300325270ustar00rootroot00000000000000# Generated by the protocol buffer compiler. DO NOT EDIT! # sources: sigstore_common.proto # plugin: python-betterproto # This file has been @generated from typing import TYPE_CHECKING if TYPE_CHECKING: from dataclasses import dataclass else: from pydantic.dataclasses import dataclass from datetime import datetime from typing import ( List, Optional, ) import betterproto from pydantic import model_validator from pydantic.dataclasses import rebuild_dataclass class HashAlgorithm(betterproto.Enum): """ Only a subset of the secure hash standard algorithms are supported. See for more details. UNSPECIFIED SHOULD not be used, primary reason for inclusion is to force any proto JSON serialization to emit the used hash algorithm, as default option is to *omit* the default value of an enum (which is the first value, represented by '0'. 
""" UNSPECIFIED = 0 SHA2_256 = 1 SHA2_384 = 2 SHA2_512 = 3 SHA3_256 = 4 SHA3_384 = 5 @classmethod def __get_pydantic_core_schema__(cls, _source_type, _handler): from pydantic_core import core_schema return core_schema.int_schema(ge=0) class PublicKeyDetails(betterproto.Enum): """ Details of a specific public key, capturing the the key encoding method, and signature algorithm. PublicKeyDetails captures the public key/hash algorithm combinations recommended in the Sigstore ecosystem. This is modelled as a linear set as we want to provide a small number of opinionated options instead of allowing every possible permutation. Any changes to this enum MUST be reflected in the algorithm registry. See: docs/algorithm-registry.md To avoid the possibility of contradicting formats such as PKCS1 with ED25519 the valid permutations are listed as a linear set instead of a cartesian set (i.e one combined variable instead of two, one for encoding and one for the signature algorithm). """ UNSPECIFIED = 0 PKCS1_RSA_PKCS1V5 = 1 """RSA""" PKCS1_RSA_PSS = 2 PKIX_RSA_PKCS1V5 = 3 PKIX_RSA_PSS = 4 PKIX_RSA_PKCS1V15_2048_SHA256 = 9 """RSA public key in PKIX format, PKCS#1v1.5 signature""" PKIX_RSA_PKCS1V15_3072_SHA256 = 10 PKIX_RSA_PKCS1V15_4096_SHA256 = 11 PKIX_RSA_PSS_2048_SHA256 = 16 """RSA public key in PKIX format, RSASSA-PSS signature""" PKIX_RSA_PSS_3072_SHA256 = 17 PKIX_RSA_PSS_4096_SHA256 = 18 PKIX_ECDSA_P256_HMAC_SHA_256 = 6 """ECDSA""" PKIX_ECDSA_P256_SHA_256 = 5 PKIX_ECDSA_P384_SHA_384 = 12 PKIX_ECDSA_P521_SHA_512 = 13 PKIX_ED25519 = 7 """Ed 25519""" PKIX_ED25519_PH = 8 PKIX_ECDSA_P384_SHA_256 = 19 """ These algorithms are deprecated and should not be used, but they were/are being used by most Sigstore clients implementations. """ PKIX_ECDSA_P521_SHA_256 = 20 LMS_SHA256 = 14 """ LMS and LM-OTS These keys and signatures may be used by private Sigstore deployments, but are not currently supported by the public good instance. USER WARNING: LMS and LM-OTS are both stateful signature schemes. Using them correctly requires discretion and careful consideration to ensure that individual secret keys are not used more than once. In addition, LM-OTS is a single-use scheme, meaning that it MUST NOT be used for more than one signature per LM-OTS key. If you cannot maintain these invariants, you MUST NOT use these schemes. """ LMOTS_SHA256 = 15 @classmethod def __get_pydantic_core_schema__(cls, _source_type, _handler): from pydantic_core import core_schema return core_schema.int_schema(ge=0) class SubjectAlternativeNameType(betterproto.Enum): UNSPECIFIED = 0 EMAIL = 1 URI = 2 OTHER_NAME = 3 """ OID 1.3.6.1.4.1.57264.1.7 See https://github.com/sigstore/fulcio/blob/main/docs/oid-info.md#1361415726417--othername-san for more details. """ @classmethod def __get_pydantic_core_schema__(cls, _source_type, _handler): from pydantic_core import core_schema return core_schema.int_schema(ge=0) @dataclass(eq=False, repr=False) class HashOutput(betterproto.Message): """ HashOutput captures a digest of a 'message' (generic octet sequence) and the corresponding hash algorithm used. """ algorithm: "HashAlgorithm" = betterproto.enum_field(1) digest: bytes = betterproto.bytes_field(2) """ This is the raw octets of the message digest as computed by the hash algorithm. """ @dataclass(eq=False, repr=False) class MessageSignature(betterproto.Message): """MessageSignature stores the computed signature over a message.""" message_digest: "HashOutput" = betterproto.message_field(1) """ Message digest can be used to identify the artifact. 
Clients MUST NOT attempt to use this digest to verify the associated signature; it is intended solely for identification. """ signature: bytes = betterproto.bytes_field(2) """ The raw bytes as returned from the signature algorithm. The signature algorithm (and so the format of the signature bytes) are determined by the contents of the 'verification_material', either a key-pair or a certificate. If using a certificate, the certificate contains the required information on the signature algorithm. When using a key pair, the algorithm MUST be part of the public key, which MUST be communicated out-of-band. """ @dataclass(eq=False, repr=False) class LogId(betterproto.Message): """LogId captures the identity of a transparency log.""" key_id: bytes = betterproto.bytes_field(1) """The unique identity of the log, represented by its public key.""" @dataclass(eq=False, repr=False) class Rfc3161SignedTimestamp(betterproto.Message): """This message holds a RFC 3161 timestamp.""" signed_timestamp: bytes = betterproto.bytes_field(1) """ Signed timestamp is the DER encoded TimeStampResponse. See https://www.rfc-editor.org/rfc/rfc3161.html#section-2.4.2 """ @dataclass(eq=False, repr=False) class PublicKey(betterproto.Message): raw_bytes: Optional[bytes] = betterproto.bytes_field(1, optional=True) """ DER-encoded public key, encoding method is specified by the key_details attribute. """ key_details: "PublicKeyDetails" = betterproto.enum_field(2) """Key encoding and signature algorithm to use for this key.""" valid_for: Optional["TimeRange"] = betterproto.message_field(3, optional=True) """Optional validity period for this key, *inclusive* of the endpoints.""" @dataclass(eq=False, repr=False) class PublicKeyIdentifier(betterproto.Message): """ PublicKeyIdentifier can be used to identify an (out of band) delivered key, to verify a signature. """ hint: str = betterproto.string_field(1) """ Optional unauthenticated hint on which key to use. The format of the hint must be agreed upon out of band by the signer and the verifiers, and so is not subject to this specification. Example use-case is to specify the public key to use, from a trusted key-ring. Implementors are RECOMMENDED to derive the value from the public key as described in RFC 6962. See: """ @dataclass(eq=False, repr=False) class ObjectIdentifier(betterproto.Message): """An ASN.1 OBJECT IDENTIFIER""" id: List[int] = betterproto.int32_field(1) @dataclass(eq=False, repr=False) class ObjectIdentifierValuePair(betterproto.Message): """An OID and the corresponding (byte) value.""" oid: "ObjectIdentifier" = betterproto.message_field(1) value: bytes = betterproto.bytes_field(2) @dataclass(eq=False, repr=False) class DistinguishedName(betterproto.Message): organization: str = betterproto.string_field(1) common_name: str = betterproto.string_field(2) @dataclass(eq=False, repr=False) class X509Certificate(betterproto.Message): raw_bytes: bytes = betterproto.bytes_field(1) """DER-encoded X.509 certificate.""" @dataclass(eq=False, repr=False) class SubjectAlternativeName(betterproto.Message): type: "SubjectAlternativeNameType" = betterproto.enum_field(1) regexp: Optional[str] = betterproto.string_field(2, optional=True, group="identity") """ A regular expression describing the expected value for the SAN. 
""" value: Optional[str] = betterproto.string_field(3, optional=True, group="identity") """The exact value to match against.""" @model_validator(mode="after") def check_oneof(cls, values): return cls._validate_field_groups(values) @dataclass(eq=False, repr=False) class X509CertificateChain(betterproto.Message): """ A collection of X.509 certificates. This "chain" can be used in multiple contexts, such as providing a root CA certificate within a TUF root of trust or multiple untrusted certificates for the purpose of chain building. """ certificates: List["X509Certificate"] = betterproto.message_field(1) """ One or more DER-encoded certificates. In some contexts (such as `VerificationMaterial.x509_certificate_chain`), this sequence has an imposed order. Unless explicitly specified, there is otherwise no guaranteed order. """ @dataclass(eq=False, repr=False) class TimeRange(betterproto.Message): """ The time range is closed and includes both the start and end times, (i.e., [start, end]). End is optional to be able to capture a period that has started but has no known end. """ start: datetime = betterproto.message_field(1) end: Optional[datetime] = betterproto.message_field(2, optional=True) rebuild_dataclass(HashOutput) # type: ignore rebuild_dataclass(MessageSignature) # type: ignore rebuild_dataclass(PublicKey) # type: ignore rebuild_dataclass(ObjectIdentifierValuePair) # type: ignore rebuild_dataclass(SubjectAlternativeName) # type: ignore rebuild_dataclass(X509CertificateChain) # type: ignore rebuild_dataclass(TimeRange) # type: ignore protobuf-specs-0.4.1/gen/pb-python/sigstore_protobuf_specs/dev/sigstore/events/000077500000000000000000000000001477352757300301015ustar00rootroot00000000000000protobuf-specs-0.4.1/gen/pb-python/sigstore_protobuf_specs/dev/sigstore/events/__init__.py000066400000000000000000000000001477352757300322000ustar00rootroot00000000000000protobuf-specs-0.4.1/gen/pb-python/sigstore_protobuf_specs/dev/sigstore/events/v1/000077500000000000000000000000001477352757300304275ustar00rootroot00000000000000protobuf-specs-0.4.1/gen/pb-python/sigstore_protobuf_specs/dev/sigstore/events/v1/__init__.py000066400000000000000000000052041477352757300325410ustar00rootroot00000000000000# Generated by the protocol buffer compiler. DO NOT EDIT! 
# sources: events.proto # plugin: python-betterproto # This file has been @generated from typing import TYPE_CHECKING if TYPE_CHECKING: from dataclasses import dataclass else: from pydantic.dataclasses import dataclass from datetime import datetime from typing import ( Dict, List, Optional, ) import betterproto import betterproto.lib.pydantic.google.protobuf as betterproto_lib_pydantic_google_protobuf from pydantic import model_validator from pydantic.dataclasses import rebuild_dataclass @dataclass(eq=False, repr=False) class CloudEvent(betterproto.Message): id: str = betterproto.string_field(1) """Required Attributes""" source: str = betterproto.string_field(2) spec_version: str = betterproto.string_field(3) type: str = betterproto.string_field(4) attributes: Dict[str, "CloudEventCloudEventAttributeValue"] = betterproto.map_field( 5, betterproto.TYPE_STRING, betterproto.TYPE_MESSAGE ) """Optional & Extension Attributes""" binary_data: Optional[bytes] = betterproto.bytes_field( 6, optional=True, group="data" ) text_data: Optional[str] = betterproto.string_field(7, optional=True, group="data") proto_data: Optional["betterproto_lib_pydantic_google_protobuf.Any"] = ( betterproto.message_field(8, optional=True, group="data") ) @model_validator(mode="after") def check_oneof(cls, values): return cls._validate_field_groups(values) @dataclass(eq=False, repr=False) class CloudEventCloudEventAttributeValue(betterproto.Message): ce_boolean: Optional[bool] = betterproto.bool_field(1, optional=True, group="attr") ce_integer: Optional[int] = betterproto.int32_field(2, optional=True, group="attr") ce_string: Optional[str] = betterproto.string_field(3, optional=True, group="attr") ce_bytes: Optional[bytes] = betterproto.bytes_field(4, optional=True, group="attr") ce_uri: Optional[str] = betterproto.string_field(5, optional=True, group="attr") ce_uri_ref: Optional[str] = betterproto.string_field(6, optional=True, group="attr") ce_timestamp: Optional[datetime] = betterproto.message_field( 7, optional=True, group="attr" ) @model_validator(mode="after") def check_oneof(cls, values): return cls._validate_field_groups(values) @dataclass(eq=False, repr=False) class CloudEventBatch(betterproto.Message): events: List["CloudEvent"] = betterproto.message_field(1) rebuild_dataclass(CloudEvent) # type: ignore rebuild_dataclass(CloudEventCloudEventAttributeValue) # type: ignore rebuild_dataclass(CloudEventBatch) # type: ignore protobuf-specs-0.4.1/gen/pb-python/sigstore_protobuf_specs/dev/sigstore/rekor/000077500000000000000000000000001477352757300277175ustar00rootroot00000000000000protobuf-specs-0.4.1/gen/pb-python/sigstore_protobuf_specs/dev/sigstore/rekor/__init__.py000066400000000000000000000000001477352757300320160ustar00rootroot00000000000000protobuf-specs-0.4.1/gen/pb-python/sigstore_protobuf_specs/dev/sigstore/rekor/v1/000077500000000000000000000000001477352757300302455ustar00rootroot00000000000000protobuf-specs-0.4.1/gen/pb-python/sigstore_protobuf_specs/dev/sigstore/rekor/v1/__init__.py000066400000000000000000000160341477352757300323620ustar00rootroot00000000000000# Generated by the protocol buffer compiler. DO NOT EDIT! 
# sources: sigstore_rekor.proto # plugin: python-betterproto # This file has been @generated from typing import TYPE_CHECKING if TYPE_CHECKING: from dataclasses import dataclass else: from pydantic.dataclasses import dataclass from typing import List import betterproto from pydantic.dataclasses import rebuild_dataclass from ...common import v1 as __common_v1__ @dataclass(eq=False, repr=False) class KindVersion(betterproto.Message): """KindVersion contains the entry's kind and api version.""" kind: str = betterproto.string_field(1) """ Kind is the type of entry being stored in the log. See here for a list: https://github.com/sigstore/rekor/tree/main/pkg/types """ version: str = betterproto.string_field(2) """The specific api version of the type.""" @dataclass(eq=False, repr=False) class Checkpoint(betterproto.Message): """ The checkpoint MUST contain an origin string as a unique log identifier, the tree size, and the root hash. It MAY also be followed by optional data, and clients MUST NOT assume optional data. The checkpoint MUST also contain a signature over the root hash (tree head). The checkpoint MAY contain additional signatures, but the first SHOULD be the signature from the log. Checkpoint contents are concatenated with newlines into a single string. The checkpoint format is described in https://github.com/transparency-dev/formats/blob/main/log/README.md and https://github.com/C2SP/C2SP/blob/main/tlog-checkpoint.md. An example implementation can be found in https://github.com/sigstore/rekor/blob/main/pkg/util/signed_note.go """ envelope: str = betterproto.string_field(1) @dataclass(eq=False, repr=False) class InclusionProof(betterproto.Message): """ InclusionProof is the proof returned from the transparency log. Can be used for offline or online verification against the log. """ log_index: int = betterproto.int64_field(1) """The index of the entry in the tree it was written to.""" root_hash: bytes = betterproto.bytes_field(2) """ The hash digest stored at the root of the merkle tree at the time the proof was generated. """ tree_size: int = betterproto.int64_field(3) """The size of the merkle tree at the time the proof was generated.""" hashes: List[bytes] = betterproto.bytes_field(4) """ A list of hashes required to compute the inclusion proof, sorted in order from leaf to root. Note that leaf and root hashes are not included. The root hash is available separately in this message, and the leaf hash should be calculated by the client. """ checkpoint: "Checkpoint" = betterproto.message_field(5) """ Signature of the tree head, as of the time of this proof was generated. See above info on 'Checkpoint' for more details. """ @dataclass(eq=False, repr=False) class InclusionPromise(betterproto.Message): """ The inclusion promise is calculated by Rekor. It's calculated as a signature over a canonical JSON serialization of the persisted entry, the log ID, log index and the integration timestamp. See https://github.com/sigstore/rekor/blob/a6e58f72b6b18cc06cefe61808efd562b9726330/pkg/api/entries.go#L54 The format of the signature depends on the transparency log's public key. If the signature algorithm requires a hash function and/or a signature scheme (e.g. RSA) those has to be retrieved out-of-band from the log's operators, together with the public key. This is used to verify the integration timestamp's value and that the log has promised to include the entry. 
""" signed_entry_timestamp: bytes = betterproto.bytes_field(1) @dataclass(eq=False, repr=False) class TransparencyLogEntry(betterproto.Message): """ TransparencyLogEntry captures all the details required from Rekor to reconstruct an entry, given that the payload is provided via other means. This type can easily be created from the existing response from Rekor. Future iterations could rely on Rekor returning the minimal set of attributes (excluding the payload) that are required for verifying the inclusion promise. The inclusion promise (called SignedEntryTimestamp in the response from Rekor) is similar to a Signed Certificate Timestamp as described here https://www.rfc-editor.org/rfc/rfc6962.html#section-3.2. """ log_index: int = betterproto.int64_field(1) """The global index of the entry, used when querying the log by index.""" log_id: "__common_v1__.LogId" = betterproto.message_field(2) """The unique identifier of the log.""" kind_version: "KindVersion" = betterproto.message_field(3) """ The kind (type) and version of the object associated with this entry. These values are required to construct the entry during verification. """ integrated_time: int = betterproto.int64_field(4) """ The UNIX timestamp from the log when the entry was persisted. The integration time MUST NOT be trusted if inclusion_promise is omitted. """ inclusion_promise: "InclusionPromise" = betterproto.message_field(5) """ The inclusion promise/signed entry timestamp from the log. Required for v0.1 bundles, and MUST be verified. Optional for >= v0.2 bundles if another suitable source of time is present (such as another source of signed time, or the current system time for long-lived certificates). MUST be verified if no other suitable source of time is present, and SHOULD be verified otherwise. """ inclusion_proof: "InclusionProof" = betterproto.message_field(6) """ The inclusion proof can be used for offline or online verification that the entry was appended to the log, and that the log has not been altered. """ canonicalized_body: bytes = betterproto.bytes_field(7) """ Optional. The canonicalized transparency log entry, used to reconstruct the Signed Entry Timestamp (SET) during verification. The contents of this field are the same as the `body` field in a Rekor response, meaning that it does **not** include the "full" canonicalized form (of log index, ID, etc.) which are exposed as separate fields. The verifier is responsible for combining the `canonicalized_body`, `log_index`, `log_id`, and `integrated_time` into the payload that the SET's signature is generated over. This field is intended to be used in cases where the SET cannot be produced determinisitically (e.g. inconsistent JSON field ordering, differing whitespace, etc). If set, clients MUST verify that the signature referenced in the `canonicalized_body` matches the signature provided in the `Bundle.content`. If not set, clients are responsible for constructing an equivalent payload from other sources to verify the signature. 
""" rebuild_dataclass(InclusionProof) # type: ignore rebuild_dataclass(TransparencyLogEntry) # type: ignore protobuf-specs-0.4.1/gen/pb-python/sigstore_protobuf_specs/dev/sigstore/trustroot/000077500000000000000000000000001477352757300306625ustar00rootroot00000000000000protobuf-specs-0.4.1/gen/pb-python/sigstore_protobuf_specs/dev/sigstore/trustroot/__init__.py000066400000000000000000000000001477352757300327610ustar00rootroot00000000000000protobuf-specs-0.4.1/gen/pb-python/sigstore_protobuf_specs/dev/sigstore/trustroot/v1/000077500000000000000000000000001477352757300312105ustar00rootroot00000000000000protobuf-specs-0.4.1/gen/pb-python/sigstore_protobuf_specs/dev/sigstore/trustroot/v1/__init__.py000066400000000000000000000353771477352757300333400ustar00rootroot00000000000000# Generated by the protocol buffer compiler. DO NOT EDIT! # sources: sigstore_trustroot.proto # plugin: python-betterproto # This file has been @generated from typing import TYPE_CHECKING if TYPE_CHECKING: from dataclasses import dataclass else: from pydantic.dataclasses import dataclass from typing import List import betterproto from pydantic.dataclasses import rebuild_dataclass from ...common import v1 as __common_v1__ class ServiceSelector(betterproto.Enum): """ ServiceSelector specifies how a client SHOULD select a set of Services to connect to. A client SHOULD throw an error if the value is SERVICE_SELECTOR_UNDEFINED. """ UNDEFINED = 0 ALL = 1 """ Clients SHOULD select all Services based on supported API version and validity window. """ ANY = 2 """ Clients SHOULD select one Service based on supported API version and validity window. It is up to the client implementation to decide how to select the Service, e.g. random or round-robin. """ EXACT = 3 """ Clients SHOULD select a specific number of Services based on supported API version and validity window, using the provided `count`. It is up to the client implementation to decide how to select the Service, e.g. random or round-robin. """ @classmethod def __get_pydantic_core_schema__(cls, _source_type, _handler): from pydantic_core import core_schema return core_schema.int_schema(ge=0) @dataclass(eq=False, repr=False) class TransparencyLogInstance(betterproto.Message): """ TransparencyLogInstance describes the immutable parameters from a transparency log. See https://www.rfc-editor.org/rfc/rfc9162.html#name-log-parameters for more details. The included parameters are the minimal set required to identify a log, and verify an inclusion proof/promise. """ base_url: str = betterproto.string_field(1) """The base URL at which can be used to URLs for the client.""" hash_algorithm: "__common_v1__.HashAlgorithm" = betterproto.enum_field(2) """The hash algorithm used for the Merkle Tree.""" public_key: "__common_v1__.PublicKey" = betterproto.message_field(3) """ The public key used to verify signatures generated by the log. This attribute contains the signature algorithm used by the log. """ log_id: "__common_v1__.LogId" = betterproto.message_field(4) """ The unique identifier for this transparency log. Represented as the SHA-256 hash of the log's public key, calculated over the DER encoding of the key represented as SubjectPublicKeyInfo. See https://www.rfc-editor.org/rfc/rfc6962#section-3.2 """ checkpoint_key_id: "__common_v1__.LogId" = betterproto.message_field(5) """ The checkpoint key identifier for the log used in a checkpoint. Optional, not provided for logs that do not generate checkpoints. 
For logs that do generate checkpoints, if not set, assume log_id equals checkpoint_key_id. Follows the specification described here for ECDSA and Ed25519 signatures: https://github.com/C2SP/C2SP/blob/main/signed-note.md#signatures For RSA signatures, the key ID will match the ECDSA format, the hashed DER-encoded SPKI public key. Publicly witnessed logs MUST NOT use RSA-signed checkpoints, since witnesses do not support RSA signatures. This is provided for convenience. Clients can also calculate the checkpoint key ID given the log's public key. SHOULD be set for logs generating Ed25519 signatures. SHOULD be 4 bytes long, as a truncated hash. """ @dataclass(eq=False, repr=False) class CertificateAuthority(betterproto.Message): """ CertificateAuthority enlists the information required to identify which CA to use and perform signature verification. """ subject: "__common_v1__.DistinguishedName" = betterproto.message_field(1) """ The root certificate MUST be self-signed, and so the subject and issuer are the same. """ uri: str = betterproto.string_field(2) """ The URI identifies the certificate authority. It is RECOMMENDED that the URI is the base URL for the certificate authority, that can be provided to any SDK/client provided by the certificate authority to interact with the certificate authority. """ cert_chain: "__common_v1__.X509CertificateChain" = betterproto.message_field(3) """ The certificate chain for this CA. The last certificate in the chain MUST be the trust anchor. The trust anchor MAY be a self-signed root CA certificate or MAY be an intermediate CA certificate. """ valid_for: "__common_v1__.TimeRange" = betterproto.message_field(4) """ The time the *entire* chain was valid. This is at max the longest interval when *all* certificates in the chain were valid, but it MAY be shorter. Clients MUST check timestamps against *both* the `valid_for` time range *and* the entire certificate chain. The TimeRange should be considered valid *inclusive* of the endpoints. """ @dataclass(eq=False, repr=False) class TrustedRoot(betterproto.Message): """ TrustedRoot describes the client's complete set of trusted entities. How the TrustedRoot is populated is not specified, but can be a combination of many sources such as TUF repositories, files on disk etc. The TrustedRoot is not meant to be used for any artifact verification, only to capture the complete/global set of trusted verification materials. When verifying an artifact, based on the artifact and policies, a selection of keys/authorities are expected to be extracted and provided to the verification function. This way the set of keys/authorities can be kept to a minimal set by the policy to gain better control over which signatures are allowed. The embedded transparency logs, CT logs, CAs and TSAs MUST include any previously used instance -- otherwise signatures made in the past cannot be verified. All the listed instances SHOULD be sorted by the 'valid_for' in ascending order, that is, the oldest instance first. Only the last instance is allowed to have its 'end' timestamp unset. All previous instances MUST have a closed interval of validity. The last instance MAY have a closed interval. Clients MUST accept instances that overlap in time; if not, clients may experience problems during rotations of verification materials. To be able to manage planned rotations of either transparency logs or certificate authorities, clients MUST accept lists of instances where the last instance has a 'valid_for' that belongs to the future.
This should not be a problem as clients SHOULD first seek the trust root for a suitable instance before creating a per-artifact trust root (that is, a sub-set of the complete trust root) that is used for verification. """ media_type: str = betterproto.string_field(1) """ MUST be application/vnd.dev.sigstore.trustedroot.v0.1+json when encoded as JSON. Clients MUST be able to process and parse content with the media type defined in the old format: application/vnd.dev.sigstore.trustedroot+json;version=0.1 """ tlogs: List["TransparencyLogInstance"] = betterproto.message_field(2) """A set of trusted Rekor servers.""" certificate_authorities: List["CertificateAuthority"] = betterproto.message_field(3) """ A set of trusted certificate authorities (e.g. Fulcio), and any intermediate certificates they provide. If a CA issues multiple intermediate certificates, each combination shall be represented as a separate chain. I.e., a single root cert may appear in multiple chains but with different intermediate and/or leaf certificates. The certificates are intended to be used for verifying artifact signatures. """ ctlogs: List["TransparencyLogInstance"] = betterproto.message_field(4) """A set of trusted certificate transparency logs.""" timestamp_authorities: List["CertificateAuthority"] = betterproto.message_field(5) """A set of trusted timestamping authorities.""" @dataclass(eq=False, repr=False) class SigningConfig(betterproto.Message): """ SigningConfig represents the trusted entities/state needed by Sigstore signing. In particular, it primarily contains service URLs that a Sigstore signer may need to connect to for the online aspects of signing. """ media_type: str = betterproto.string_field(5) """ MUST be application/vnd.dev.sigstore.signingconfig.v0.2+json Clients MAY choose to also support application/vnd.dev.sigstore.signingconfig.v0.1+json """ ca_urls: List["Service"] = betterproto.message_field(6) """ URLs to Fulcio-compatible CAs, capable of receiving Certificate Signing Requests (CSRs) and responding with issued certificates. These URLs MUST be the "base" URL for the CAs, which clients should construct an appropriate CSR endpoint on top of. For example, if a CA URL is `https://example.com/ca`, then the client MAY construct the CSR endpoint as `https://example.com/ca/api/v2/signingCert`. Clients MUST select only one Service with the highest API version that the client is compatible with, that is within its validity period, and has the newest validity start date. Clients SHOULD select the first Service that meets this requirement. All listed Services SHOULD be sorted by the `valid_for` window in descending order, with the newest instance first. """ oidc_urls: List["Service"] = betterproto.message_field(7) """ URLs to OpenID Connect identity providers. These URLs MUST be the "base" URLs for the OIDC IdPs, which clients should perform well-known OpenID Connect discovery against. Clients MUST select only one Service with the highest API version that the client is compatible with, that is within its validity period, and has the newest validity start date. Clients SHOULD select the first Service that meets this requirement. All listed Services SHOULD be sorted by the `valid_for` window in descending order, with the newest instance first. """ rekor_tlog_urls: List["Service"] = betterproto.message_field(8) """ URLs to Rekor transparency logs. These URLs MUST be the "base" URLs for the transparency logs, which clients should construct appropriate API endpoints on top of.
Clients MUST select Services with the highest API version that the client is compatible with, that are within its validity period, and have the newest validity start dates. All listed Services SHOULD be sorted by the `valid_for` window in descending order, with the newest instance first. Clients MUST select Services based on the selector value of `rekor_tlog_config`. """ rekor_tlog_config: "ServiceConfiguration" = betterproto.message_field(9) """ Specifies how a client should select the set of Rekor transparency logs to write to. """ tsa_urls: List["Service"] = betterproto.message_field(10) """ URLs to RFC 3161 Time Stamping Authorities (TSA). These URLs MUST be the *full* URL for the TSA, meaning that it should be suitable for submitting Time Stamp Requests (TSRs) to via HTTP, per RFC 3161. Clients MUST select Services with the highest API version that the client is compatible with, that are within its validity period, and have the newest validity start dates. All listed Services SHOULD be sorted by the `valid_for` window in descending order, with the newest instance first. Clients MUST select Services based on the selector value of `tsa_config`. """ tsa_config: "ServiceConfiguration" = betterproto.message_field(11) """ Specifies how a client should select the set of TSAs to request signed timestamps from. """ @dataclass(eq=False, repr=False) class Service(betterproto.Message): """ Service represents an instance of a service that is a part of Sigstore infrastructure. Clients MUST use the API version hint to determine the service with the highest API version that the client is compatible with. Clients MUST also only connect to services within the specified validity period and that has the newest validity start date. """ url: str = betterproto.string_field(1) """ URL of the service. MUST include scheme and authority. MAY include path. """ major_api_version: int = betterproto.uint32_field(2) """ Specifies the major API version. A value of 0 represents a service that has not yet been released. """ valid_for: "__common_v1__.TimeRange" = betterproto.message_field(3) """ Validity period of a service. A service that has only a start date SHOULD be considered the most recent instance of that service, but the client MUST NOT assume there is only one valid instance. The TimeRange MUST be considered valid *inclusive* of the endpoints. """ @dataclass(eq=False, repr=False) class ServiceConfiguration(betterproto.Message): """ ServiceConfiguration specifies how a client should select a set of Services to connect to, along with a count when a specific number of Services is requested. """ selector: "ServiceSelector" = betterproto.enum_field(1) """How a client should select a set of Services to connect to.""" count: int = betterproto.uint32_field(2) """ count specifies the number of Services the client should use. Only used when selector is set to EXACT, and count MUST be greater than 0. count MUST be less than or equal to the number of Services. """ @dataclass(eq=False, repr=False) class ClientTrustConfig(betterproto.Message): """ ClientTrustConfig describes the complete state needed by a client to perform both signing and verification operations against a particular instance of Sigstore. 
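As a purely illustrative sketch (not part of the generated schema; it assumes `services` is a list of `Service` messages with populated, timezone-aware `valid_for` ranges), the Service selection rules described above (highest compatible API version, within the validity period, newest start date) could be applied like this:

    from datetime import datetime, timezone

    def select_service(services, supported_versions):
        now = datetime.now(timezone.utc)
        candidates = [
            s for s in services
            if s.major_api_version in supported_versions
            and s.valid_for.start <= now
            and (s.valid_for.end is None or now <= s.valid_for.end)
        ]
        if not candidates:
            raise ValueError("no compatible Service is currently valid")
        # Prefer the highest API version, then the newest validity start date.
        return max(candidates, key=lambda s: (s.major_api_version, s.valid_for.start))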
""" media_type: str = betterproto.string_field(1) """MUST be application/vnd.dev.sigstore.clienttrustconfig.v0.1+json""" trusted_root: "TrustedRoot" = betterproto.message_field(2) """The root of trust, which MUST be present.""" signing_config: "SigningConfig" = betterproto.message_field(3) """Configuration for signing clients, which MUST be present.""" rebuild_dataclass(TransparencyLogInstance) # type: ignore rebuild_dataclass(CertificateAuthority) # type: ignore rebuild_dataclass(TrustedRoot) # type: ignore rebuild_dataclass(SigningConfig) # type: ignore rebuild_dataclass(Service) # type: ignore rebuild_dataclass(ServiceConfiguration) # type: ignore rebuild_dataclass(ClientTrustConfig) # type: ignore protobuf-specs-0.4.1/gen/pb-python/sigstore_protobuf_specs/dev/sigstore/verification/000077500000000000000000000000001477352757300312575ustar00rootroot00000000000000protobuf-specs-0.4.1/gen/pb-python/sigstore_protobuf_specs/dev/sigstore/verification/__init__.py000066400000000000000000000000001477352757300333560ustar00rootroot00000000000000protobuf-specs-0.4.1/gen/pb-python/sigstore_protobuf_specs/dev/sigstore/verification/v1/000077500000000000000000000000001477352757300316055ustar00rootroot00000000000000protobuf-specs-0.4.1/gen/pb-python/sigstore_protobuf_specs/dev/sigstore/verification/v1/__init__.py000066400000000000000000000206351477352757300337240ustar00rootroot00000000000000# Generated by the protocol buffer compiler. DO NOT EDIT! # sources: sigstore_verification.proto # plugin: python-betterproto # This file has been @generated from typing import TYPE_CHECKING if TYPE_CHECKING: from dataclasses import dataclass else: from pydantic.dataclasses import dataclass from typing import ( List, Optional, ) import betterproto from pydantic import model_validator from pydantic.dataclasses import rebuild_dataclass from ...bundle import v1 as __bundle_v1__ from ...common import v1 as __common_v1__ from ...trustroot import v1 as __trustroot_v1__ @dataclass(eq=False, repr=False) class CertificateIdentity(betterproto.Message): """The identity of a X.509 Certificate signer.""" issuer: str = betterproto.string_field(1) """The X.509v3 issuer extension (OID 1.3.6.1.4.1.57264.1.1)""" san: "__common_v1__.SubjectAlternativeName" = betterproto.message_field(2) oids: List["__common_v1__.ObjectIdentifierValuePair"] = betterproto.message_field(3) """ An unordered list of OIDs that must be verified. All OID/values provided in this list MUST exactly match against the values in the certificate for verification to be successful. """ @dataclass(eq=False, repr=False) class CertificateIdentities(betterproto.Message): identities: List["CertificateIdentity"] = betterproto.message_field(1) @dataclass(eq=False, repr=False) class PublicKeyIdentities(betterproto.Message): public_keys: List["__common_v1__.PublicKey"] = betterproto.message_field(1) @dataclass(eq=False, repr=False) class ArtifactVerificationOptions(betterproto.Message): """ A light-weight set of options/policies for identifying trusted signers, used during verification of a single artifact. 
""" certificate_identities: Optional["CertificateIdentities"] = ( betterproto.message_field(1, optional=True, group="signers") ) public_keys: Optional["PublicKeyIdentities"] = betterproto.message_field( 2, optional=True, group="signers" ) """ To simplify verification implementation, the logic for bundle verification should be implemented as a higher-order function, where one of argument should be an interface over the set of trusted public keys, like this: `Verify(bytes artifact, bytes signature, string key_id)`. This way the caller is in full control of mapping the identified (or hinted) key in the bundle to one of the trusted keys, as this process is inherently application specific. """ tlog_options: Optional["ArtifactVerificationOptionsTlogOptions"] = ( betterproto.message_field(3, optional=True) ) """ Optional options for artifact transparency log verification. If none is provided, the default verification options are: Threshold: 1 Online verification: false Disable: false """ ctlog_options: Optional["ArtifactVerificationOptionsCtlogOptions"] = ( betterproto.message_field(4, optional=True) ) """ Optional options for certificate transparency log verification. If none is provided, the default verification options are: Threshold: 1 Disable: false """ tsa_options: Optional["ArtifactVerificationOptionsTimestampAuthorityOptions"] = ( betterproto.message_field(5, optional=True) ) """ Optional options for certificate signed timestamp verification. If none is provided, the default verification options are: Threshold: 0 Disable: true """ integrated_ts_options: Optional[ "ArtifactVerificationOptionsTlogIntegratedTimestampOptions" ] = betterproto.message_field(6, optional=True) """ Optional options for integrated timestamp verification. If none is provided, the default verification options are: Threshold: 0 Disable: true """ observer_options: Optional[ "ArtifactVerificationOptionsObserverTimestampOptions" ] = betterproto.message_field(7, optional=True) """ Optional options for observed timestamp verification. If none is provided, the default verification options are: Threshold 1 Disable: false """ @model_validator(mode="after") def check_oneof(cls, values): return cls._validate_field_groups(values) @dataclass(eq=False, repr=False) class ArtifactVerificationOptionsTlogOptions(betterproto.Message): threshold: int = betterproto.int32_field(1) """Number of transparency logs the entry must appear on.""" perform_online_verification: bool = betterproto.bool_field(2) """Perform an online inclusion proof.""" disable: bool = betterproto.bool_field(3) """Disable verification for transparency logs.""" @dataclass(eq=False, repr=False) class ArtifactVerificationOptionsCtlogOptions(betterproto.Message): threshold: int = betterproto.int32_field(1) """ The number of ct transparency logs the certificate must appear on. 
""" disable: bool = betterproto.bool_field(3) """Disable ct transparency log verification""" @dataclass(eq=False, repr=False) class ArtifactVerificationOptionsTimestampAuthorityOptions(betterproto.Message): threshold: int = betterproto.int32_field(1) """The number of signed timestamps that are expected.""" disable: bool = betterproto.bool_field(2) """Disable signed timestamp verification.""" @dataclass(eq=False, repr=False) class ArtifactVerificationOptionsTlogIntegratedTimestampOptions(betterproto.Message): threshold: int = betterproto.int32_field(1) """The number of integrated timestamps that are expected.""" disable: bool = betterproto.bool_field(2) """Disable integrated timestamp verification.""" @dataclass(eq=False, repr=False) class ArtifactVerificationOptionsObserverTimestampOptions(betterproto.Message): threshold: int = betterproto.int32_field(1) """ The number of external observers of the timestamp. This is a union of RFC3161 signed timestamps, and integrated timestamps from a transparency log, that could include additional timestamp sources in the future. """ disable: bool = betterproto.bool_field(2) """Disable observer timestamp verification.""" @dataclass(eq=False, repr=False) class Artifact(betterproto.Message): artifact_uri: Optional[str] = betterproto.string_field( 1, optional=True, group="data" ) """Location of the artifact""" artifact: Optional[bytes] = betterproto.bytes_field(2, optional=True, group="data") """The raw bytes of the artifact""" artifact_digest: Optional["__common_v1__.HashOutput"] = betterproto.message_field( 3, optional=True, group="data" ) """ Digest of the artifact. SHOULD NOT be used when verifying an in-toto attestation as the subject digest cannot be reconstructed. This option will not work with Ed25519 signatures, use Ed25519Ph or another algorithm instead. """ @model_validator(mode="after") def check_oneof(cls, values): return cls._validate_field_groups(values) @dataclass(eq=False, repr=False) class Input(betterproto.Message): """ Input captures all that is needed to call the bundle verification method, to verify a single artifact referenced by the bundle. """ artifact_trust_root: "__trustroot_v1__.TrustedRoot" = betterproto.message_field(1) """ The verification materials provided during a bundle verification. The running process is usually preloaded with a "global" dev.sisgtore.trustroot.TrustedRoot.v1 instance. Prior to verifying an artifact (i.e a bundle), and/or based on current policy, some selection is expected to happen, to filter out the exact certificate authority to use, which transparency logs are relevant etc. The result should b ecaptured in the `artifact_trust_root`. """ artifact_verification_options: "ArtifactVerificationOptions" = ( betterproto.message_field(2) ) bundle: "__bundle_v1__.Bundle" = betterproto.message_field(3) artifact: Optional["Artifact"] = betterproto.message_field(4, optional=True) """ If the bundle contains a message signature, the artifact must be provided. 
""" rebuild_dataclass(CertificateIdentity) # type: ignore rebuild_dataclass(CertificateIdentities) # type: ignore rebuild_dataclass(PublicKeyIdentities) # type: ignore rebuild_dataclass(ArtifactVerificationOptions) # type: ignore rebuild_dataclass(Artifact) # type: ignore rebuild_dataclass(Input) # type: ignore protobuf-specs-0.4.1/gen/pb-python/sigstore_protobuf_specs/google/000077500000000000000000000000001477352757300254345ustar00rootroot00000000000000protobuf-specs-0.4.1/gen/pb-python/sigstore_protobuf_specs/google/__init__.py000066400000000000000000000000001477352757300275330ustar00rootroot00000000000000protobuf-specs-0.4.1/gen/pb-python/sigstore_protobuf_specs/google/api/000077500000000000000000000000001477352757300262055ustar00rootroot00000000000000protobuf-specs-0.4.1/gen/pb-python/sigstore_protobuf_specs/google/api/__init__.py000066400000000000000000000066161477352757300303270ustar00rootroot00000000000000# Generated by the protocol buffer compiler. DO NOT EDIT! # sources: google/api/field_behavior.proto # plugin: python-betterproto # This file has been @generated from typing import TYPE_CHECKING if TYPE_CHECKING: from dataclasses import dataclass else: from pydantic.dataclasses import dataclass import betterproto from pydantic.dataclasses import rebuild_dataclass class FieldBehavior(betterproto.Enum): """ An indicator of the behavior of a given field (for example, that a field is required in requests, or given as output but ignored as input). This **does not** change the behavior in protocol buffers itself; it only denotes the behavior and may affect how API tooling handles the field. Note: This enum **may** receive new values in the future. """ UNSPECIFIED = 0 """Conventional default for enums. Do not use this.""" OPTIONAL = 1 """ Specifically denotes a field as optional. While all fields in protocol buffers are optional, this may be specified for emphasis if appropriate. """ REQUIRED = 2 """ Denotes a field as required. This indicates that the field **must** be provided as part of the request, and failure to do so will cause an error (usually `INVALID_ARGUMENT`). """ OUTPUT_ONLY = 3 """ Denotes a field as output only. This indicates that the field is provided in responses, but including the field in a request does nothing (the server *must* ignore it and *must not* throw an error as a result of the field's presence). """ INPUT_ONLY = 4 """ Denotes a field as input only. This indicates that the field is provided in requests, and the corresponding field is not included in output. """ IMMUTABLE = 5 """ Denotes a field as immutable. This indicates that the field may be set once in a request to create a resource, but may not be changed thereafter. """ UNORDERED_LIST = 6 """ Denotes that a (repeated) field is an unordered list. This indicates that the service may provide the elements of the list in any arbitrary order, rather than the order the user originally provided. Additionally, the list's order may or may not be stable. """ NON_EMPTY_DEFAULT = 7 """ Denotes that this field returns a non-empty default value if not set. This indicates that if the user provides the empty value in a request, a non-empty value will be returned. The user will not be aware of what non-empty value to expect. """ IDENTIFIER = 8 """ Denotes that the field in a resource (a message annotated with google.api.resource) is used in the resource name to uniquely identify the resource. For AIP-compliant APIs, this should only be applied to the `name` field on the resource. 
This behavior should not be applied to references to other resources within the message. The identifier field of resources often have different field behavior depending on the request it is embedded in (e.g. for Create methods name is optional and unused, while for Update methods it is required). Instead of method-specific annotations, only `IDENTIFIER` is required. """ @classmethod def __get_pydantic_core_schema__(cls, _source_type, _handler): from pydantic_core import core_schema return core_schema.int_schema(ge=0) protobuf-specs-0.4.1/gen/pb-python/sigstore_protobuf_specs/io/000077500000000000000000000000001477352757300245675ustar00rootroot00000000000000protobuf-specs-0.4.1/gen/pb-python/sigstore_protobuf_specs/io/__init__.py000066400000000000000000000000001477352757300266660ustar00rootroot00000000000000protobuf-specs-0.4.1/gen/pb-python/sigstore_protobuf_specs/io/intoto/000077500000000000000000000000001477352757300261035ustar00rootroot00000000000000protobuf-specs-0.4.1/gen/pb-python/sigstore_protobuf_specs/io/intoto/__init__.py000066400000000000000000000032771477352757300302250ustar00rootroot00000000000000# Generated by the protocol buffer compiler. DO NOT EDIT! # sources: envelope.proto # plugin: python-betterproto # This file has been @generated from typing import TYPE_CHECKING if TYPE_CHECKING: from dataclasses import dataclass else: from pydantic.dataclasses import dataclass from typing import List import betterproto from pydantic.dataclasses import rebuild_dataclass @dataclass(eq=False, repr=False) class Envelope(betterproto.Message): """An authenticated message of arbitrary type.""" payload: bytes = betterproto.bytes_field(1) """ Message to be signed. (In JSON, this is encoded as base64.) REQUIRED. """ payload_type: str = betterproto.string_field(2) """ String unambiguously identifying how to interpret payload. REQUIRED. """ signatures: List["Signature"] = betterproto.message_field(3) """ Signature over: PAE(type, payload) Where PAE is defined as: PAE(type, payload) = "DSSEv1" + SP + LEN(type) + SP + type + SP + LEN(payload) + SP + payload + = concatenation SP = ASCII space [0x20] "DSSEv1" = ASCII [0x44, 0x53, 0x53, 0x45, 0x76, 0x31] LEN(s) = ASCII decimal encoding of the byte length of s, with no leading zeros REQUIRED (length >= 1). """ @dataclass(eq=False, repr=False) class Signature(betterproto.Message): sig: bytes = betterproto.bytes_field(1) """ Signature itself. (In JSON, this is encoded as base64.) REQUIRED. """ keyid: str = betterproto.string_field(2) """ *Unauthenticated* hint identifying which public key was used. OPTIONAL. """ rebuild_dataclass(Envelope) # type: ignore protobuf-specs-0.4.1/gen/pb-python/sigstore_protobuf_specs/py.typed000066400000000000000000000000001477352757300256450ustar00rootroot00000000000000protobuf-specs-0.4.1/gen/pb-ruby/000077500000000000000000000000001477352757300166445ustar00rootroot00000000000000protobuf-specs-0.4.1/gen/pb-ruby/LICENSE000066400000000000000000000261361477352757300176610ustar00rootroot00000000000000 Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. 
"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. 
Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. 
This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. protobuf-specs-0.4.1/gen/pb-ruby/README.md000066400000000000000000000020131477352757300201170ustar00rootroot00000000000000# sigstore_protobuf_specs These are the Ruby language bindings for Sigstore's protobuf specs. 
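As a quick taste of the bindings in use, here is a minimal, illustrative sketch (assumptions: the gem is already installed, and `bundle.sigstore.json` is a placeholder path to a Sigstore bundle in its protobuf-JSON form):

```ruby
require "sigstore_protobuf_specs"

# Parse a bundle from its canonical protobuf-JSON encoding.
bundle = Sigstore::Bundle::V1::Bundle.decode_json(File.read("bundle.sigstore.json"))

puts bundle.media_type
puts bundle.verification_material.tlog_entries.length
```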
See the [repository's README](https://github.com/sigstore/protobuf-specs) for more information. ## Installation Add this line to your application's Gemfile: ```ruby gem 'sigstore_protobuf_specs' ``` And then execute: ```bash $ bundle install ``` Or install it yourself as: ```bash $ gem install sigstore_protobuf_specs ``` ## Usage Import all the bindings: ```ruby require 'sigstore_protobuf_specs' ``` Or you can import them individually: ```ruby require 'sigstore_bundle_pb' ``` See what is available in `gen/pb-ruby/lib/`. ## Releasing Make sure you update the version in `gen/pb-ruby/lib/sigstore_protobuf_specs/version.rb`. A release will be built and automatically pushed to RubyGems when a tag in the format `release/ruby/v*` is created. Contact elfotografo007 for Gem ownership stuff. ## Contributing Bug reports and pull requests are welcome on GitHub at https://github.com/sigstore/protobuf-specs/issues. protobuf-specs-0.4.1/gen/pb-ruby/lib/000077500000000000000000000000001477352757300174125ustar00rootroot00000000000000protobuf-specs-0.4.1/gen/pb-ruby/lib/envelope_pb.rb000066400000000000000000000015711477352757300222410ustar00rootroot00000000000000# frozen_string_literal: true # Generated by the protocol buffer compiler. DO NOT EDIT! # source: envelope.proto require 'google/protobuf' descriptor_data = "\n\x0e\x65nvelope.proto\x12\tio.intoto\"Z\n\x08\x45nvelope\x12\x0f\n\x07payload\x18\x01 \x01(\x0c\x12\x13\n\x0bpayloadType\x18\x02 \x01(\t\x12(\n\nsignatures\x18\x03 \x03(\x0b\x32\x14.io.intoto.Signature\"\'\n\tSignature\x12\x0b\n\x03sig\x18\x01 \x01(\x0c\x12\r\n\x05keyid\x18\x02 \x01(\tBDZ1github.com/sigstore/protobuf-specs/gen/pb-go/dsse\xea\x02\x0eSigstore::DSSEb\x06proto3" pool = Google::Protobuf::DescriptorPool.generated_pool pool.add_serialized_file(descriptor_data) module Sigstore module DSSE Envelope = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("io.intoto.Envelope").msgclass Signature = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("io.intoto.Signature").msgclass end end protobuf-specs-0.4.1/gen/pb-ruby/lib/events_pb.rb000066400000000000000000000042611477352757300217270ustar00rootroot00000000000000# frozen_string_literal: true # Generated by the protocol buffer compiler. DO NOT EDIT! 
# source: events.proto require 'google/protobuf' require 'google/protobuf/any_pb' require 'google/protobuf/timestamp_pb' descriptor_data = "\n\x0c\x65vents.proto\x12\x16\x64\x65v.sigstore.events.v1\x1a\x19google/protobuf/any.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\xba\x04\n\nCloudEvent\x12\n\n\x02id\x18\x01 \x01(\t\x12\x0e\n\x06source\x18\x02 \x01(\t\x12\x14\n\x0cspec_version\x18\x03 \x01(\t\x12\x0c\n\x04type\x18\x04 \x01(\t\x12\x46\n\nattributes\x18\x05 \x03(\x0b\x32\x32.dev.sigstore.events.v1.CloudEvent.AttributesEntry\x12\x15\n\x0b\x62inary_data\x18\x06 \x01(\x0cH\x00\x12\x13\n\ttext_data\x18\x07 \x01(\tH\x00\x12*\n\nproto_data\x18\x08 \x01(\x0b\x32\x14.google.protobuf.AnyH\x00\x1an\n\x0f\x41ttributesEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12J\n\x05value\x18\x02 \x01(\x0b\x32;.dev.sigstore.events.v1.CloudEvent.CloudEventAttributeValue:\x02\x38\x01\x1a\xd3\x01\n\x18\x43loudEventAttributeValue\x12\x14\n\nce_boolean\x18\x01 \x01(\x08H\x00\x12\x14\n\nce_integer\x18\x02 \x01(\x05H\x00\x12\x13\n\tce_string\x18\x03 \x01(\tH\x00\x12\x12\n\x08\x63\x65_bytes\x18\x04 \x01(\x0cH\x00\x12\x10\n\x06\x63\x65_uri\x18\x05 \x01(\tH\x00\x12\x14\n\nce_uri_ref\x18\x06 \x01(\tH\x00\x12\x32\n\x0c\x63\x65_timestamp\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x42\x06\n\x04\x61ttrB\x06\n\x04\x64\x61ta\"E\n\x0f\x43loudEventBatch\x12\x32\n\x06\x65vents\x18\x01 \x03(\x0b\x32\".dev.sigstore.events.v1.CloudEventBk\n\x1c\x64\x65v.sigstore.proto.events.v1P\x01Z6github.com/sigstore/protobuf-specs/gen/pb-go/events/v1\xea\x02\x10Sigstore::Eventsb\x06proto3" pool = Google::Protobuf::DescriptorPool.generated_pool pool.add_serialized_file(descriptor_data) module Sigstore module Events CloudEvent = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("dev.sigstore.events.v1.CloudEvent").msgclass CloudEvent::CloudEventAttributeValue = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("dev.sigstore.events.v1.CloudEvent.CloudEventAttributeValue").msgclass CloudEventBatch = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("dev.sigstore.events.v1.CloudEventBatch").msgclass end end protobuf-specs-0.4.1/gen/pb-ruby/lib/sigstore_bundle_pb.rb000066400000000000000000000045141477352757300236140ustar00rootroot00000000000000# frozen_string_literal: true # Generated by the protocol buffer compiler. DO NOT EDIT! 
# source: sigstore_bundle.proto require 'google/protobuf' require 'google/api/field_behavior_pb' require 'envelope_pb' require 'sigstore_common_pb' require 'sigstore_rekor_pb' descriptor_data = "\n\x15sigstore_bundle.proto\x12\x16\x64\x65v.sigstore.bundle.v1\x1a\x1fgoogle/api/field_behavior.proto\x1a\x0e\x65nvelope.proto\x1a\x15sigstore_common.proto\x1a\x14sigstore_rekor.proto\"g\n\x19TimestampVerificationData\x12J\n\x12rfc3161_timestamps\x18\x01 \x03(\x0b\x32..dev.sigstore.common.v1.RFC3161SignedTimestamp\"\x9e\x03\n\x14VerificationMaterial\x12\x46\n\npublic_key\x18\x01 \x01(\x0b\x32+.dev.sigstore.common.v1.PublicKeyIdentifierB\x03\xe0\x41\x02H\x00\x12S\n\x16x509_certificate_chain\x18\x02 \x01(\x0b\x32,.dev.sigstore.common.v1.X509CertificateChainB\x03\xe0\x41\x02H\x00\x12\x43\n\x0b\x63\x65rtificate\x18\x05 \x01(\x0b\x32\'.dev.sigstore.common.v1.X509CertificateB\x03\xe0\x41\x02H\x00\x12\x41\n\x0ctlog_entries\x18\x03 \x03(\x0b\x32+.dev.sigstore.rekor.v1.TransparencyLogEntry\x12V\n\x1btimestamp_verification_data\x18\x04 \x01(\x0b\x32\x31.dev.sigstore.bundle.v1.TimestampVerificationDataB\t\n\x07\x63ontent\"\xfe\x01\n\x06\x42undle\x12\x12\n\nmedia_type\x18\x01 \x01(\t\x12P\n\x15verification_material\x18\x02 \x01(\x0b\x32,.dev.sigstore.bundle.v1.VerificationMaterialB\x03\xe0\x41\x02\x12J\n\x11message_signature\x18\x03 \x01(\x0b\x32(.dev.sigstore.common.v1.MessageSignatureB\x03\xe0\x41\x02H\x00\x12\x31\n\rdsse_envelope\x18\x04 \x01(\x0b\x32\x13.io.intoto.EnvelopeB\x03\xe0\x41\x02H\x00\x42\t\n\x07\x63ontentJ\x04\x08\x05\x10\x33\x42|\n\x1c\x64\x65v.sigstore.proto.bundle.v1B\x0b\x42undleProtoP\x01Z6github.com/sigstore/protobuf-specs/gen/pb-go/bundle/v1\xea\x02\x14Sigstore::Bundle::V1b\x06proto3" pool = Google::Protobuf::DescriptorPool.generated_pool pool.add_serialized_file(descriptor_data) module Sigstore module Bundle module V1 TimestampVerificationData = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("dev.sigstore.bundle.v1.TimestampVerificationData").msgclass VerificationMaterial = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("dev.sigstore.bundle.v1.VerificationMaterial").msgclass Bundle = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("dev.sigstore.bundle.v1.Bundle").msgclass end end end protobuf-specs-0.4.1/gen/pb-ruby/lib/sigstore_common_pb.rb000066400000000000000000000137471477352757300236430ustar00rootroot00000000000000# frozen_string_literal: true # Generated by the protocol buffer compiler. DO NOT EDIT! 
# source: sigstore_common.proto require 'google/protobuf' require 'google/api/field_behavior_pb' require 'google/protobuf/timestamp_pb' descriptor_data = "\n\x15sigstore_common.proto\x12\x16\x64\x65v.sigstore.common.v1\x1a\x1fgoogle/api/field_behavior.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"V\n\nHashOutput\x12\x38\n\talgorithm\x18\x01 \x01(\x0e\x32%.dev.sigstore.common.v1.HashAlgorithm\x12\x0e\n\x06\x64igest\x18\x02 \x01(\x0c\"f\n\x10MessageSignature\x12:\n\x0emessage_digest\x18\x01 \x01(\x0b\x32\".dev.sigstore.common.v1.HashOutput\x12\x16\n\tsignature\x18\x02 \x01(\x0c\x42\x03\xe0\x41\x02\"\x1c\n\x05LogId\x12\x13\n\x06key_id\x18\x01 \x01(\x0c\x42\x03\xe0\x41\x02\"7\n\x16RFC3161SignedTimestamp\x12\x1d\n\x10signed_timestamp\x18\x01 \x01(\x0c\x42\x03\xe0\x41\x02\"\xb9\x01\n\tPublicKey\x12\x16\n\traw_bytes\x18\x01 \x01(\x0cH\x00\x88\x01\x01\x12=\n\x0bkey_details\x18\x02 \x01(\x0e\x32(.dev.sigstore.common.v1.PublicKeyDetails\x12\x39\n\tvalid_for\x18\x03 \x01(\x0b\x32!.dev.sigstore.common.v1.TimeRangeH\x01\x88\x01\x01\x42\x0c\n\n_raw_bytesB\x0c\n\n_valid_for\"#\n\x13PublicKeyIdentifier\x12\x0c\n\x04hint\x18\x01 \x01(\t\"#\n\x10ObjectIdentifier\x12\x0f\n\x02id\x18\x01 \x03(\x05\x42\x03\xe0\x41\x02\"a\n\x19ObjectIdentifierValuePair\x12\x35\n\x03oid\x18\x01 \x01(\x0b\x32(.dev.sigstore.common.v1.ObjectIdentifier\x12\r\n\x05value\x18\x02 \x01(\x0c\">\n\x11\x44istinguishedName\x12\x14\n\x0corganization\x18\x01 \x01(\t\x12\x13\n\x0b\x63ommon_name\x18\x02 \x01(\t\")\n\x0fX509Certificate\x12\x16\n\traw_bytes\x18\x01 \x01(\x0c\x42\x03\xe0\x41\x02\"\x89\x01\n\x16SubjectAlternativeName\x12@\n\x04type\x18\x01 \x01(\x0e\x32\x32.dev.sigstore.common.v1.SubjectAlternativeNameType\x12\x10\n\x06regexp\x18\x02 \x01(\tH\x00\x12\x0f\n\x05value\x18\x03 \x01(\tH\x00\x42\n\n\x08identity\"U\n\x14X509CertificateChain\x12=\n\x0c\x63\x65rtificates\x18\x01 \x03(\x0b\x32\'.dev.sigstore.common.v1.X509Certificate\"l\n\tTimeRange\x12)\n\x05start\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.Timestamp\x12,\n\x03\x65nd\x18\x02 
\x01(\x0b\x32\x1a.google.protobuf.TimestampH\x00\x88\x01\x01\x42\x06\n\x04_end*u\n\rHashAlgorithm\x12\x1e\n\x1aHASH_ALGORITHM_UNSPECIFIED\x10\x00\x12\x0c\n\x08SHA2_256\x10\x01\x12\x0c\n\x08SHA2_384\x10\x02\x12\x0c\n\x08SHA2_512\x10\x03\x12\x0c\n\x08SHA3_256\x10\x04\x12\x0c\n\x08SHA3_384\x10\x05*\xe9\x04\n\x10PublicKeyDetails\x12\"\n\x1ePUBLIC_KEY_DETAILS_UNSPECIFIED\x10\x00\x12\x19\n\x11PKCS1_RSA_PKCS1V5\x10\x01\x1a\x02\x08\x01\x12\x15\n\rPKCS1_RSA_PSS\x10\x02\x1a\x02\x08\x01\x12\x18\n\x10PKIX_RSA_PKCS1V5\x10\x03\x1a\x02\x08\x01\x12\x14\n\x0cPKIX_RSA_PSS\x10\x04\x1a\x02\x08\x01\x12!\n\x1dPKIX_RSA_PKCS1V15_2048_SHA256\x10\t\x12!\n\x1dPKIX_RSA_PKCS1V15_3072_SHA256\x10\n\x12!\n\x1dPKIX_RSA_PKCS1V15_4096_SHA256\x10\x0b\x12\x1c\n\x18PKIX_RSA_PSS_2048_SHA256\x10\x10\x12\x1c\n\x18PKIX_RSA_PSS_3072_SHA256\x10\x11\x12\x1c\n\x18PKIX_RSA_PSS_4096_SHA256\x10\x12\x12$\n\x1cPKIX_ECDSA_P256_HMAC_SHA_256\x10\x06\x1a\x02\x08\x01\x12\x1b\n\x17PKIX_ECDSA_P256_SHA_256\x10\x05\x12\x1b\n\x17PKIX_ECDSA_P384_SHA_384\x10\x0c\x12\x1b\n\x17PKIX_ECDSA_P521_SHA_512\x10\r\x12\x10\n\x0cPKIX_ED25519\x10\x07\x12\x13\n\x0fPKIX_ED25519_PH\x10\x08\x12\x1f\n\x17PKIX_ECDSA_P384_SHA_256\x10\x13\x1a\x02\x08\x01\x12\x1f\n\x17PKIX_ECDSA_P521_SHA_256\x10\x14\x1a\x02\x08\x01\x12\x0e\n\nLMS_SHA256\x10\x0e\x12\x10\n\x0cLMOTS_SHA256\x10\x0f\"\x04\x08\x15\x10\x32*o\n\x1aSubjectAlternativeNameType\x12-\n)SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED\x10\x00\x12\t\n\x05\x45MAIL\x10\x01\x12\x07\n\x03URI\x10\x02\x12\x0e\n\nOTHER_NAME\x10\x03\x42|\n\x1c\x64\x65v.sigstore.proto.common.v1B\x0b\x43ommonProtoP\x01Z6github.com/sigstore/protobuf-specs/gen/pb-go/common/v1\xea\x02\x14Sigstore::Common::V1b\x06proto3" pool = Google::Protobuf::DescriptorPool.generated_pool pool.add_serialized_file(descriptor_data) module Sigstore module Common module V1 HashOutput = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("dev.sigstore.common.v1.HashOutput").msgclass MessageSignature = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("dev.sigstore.common.v1.MessageSignature").msgclass LogId = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("dev.sigstore.common.v1.LogId").msgclass RFC3161SignedTimestamp = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("dev.sigstore.common.v1.RFC3161SignedTimestamp").msgclass PublicKey = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("dev.sigstore.common.v1.PublicKey").msgclass PublicKeyIdentifier = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("dev.sigstore.common.v1.PublicKeyIdentifier").msgclass ObjectIdentifier = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("dev.sigstore.common.v1.ObjectIdentifier").msgclass ObjectIdentifierValuePair = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("dev.sigstore.common.v1.ObjectIdentifierValuePair").msgclass DistinguishedName = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("dev.sigstore.common.v1.DistinguishedName").msgclass X509Certificate = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("dev.sigstore.common.v1.X509Certificate").msgclass SubjectAlternativeName = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("dev.sigstore.common.v1.SubjectAlternativeName").msgclass X509CertificateChain = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("dev.sigstore.common.v1.X509CertificateChain").msgclass TimeRange = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("dev.sigstore.common.v1.TimeRange").msgclass HashAlgorithm = 
::Google::Protobuf::DescriptorPool.generated_pool.lookup("dev.sigstore.common.v1.HashAlgorithm").enummodule PublicKeyDetails = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("dev.sigstore.common.v1.PublicKeyDetails").enummodule SubjectAlternativeNameType = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("dev.sigstore.common.v1.SubjectAlternativeNameType").enummodule end end end protobuf-specs-0.4.1/gen/pb-ruby/lib/sigstore_protobuf_specs.rb000066400000000000000000000013401477352757300247110ustar00rootroot00000000000000# frozen_string_literal: true # Copyright 2023 The Sigstore Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. require_relative 'sigstore_protobuf_specs/version' Dir['*_pb.rb', base: __dir__].each { |file| require_relative file } protobuf-specs-0.4.1/gen/pb-ruby/lib/sigstore_protobuf_specs/000077500000000000000000000000001477352757300243665ustar00rootroot00000000000000protobuf-specs-0.4.1/gen/pb-ruby/lib/sigstore_protobuf_specs/version.rb000066400000000000000000000012461477352757300264030ustar00rootroot00000000000000# frozen_string_literal: true # Copyright 2023 The Sigstore Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. module Dev module Sigstore VERSION = '0.4.0' end end protobuf-specs-0.4.1/gen/pb-ruby/lib/sigstore_rekor_pb.rb000066400000000000000000000050451477352757300234650ustar00rootroot00000000000000# frozen_string_literal: true # Generated by the protocol buffer compiler. DO NOT EDIT! 
# source: sigstore_rekor.proto require 'google/protobuf' require 'google/api/field_behavior_pb' require 'sigstore_common_pb' descriptor_data = "\n\x14sigstore_rekor.proto\x12\x15\x64\x65v.sigstore.rekor.v1\x1a\x1fgoogle/api/field_behavior.proto\x1a\x15sigstore_common.proto\"6\n\x0bKindVersion\x12\x11\n\x04kind\x18\x01 \x01(\tB\x03\xe0\x41\x02\x12\x14\n\x07version\x18\x02 \x01(\tB\x03\xe0\x41\x02\"#\n\nCheckpoint\x12\x15\n\x08\x65nvelope\x18\x01 \x01(\tB\x03\xe0\x41\x02\"\xa9\x01\n\x0eInclusionProof\x12\x16\n\tlog_index\x18\x01 \x01(\x03\x42\x03\xe0\x41\x02\x12\x16\n\troot_hash\x18\x02 \x01(\x0c\x42\x03\xe0\x41\x02\x12\x16\n\ttree_size\x18\x03 \x01(\x03\x42\x03\xe0\x41\x02\x12\x13\n\x06hashes\x18\x04 \x03(\x0c\x42\x03\xe0\x41\x02\x12:\n\ncheckpoint\x18\x05 \x01(\x0b\x32!.dev.sigstore.rekor.v1.CheckpointB\x03\xe0\x41\x02\"7\n\x10InclusionPromise\x12#\n\x16signed_entry_timestamp\x18\x01 \x01(\x0c\x42\x03\xe0\x41\x02\"\xe4\x02\n\x14TransparencyLogEntry\x12\x16\n\tlog_index\x18\x01 \x01(\x03\x42\x03\xe0\x41\x02\x12\x32\n\x06log_id\x18\x02 \x01(\x0b\x32\x1d.dev.sigstore.common.v1.LogIdB\x03\xe0\x41\x02\x12=\n\x0ckind_version\x18\x03 \x01(\x0b\x32\".dev.sigstore.rekor.v1.KindVersionB\x03\xe0\x41\x02\x12\x1c\n\x0fintegrated_time\x18\x04 \x01(\x03\x42\x03\xe0\x41\x02\x12\x42\n\x11inclusion_promise\x18\x05 \x01(\x0b\x32\'.dev.sigstore.rekor.v1.InclusionPromise\x12\x43\n\x0finclusion_proof\x18\x06 \x01(\x0b\x32%.dev.sigstore.rekor.v1.InclusionProofB\x03\xe0\x41\x02\x12\x1a\n\x12\x63\x61nonicalized_body\x18\x07 \x01(\x0c\x42x\n\x1b\x64\x65v.sigstore.proto.rekor.v1B\nRekorProtoP\x01Z5github.com/sigstore/protobuf-specs/gen/pb-go/rekor/v1\xea\x02\x13Sigstore::Rekor::V1b\x06proto3" pool = Google::Protobuf::DescriptorPool.generated_pool pool.add_serialized_file(descriptor_data) module Sigstore module Rekor module V1 KindVersion = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("dev.sigstore.rekor.v1.KindVersion").msgclass Checkpoint = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("dev.sigstore.rekor.v1.Checkpoint").msgclass InclusionProof = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("dev.sigstore.rekor.v1.InclusionProof").msgclass InclusionPromise = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("dev.sigstore.rekor.v1.InclusionPromise").msgclass TransparencyLogEntry = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("dev.sigstore.rekor.v1.TransparencyLogEntry").msgclass end end end protobuf-specs-0.4.1/gen/pb-ruby/lib/sigstore_trustroot_pb.rb000066400000000000000000000104411477352757300244240ustar00rootroot00000000000000# frozen_string_literal: true # Generated by the protocol buffer compiler. DO NOT EDIT! 
# source: sigstore_trustroot.proto require 'google/protobuf' require 'google/api/field_behavior_pb' require 'sigstore_common_pb' descriptor_data = "\n\x18sigstore_trustroot.proto\x12\x19\x64\x65v.sigstore.trustroot.v1\x1a\x1fgoogle/api/field_behavior.proto\x1a\x15sigstore_common.proto\"\x8a\x02\n\x17TransparencyLogInstance\x12\x10\n\x08\x62\x61se_url\x18\x01 \x01(\t\x12=\n\x0ehash_algorithm\x18\x02 \x01(\x0e\x32%.dev.sigstore.common.v1.HashAlgorithm\x12\x35\n\npublic_key\x18\x03 \x01(\x0b\x32!.dev.sigstore.common.v1.PublicKey\x12-\n\x06log_id\x18\x04 \x01(\x0b\x32\x1d.dev.sigstore.common.v1.LogId\x12\x38\n\x11\x63heckpoint_key_id\x18\x05 \x01(\x0b\x32\x1d.dev.sigstore.common.v1.LogId\"\xd7\x01\n\x14\x43\x65rtificateAuthority\x12:\n\x07subject\x18\x01 \x01(\x0b\x32).dev.sigstore.common.v1.DistinguishedName\x12\x0b\n\x03uri\x18\x02 \x01(\t\x12@\n\ncert_chain\x18\x03 \x01(\x0b\x32,.dev.sigstore.common.v1.X509CertificateChain\x12\x34\n\tvalid_for\x18\x04 \x01(\x0b\x32!.dev.sigstore.common.v1.TimeRange\"\xca\x02\n\x0bTrustedRoot\x12\x12\n\nmedia_type\x18\x01 \x01(\t\x12\x41\n\x05tlogs\x18\x02 \x03(\x0b\x32\x32.dev.sigstore.trustroot.v1.TransparencyLogInstance\x12P\n\x17\x63\x65rtificate_authorities\x18\x03 \x03(\x0b\x32/.dev.sigstore.trustroot.v1.CertificateAuthority\x12\x42\n\x06\x63tlogs\x18\x04 \x03(\x0b\x32\x32.dev.sigstore.trustroot.v1.TransparencyLogInstance\x12N\n\x15timestamp_authorities\x18\x05 \x03(\x0b\x32/.dev.sigstore.trustroot.v1.CertificateAuthority\"\x99\x03\n\rSigningConfig\x12\x12\n\nmedia_type\x18\x05 \x01(\t\x12\x33\n\x07\x63\x61_urls\x18\x06 \x03(\x0b\x32\".dev.sigstore.trustroot.v1.Service\x12\x35\n\toidc_urls\x18\x07 \x03(\x0b\x32\".dev.sigstore.trustroot.v1.Service\x12;\n\x0frekor_tlog_urls\x18\x08 \x03(\x0b\x32\".dev.sigstore.trustroot.v1.Service\x12J\n\x11rekor_tlog_config\x18\t \x01(\x0b\x32/.dev.sigstore.trustroot.v1.ServiceConfiguration\x12\x34\n\x08tsa_urls\x18\n \x03(\x0b\x32\".dev.sigstore.trustroot.v1.Service\x12\x43\n\ntsa_config\x18\x0b \x01(\x0b\x32/.dev.sigstore.trustroot.v1.ServiceConfigurationJ\x04\x08\x01\x10\x05\"g\n\x07Service\x12\x0b\n\x03url\x18\x01 \x01(\t\x12\x19\n\x11major_api_version\x18\x02 \x01(\r\x12\x34\n\tvalid_for\x18\x03 \x01(\x0b\x32!.dev.sigstore.common.v1.TimeRange\"c\n\x14ServiceConfiguration\x12<\n\x08selector\x18\x01 \x01(\x0e\x32*.dev.sigstore.trustroot.v1.ServiceSelector\x12\r\n\x05\x63ount\x18\x02 \x01(\r\"\xb1\x01\n\x11\x43lientTrustConfig\x12\x12\n\nmedia_type\x18\x01 \x01(\t\x12\x41\n\x0ctrusted_root\x18\x02 \x01(\x0b\x32&.dev.sigstore.trustroot.v1.TrustedRootB\x03\xe0\x41\x02\x12\x45\n\x0esigning_config\x18\x03 \x01(\x0b\x32(.dev.sigstore.trustroot.v1.SigningConfigB\x03\xe0\x41\x02*N\n\x0fServiceSelector\x12\x1e\n\x1aSERVICE_SELECTOR_UNDEFINED\x10\x00\x12\x07\n\x03\x41LL\x10\x01\x12\x07\n\x03\x41NY\x10\x02\x12\t\n\x05\x45XACT\x10\x03\x42\x88\x01\n\x1f\x64\x65v.sigstore.proto.trustroot.v1B\x0eTrustRootProtoP\x01Z9github.com/sigstore/protobuf-specs/gen/pb-go/trustroot/v1\xea\x02\x17Sigstore::TrustRoot::V1b\x06proto3" pool = Google::Protobuf::DescriptorPool.generated_pool pool.add_serialized_file(descriptor_data) module Sigstore module TrustRoot module V1 TransparencyLogInstance = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("dev.sigstore.trustroot.v1.TransparencyLogInstance").msgclass CertificateAuthority = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("dev.sigstore.trustroot.v1.CertificateAuthority").msgclass TrustedRoot = 
::Google::Protobuf::DescriptorPool.generated_pool.lookup("dev.sigstore.trustroot.v1.TrustedRoot").msgclass SigningConfig = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("dev.sigstore.trustroot.v1.SigningConfig").msgclass Service = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("dev.sigstore.trustroot.v1.Service").msgclass ServiceConfiguration = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("dev.sigstore.trustroot.v1.ServiceConfiguration").msgclass ClientTrustConfig = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("dev.sigstore.trustroot.v1.ClientTrustConfig").msgclass ServiceSelector = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("dev.sigstore.trustroot.v1.ServiceSelector").enummodule end end end protobuf-specs-0.4.1/gen/pb-ruby/lib/sigstore_verification_pb.rb000066400000000000000000000127651477352757300250340ustar00rootroot00000000000000# frozen_string_literal: true # Generated by the protocol buffer compiler. DO NOT EDIT! # source: sigstore_verification.proto require 'google/protobuf' require 'sigstore_common_pb' require 'sigstore_trustroot_pb' require 'sigstore_bundle_pb' descriptor_data = "\n\x1bsigstore_verification.proto\x12\x1c\x64\x65v.sigstore.verification.v1\x1a\x15sigstore_common.proto\x1a\x18sigstore_trustroot.proto\x1a\x15sigstore_bundle.proto\"\xa3\x01\n\x13\x43\x65rtificateIdentity\x12\x0e\n\x06issuer\x18\x01 \x01(\t\x12;\n\x03san\x18\x02 \x01(\x0b\x32..dev.sigstore.common.v1.SubjectAlternativeName\x12?\n\x04oids\x18\x03 \x03(\x0b\x32\x31.dev.sigstore.common.v1.ObjectIdentifierValuePair\"^\n\x15\x43\x65rtificateIdentities\x12\x45\n\nidentities\x18\x01 \x03(\x0b\x32\x31.dev.sigstore.verification.v1.CertificateIdentity\"M\n\x13PublicKeyIdentities\x12\x36\n\x0bpublic_keys\x18\x01 \x03(\x0b\x32!.dev.sigstore.common.v1.PublicKey\"\xaa\t\n\x1b\x41rtifactVerificationOptions\x12U\n\x16\x63\x65rtificate_identities\x18\x01 \x01(\x0b\x32\x33.dev.sigstore.verification.v1.CertificateIdentitiesH\x00\x12H\n\x0bpublic_keys\x18\x02 \x01(\x0b\x32\x31.dev.sigstore.verification.v1.PublicKeyIdentitiesH\x00\x12`\n\x0ctlog_options\x18\x03 \x01(\x0b\x32\x45.dev.sigstore.verification.v1.ArtifactVerificationOptions.TlogOptionsH\x01\x88\x01\x01\x12\x62\n\rctlog_options\x18\x04 \x01(\x0b\x32\x46.dev.sigstore.verification.v1.ArtifactVerificationOptions.CtlogOptionsH\x02\x88\x01\x01\x12m\n\x0btsa_options\x18\x05 \x01(\x0b\x32S.dev.sigstore.verification.v1.ArtifactVerificationOptions.TimestampAuthorityOptionsH\x03\x88\x01\x01\x12|\n\x15integrated_ts_options\x18\x06 \x01(\x0b\x32X.dev.sigstore.verification.v1.ArtifactVerificationOptions.TlogIntegratedTimestampOptionsH\x04\x88\x01\x01\x12q\n\x10observer_options\x18\x07 \x01(\x0b\x32R.dev.sigstore.verification.v1.ArtifactVerificationOptions.ObserverTimestampOptionsH\x05\x88\x01\x01\x1aV\n\x0bTlogOptions\x12\x11\n\tthreshold\x18\x01 \x01(\x05\x12#\n\x1bperform_online_verification\x18\x02 \x01(\x08\x12\x0f\n\x07\x64isable\x18\x03 \x01(\x08\x1a\x38\n\x0c\x43tlogOptions\x12\x11\n\tthreshold\x18\x01 \x01(\x05\x12\x0f\n\x07\x64isable\x18\x03 \x01(\x08J\x04\x08\x02\x10\x03\x1a?\n\x19TimestampAuthorityOptions\x12\x11\n\tthreshold\x18\x01 \x01(\x05\x12\x0f\n\x07\x64isable\x18\x02 \x01(\x08\x1a\x44\n\x1eTlogIntegratedTimestampOptions\x12\x11\n\tthreshold\x18\x01 \x01(\x05\x12\x0f\n\x07\x64isable\x18\x02 \x01(\x08\x1a>\n\x18ObserverTimestampOptions\x12\x11\n\tthreshold\x18\x01 \x01(\x05\x12\x0f\n\x07\x64isable\x18\x02 
\x01(\x08\x42\t\n\x07signersB\x0f\n\r_tlog_optionsB\x10\n\x0e_ctlog_optionsB\x0e\n\x0c_tsa_optionsB\x18\n\x16_integrated_ts_optionsB\x13\n\x11_observer_options\"}\n\x08\x41rtifact\x12\x16\n\x0c\x61rtifact_uri\x18\x01 \x01(\tH\x00\x12\x12\n\x08\x61rtifact\x18\x02 \x01(\x0cH\x00\x12=\n\x0f\x61rtifact_digest\x18\x03 \x01(\x0b\x32\".dev.sigstore.common.v1.HashOutputH\x00\x42\x06\n\x04\x64\x61ta\"\xaa\x02\n\x05Input\x12\x43\n\x13\x61rtifact_trust_root\x18\x01 \x01(\x0b\x32&.dev.sigstore.trustroot.v1.TrustedRoot\x12`\n\x1d\x61rtifact_verification_options\x18\x02 \x01(\x0b\x32\x39.dev.sigstore.verification.v1.ArtifactVerificationOptions\x12.\n\x06\x62undle\x18\x03 \x01(\x0b\x32\x1e.dev.sigstore.bundle.v1.Bundle\x12=\n\x08\x61rtifact\x18\x04 \x01(\x0b\x32&.dev.sigstore.verification.v1.ArtifactH\x00\x88\x01\x01\x42\x0b\n\t_artifactB\x94\x01\n\"dev.sigstore.proto.verification.v1B\x11VerificationProtoP\x01Z= 2.3.0") spec.license = 'Apache-2.0' spec.required_ruby_version = ">= 2.7.0" spec.metadata["homepage_uri"] = spec.homepage spec.metadata["source_code_uri"] = "https://github.com/sigstore/protobuf-specs" spec.metadata["bug_tracker_uri"] = "https://github.com/sigstore/protobuf-specs/issues" # Specify which files should be added to the gem when it is released. # The `git ls-files -z` loads the files in the RubyGem that have been added into git. spec.files = Dir["README.md", "LICENSE", "lib/**/*"] spec.bindir = "bin" spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) } spec.require_paths = ["lib"] spec.add_runtime_dependency 'google-protobuf', '~> 4.29', '>= 4.29.3' spec.add_runtime_dependency 'googleapis-common-protos-types', '~> 1.18' end protobuf-specs-0.4.1/gen/pb-rust/000077500000000000000000000000001477352757300166605ustar00rootroot00000000000000protobuf-specs-0.4.1/gen/pb-rust/.gitignore000066400000000000000000000000071477352757300206450ustar00rootroot00000000000000target protobuf-specs-0.4.1/gen/pb-rust/Cargo.lock000066400000000000000000000352221477352757300205710ustar00rootroot00000000000000# This file is automatically @generated by Cargo. # It is not intended for manual editing. 
version = 4 [[package]] name = "aho-corasick" version = "1.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8e60d3430d3a69478ad0993f19238d2df97c507009a52b3c10addcd7f6bcb916" dependencies = [ "memchr", ] [[package]] name = "anyhow" version = "1.0.97" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dcfed56ad506cb2c684a14971b8861fdc3baaaae314b9e5f9bb532cbe3ba7a4f" [[package]] name = "autocfg" version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26" [[package]] name = "base64" version = "0.22.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" [[package]] name = "bitflags" version = "2.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1be3f42a67d6d345ecd59f675f3f012d6974981560836e938c22b424b85ce1be" [[package]] name = "bytes" version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "325918d6fe32f23b19878fe4b34794ae41fc19ddbe53b10571a4874d44ffd39b" [[package]] name = "cfg-if" version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "either" version = "1.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0" [[package]] name = "env_home" version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c7f84e12ccf0a7ddc17a6c41c93326024c42920d7ee630d04950e6926645c0fe" [[package]] name = "equivalent" version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" [[package]] name = "errno" version = "0.3.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "33d852cb9b869c2a9b3df2f71a3074817f01e1844f839a144f5fcef059a4eb5d" dependencies = [ "libc", "windows-sys", ] [[package]] name = "fastrand" version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" [[package]] name = "fixedbitset" version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" [[package]] name = "getrandom" version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c4567c8db10ae91089c99af84c68c38da3ec2f087c3f82960bcdbf3656b6f4d7" dependencies = [ "cfg-if", "libc", "wasi", ] [[package]] name = "glob" version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a8d1add55171497b4705a648c6b583acafb01d58050a51727785f0b2c8e0a2b2" [[package]] name = "hashbrown" version = "0.15.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bf151400ff0baff5465007dd2f3e717f3fe502074ca563069ce3a6629d07b289" [[package]] name = "heck" version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" [[package]] name = "indexmap" version = "2.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"62f822373a4fe84d4bb149bf54e584a7f4abec90e072ed49cda0edea5b95471f" dependencies = [ "equivalent", "hashbrown", ] [[package]] name = "itertools" version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "413ee7dfc52ee1a4949ceeb7dbc8a33f2d6c088194d9f922fb8318faf1f01186" dependencies = [ "either", ] [[package]] name = "itoa" version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d75a2a4b1b190afb6f5425f10f6a8f959d2ea0b9c2b1d79553551850539e4674" [[package]] name = "libc" version = "0.2.169" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b5aba8db14291edd000dfcc4d620c7ebfb122c613afb886ca8803fa4e128a20a" [[package]] name = "linux-raw-sys" version = "0.4.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d26c52dbd32dccf2d10cac7725f8eae5296885fb5703b261f7d0a0739ec807ab" [[package]] name = "log" version = "0.4.25" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "04cbf5b083de1c7e0222a7a51dbfdba1cbe1c6ab0b15e29fff3f6c077fd9cd9f" [[package]] name = "memchr" version = "2.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "78ca9ab1a0babb1e7d5695e3530886289c18cf2f87ec19a575a0abdce112e3a3" [[package]] name = "multimap" version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "defc4c55412d89136f966bbb339008b474350e5e6e78d2714439c386b3137a03" [[package]] name = "num-traits" version = "0.2.19" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841" dependencies = [ "autocfg", ] [[package]] name = "once_cell" version = "1.20.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775" [[package]] name = "ordered-float" version = "2.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "68f19d67e5a2795c94e73e0bb1cc1a7edeb2e28efd39e2e1c9b7a40c1108b11c" dependencies = [ "num-traits", ] [[package]] name = "petgraph" version = "0.6.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b4c5cc86750666a3ed20bdaf5ca2a0344f9c67674cae0515bec2da16fbaa47db" dependencies = [ "fixedbitset", "indexmap", ] [[package]] name = "prettyplease" version = "0.2.29" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6924ced06e1f7dfe3fa48d57b9f74f55d8915f5036121bef647ef4b204895fac" dependencies = [ "proc-macro2", "syn", ] [[package]] name = "proc-macro2" version = "1.0.93" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "60946a68e5f9d28b0dc1c21bb8a97ee7d018a8b322fa57838ba31cc878e22d99" dependencies = [ "unicode-ident", ] [[package]] name = "prost" version = "0.13.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2796faa41db3ec313a31f7624d9286acf277b52de526150b7e69f3debf891ee5" dependencies = [ "bytes", "prost-derive", ] [[package]] name = "prost-build" version = "0.13.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d0f3e5beed80eb580c68e2c600937ac2c4eedabdfd5ef1e5b7ea4f3fba84497b" dependencies = [ "heck", "itertools", "log", "multimap", "once_cell", "petgraph", "prettyplease", "prost", "prost-types", "regex", "syn", "tempfile", ] [[package]] name = "prost-derive" version = "0.13.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"8a56d757972c98b346a9b766e3f02746cde6dd1cd1d1d563472929fdd74bec4d" dependencies = [ "anyhow", "itertools", "proc-macro2", "quote", "syn", ] [[package]] name = "prost-reflect" version = "0.14.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a7b318f733603136dcc61aa9e77c928d67f87d2436c34ec052ba3f1b5ca219de" dependencies = [ "base64", "once_cell", "prost", "prost-reflect-derive", "prost-types", "serde", "serde-value", ] [[package]] name = "prost-reflect-build" version = "0.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "50e2537231d94dd2778920c2ada37dd9eb1ac0325bb3ee3ee651bd44c1134123" dependencies = [ "prost-build", "prost-reflect", ] [[package]] name = "prost-reflect-derive" version = "0.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f4fce6b22f15cc8d8d400a2b98ad29202b33bd56c7d9ddd815bc803a807ecb65" dependencies = [ "proc-macro2", "quote", "syn", ] [[package]] name = "prost-types" version = "0.13.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cc2f1e56baa61e93533aebc21af4d2134b70f66275e0fcdf3cbe43d77ff7e8fc" dependencies = [ "prost", ] [[package]] name = "quote" version = "1.0.40" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1885c039570dc00dcb4ff087a89e185fd56bae234ddc7f056a945bf36467248d" dependencies = [ "proc-macro2", ] [[package]] name = "regex" version = "1.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191" dependencies = [ "aho-corasick", "memchr", "regex-automata", "regex-syntax", ] [[package]] name = "regex-automata" version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908" dependencies = [ "aho-corasick", "memchr", "regex-syntax", ] [[package]] name = "regex-syntax" version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" [[package]] name = "rustix" version = "0.38.43" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a78891ee6bf2340288408954ac787aa063d8e8817e9f53abb37c695c6d834ef6" dependencies = [ "bitflags", "errno", "libc", "linux-raw-sys", "windows-sys", ] [[package]] name = "ryu" version = "1.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f3cb5ba0dc43242ce17de99c180e96db90b235b8a9fdc9543c96d2209116bd9f" [[package]] name = "serde" version = "1.0.218" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e8dfc9d19bdbf6d17e22319da49161d5d0108e4188e8b680aef6299eed22df60" dependencies = [ "serde_derive", ] [[package]] name = "serde-value" version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f3a1a3341211875ef120e117ea7fd5228530ae7e7036a779fdc9117be6b3282c" dependencies = [ "ordered-float", "serde", ] [[package]] name = "serde_derive" version = "1.0.218" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f09503e191f4e797cb8aac08e9a4a4695c5edf6a2e70e376d961ddd5c969f82b" dependencies = [ "proc-macro2", "quote", "syn", ] [[package]] name = "serde_json" version = "1.0.140" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "20068b6e96dc6c9bd23e01df8827e6c7e1f2fddd43c21810382803c136b99373" dependencies = [ "itoa", "memchr", "ryu", "serde", ] [[package]] 
name = "sigstore-protobuf-specs-codegen" version = "0.0.1" dependencies = [ "anyhow", "glob", "prost-build", "prost-reflect-build", "which", ] [[package]] name = "sigstore-protobuf-specs-derive" version = "0.0.1" dependencies = [ "quote", "syn", ] [[package]] name = "sigstore_protobuf_specs" version = "0.4.0" dependencies = [ "anyhow", "glob", "prost", "prost-build", "prost-reflect", "prost-reflect-build", "prost-types", "serde", "serde_json", "sigstore-protobuf-specs-derive", "which", ] [[package]] name = "syn" version = "2.0.100" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b09a44accad81e1ba1cd74a32461ba89dee89095ba17b32f5d03683b1b1fc2a0" dependencies = [ "proc-macro2", "quote", "unicode-ident", ] [[package]] name = "tempfile" version = "3.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9a8a559c81686f576e8cd0290cd2a24a2a9ad80c98b3478856500fcbd7acd704" dependencies = [ "cfg-if", "fastrand", "getrandom", "once_cell", "rustix", "windows-sys", ] [[package]] name = "unicode-ident" version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "adb9e6ca4f869e1180728b7950e35922a7fc6397f7b641499e8f3ef06e50dc83" [[package]] name = "wasi" version = "0.11.0+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] name = "which" version = "7.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fb4a9e33648339dc1642b0e36e21b3385e6148e289226f657c809dee59df5028" dependencies = [ "either", "env_home", "rustix", "winsafe", ] [[package]] name = "windows-sys" version = "0.59.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" dependencies = [ "windows-targets", ] [[package]] name = "windows-targets" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9b724f72796e036ab90c1021d4780d4d3d648aca59e491e6b98e725b84e99973" dependencies = [ "windows_aarch64_gnullvm", "windows_aarch64_msvc", "windows_i686_gnu", "windows_i686_gnullvm", "windows_i686_msvc", "windows_x86_64_gnu", "windows_x86_64_gnullvm", "windows_x86_64_msvc", ] [[package]] name = "windows_aarch64_gnullvm" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" [[package]] name = "windows_aarch64_msvc" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" [[package]] name = "windows_i686_gnu" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" [[package]] name = "windows_i686_gnullvm" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" [[package]] name = "windows_i686_msvc" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" [[package]] name = "windows_x86_64_gnu" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" [[package]] 
name = "windows_x86_64_gnullvm" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" [[package]] name = "windows_x86_64_msvc" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] name = "winsafe" version = "0.0.19" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d135d17ab770252ad95e9a872d365cf3090e3be864a34ab46f48555993efc904" protobuf-specs-0.4.1/gen/pb-rust/Cargo.toml000066400000000000000000000002541477352757300206110ustar00rootroot00000000000000[workspace] resolver = "2" members = [ "sigstore-protobuf-specs", "sigstore-protobuf-specs-codegen", "sigstore-protobuf-specs-derive", ] license = "Apache-2.0" protobuf-specs-0.4.1/gen/pb-rust/README.md000066400000000000000000000003161477352757300201370ustar00rootroot00000000000000sigstore-protobuf-specs ======================= Rust language bindings for Sigstore's protobuf specs. See the [sigstore's protobuf-specs](https://github.com/sigstore/protobuf-specs) for more information. protobuf-specs-0.4.1/gen/pb-rust/sigstore-protobuf-specs-codegen/000077500000000000000000000000001477352757300250725ustar00rootroot00000000000000protobuf-specs-0.4.1/gen/pb-rust/sigstore-protobuf-specs-codegen/Cargo.toml000066400000000000000000000003131477352757300270170ustar00rootroot00000000000000[package] name = "sigstore-protobuf-specs-codegen" version = "0.0.1" edition = "2021" [dependencies] anyhow = "1.0.97" glob = "0.3" prost-build = "0.13.4" prost-reflect-build = "0.14.0" which = "7.0.1" protobuf-specs-0.4.1/gen/pb-rust/sigstore-protobuf-specs-codegen/src/000077500000000000000000000000001477352757300256615ustar00rootroot00000000000000protobuf-specs-0.4.1/gen/pb-rust/sigstore-protobuf-specs-codegen/src/main.rs000066400000000000000000000037451477352757300271640ustar00rootroot00000000000000use std::path::Path; /// Find the standard protobuf include directory. fn protobuf_include_path() -> String { let mut protobuf_root = which::which("protoc") .ok() // dirname(/bin/protoc) / ../ .and_then(|path| path.ancestors().nth(2).map(|p| p.to_path_buf())) .expect("protobuf installation directory not found!"); protobuf_root.push("include"); protobuf_root.to_str().unwrap().to_owned() } fn main() -> anyhow::Result<()> { let includes = vec![ concat!(env!("CARGO_MANIFEST_DIR"), "/../../../protos").to_owned(), // WKTs path protobuf_include_path(), "/googleapis".to_owned(), // googleapis types path: set `SIGSTORE_PROTOBUF_EXTRA_INCLUDE` to override. std::env::var("SIGSTORE_PROTOBUF_EXTRA_INCLUDE").unwrap_or("/opt/include".to_owned()), ]; for include in &includes { let include = Path::new(include); if !include.is_dir() { panic!("invalid include dir: {:?}", include); } } let mut config = prost_build::Config::new(); config .include_file("mod.rs") .type_attribute( ".", "#[derive(sigstore_protobuf_specs_derive::Deserialize_proto, sigstore_protobuf_specs_derive::Serialize_proto)]", ) // Disable problematic comments interpreted as doctests. 
.disable_comments([".io.intoto.Envelope"]) .out_dir("sigstore-protobuf-specs/src/generated/"); prost_reflect_build::Builder::new() .file_descriptor_set_bytes("crate::FILE_DESCRIPTOR_SET_BYTES") .file_descriptor_set_path("sigstore-protobuf-specs/src/generated/file_descriptor_set.bin") .compile_protos_with_config( config, &glob::glob(concat!( env!("CARGO_MANIFEST_DIR"), "/../../../protos/*.proto" )) .expect("no protos found!") .flatten() .collect::<Vec<_>>(), &includes, )?; Ok(()) } protobuf-specs-0.4.1/gen/pb-rust/sigstore-protobuf-specs-derive/000077500000000000000000000000001477352757300247445ustar00rootroot00000000000000protobuf-specs-0.4.1/gen/pb-rust/sigstore-protobuf-specs-derive/Cargo.toml000066400000000000000000000010161477352757300266720ustar00rootroot00000000000000[package] name = "sigstore-protobuf-specs-derive" version = "0.0.1" edition = "2021" authors = ["Sigstore Authors "] homepage = "https://github.com/sigstore/protobuf-specs" repository = "https://github.com/sigstore/protobuf-specs" description = "Derive macros for sigstore-protobuf-specs. This is an implementation detail, you do not need this dependency." readme = "../README.md" license = "Apache-2.0" keywords = ["sigstore"] [dependencies] syn = "2.0" quote = "1.0" [lib] proc-macro = true protobuf-specs-0.4.1/gen/pb-rust/sigstore-protobuf-specs-derive/src/000077500000000000000000000000001477352757300255335ustar00rootroot00000000000000protobuf-specs-0.4.1/gen/pb-rust/sigstore-protobuf-specs-derive/src/lib.rs000066400000000000000000000034031477352757300266470ustar00rootroot00000000000000use proc_macro::TokenStream; use quote::quote; use syn::{parse_macro_input, DeriveInput}; #[proc_macro_derive(Serialize_proto)] pub fn derive_serialize(input: TokenStream) -> TokenStream { let input = parse_macro_input!(input as DeriveInput); let ident = input.ident; match input.data { syn::Data::Struct(_) => (), _ => return Default::default(), }; let expanded = quote! { impl serde::Serialize for #ident { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: serde::Serializer, { let message = prost_reflect::ReflectMessage::transcode_to_dynamic(self); serde::Serialize::serialize(&message, serializer) } } }; TokenStream::from(expanded) } #[proc_macro_derive(Deserialize_proto)] pub fn derive_deserialize(input: TokenStream) -> TokenStream { let input = parse_macro_input!(input as DeriveInput); let ident = input.ident; match input.data { syn::Data::Struct(_) => (), _ => return Default::default(), }; let expanded = quote! { impl<'de> serde::Deserialize<'de> for #ident { fn deserialize<D>(deserializer: D) -> Result<#ident, D::Error> where D: serde::Deserializer<'de>, { let concrete_msg: #ident = Default::default(); let descriptor = prost_reflect::ReflectMessage::descriptor(&concrete_msg); let dynamic_msg = prost_reflect::DynamicMessage::deserialize(descriptor, deserializer)?; Ok(dynamic_msg.transcode_to().expect("failed to convert DynamicMessage to concrete Message!")) } } }; TokenStream::from(expanded) } protobuf-specs-0.4.1/gen/pb-rust/sigstore-protobuf-specs/000077500000000000000000000000001477352757300234705ustar00rootroot00000000000000protobuf-specs-0.4.1/gen/pb-rust/sigstore-protobuf-specs/Cargo.lock000066400000000000000000000343331477352757300254030ustar00rootroot00000000000000# This file is automatically @generated by Cargo. # It is not intended for manual editing.
version = 3 [[package]] name = "aho-corasick" version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b2969dcb958b36655471fc61f7e416fa76033bdd4bfed0678d8fee1e2d07a1f0" dependencies = [ "memchr", ] [[package]] name = "anyhow" version = "1.0.72" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3b13c32d80ecc7ab747b80c3784bce54ee8a7a0cc4fbda9bf4cda2cf6fe90854" [[package]] name = "autocfg" version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa" [[package]] name = "base64" version = "0.21.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" [[package]] name = "bitflags" version = "1.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" version = "2.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ed570934406eb16438a4e976b1b4500774099c13b8cb96eec99f620f05090ddf" [[package]] name = "bytes" version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "89b2fd2a0dcf38d7971e2194b6b6eebab45ae01067456a7fd93d5547a61b70be" [[package]] name = "cfg-if" version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" [[package]] name = "either" version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a26ae43d7bcc3b814de94796a5e736d4029efb0ee900c12e2d54c993ad1a1e07" [[package]] name = "equivalent" version = "1.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" [[package]] name = "errno" version = "0.3.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a258e46cdc063eb8519c00b9fc845fc47bcfca4130e2f08e88665ceda8474245" dependencies = [ "libc", "windows-sys", ] [[package]] name = "fastrand" version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "25cbce373ec4653f1a01a31e8a5e5ec0c622dc27ff9c4e6606eefef5cbbed4a5" [[package]] name = "fixedbitset" version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" [[package]] name = "glob" version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" [[package]] name = "hashbrown" version = "0.14.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "290f1a1d9242c78d09ce40a5e87e7554ee637af1351968159f4952f028f75604" [[package]] name = "heck" version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" [[package]] name = "home" version = "0.5.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e3d1354bf6b7235cb4a0576c2619fd4ed18183f689b12b006a0ee7329eeff9a5" dependencies = [ "windows-sys", ] [[package]] name = "indexmap" version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d530e1a18b1cb4c484e6e34556a0d948706958449fca0cab753d649f2bce3d1f" dependencies 
= [ "equivalent", "hashbrown", ] [[package]] name = "itertools" version = "0.10.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b0fd2260e829bddf4cb6ea802289de2f86d6a7a690192fbe91b3f46e0f2c8473" dependencies = [ "either", ] [[package]] name = "itoa" version = "1.0.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38" [[package]] name = "libc" version = "0.2.152" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "13e3bf6590cbc649f4d1a3eefc9d5d6eb746f5200ffb04e5e142700b8faa56e7" [[package]] name = "linux-raw-sys" version = "0.4.13" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "01cda141df6706de531b6c46c3a33ecca755538219bd484262fa09410c13539c" [[package]] name = "log" version = "0.4.20" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f" [[package]] name = "memchr" version = "2.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "523dc4f511e55ab87b694dc30d0f820d60906ef06413f93d4d7a1385599cc149" [[package]] name = "multimap" version = "0.8.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e5ce46fe64a9d73be07dcbe690a38ce1b293be448fd8ce1e6c1b8062c9f72c6a" [[package]] name = "num-traits" version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "39e3200413f237f41ab11ad6d161bc7239c84dcb631773ccd7de3dfe4b5c267c" dependencies = [ "autocfg", ] [[package]] name = "once_cell" version = "1.19.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" [[package]] name = "ordered-float" version = "2.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "68f19d67e5a2795c94e73e0bb1cc1a7edeb2e28efd39e2e1c9b7a40c1108b11c" dependencies = [ "num-traits", ] [[package]] name = "petgraph" version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e1d3afd2628e69da2be385eb6f2fd57c8ac7977ceeff6dc166ff1657b0e386a9" dependencies = [ "fixedbitset", "indexmap", ] [[package]] name = "prettyplease" version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a41cf62165e97c7f814d2221421dbb9afcbcdb0a88068e5ea206e19951c2cbb5" dependencies = [ "proc-macro2", "syn", ] [[package]] name = "proc-macro2" version = "1.0.78" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e2422ad645d89c99f8f3e6b88a9fdeca7fabeac836b1002371c4367c8f984aae" dependencies = [ "unicode-ident", ] [[package]] name = "prost" version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "146c289cda302b98a28d40c8b3b90498d6e526dd24ac2ecea73e4e491685b94a" dependencies = [ "bytes", "prost-derive", ] [[package]] name = "prost-build" version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c55e02e35260070b6f716a2423c2ff1c3bb1642ddca6f99e1f26d06268a0e2d2" dependencies = [ "bytes", "heck", "itertools", "log", "multimap", "once_cell", "petgraph", "prettyplease", "prost", "prost-types", "regex", "syn", "tempfile", "which 4.4.2", ] [[package]] name = "prost-derive" version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "efb6c9a1dd1def8e2124d17e83a20af56f1570d6c2d2bd9e266ccb768df3840e" 
dependencies = [ "anyhow", "itertools", "proc-macro2", "quote", "syn", ] [[package]] name = "prost-reflect" version = "0.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "057237efdb71cf4b3f9396302a3d6599a92fa94063ba537b66130980ea9909f3" dependencies = [ "base64", "once_cell", "prost", "prost-reflect-derive", "prost-types", "serde", "serde-value", ] [[package]] name = "prost-reflect-build" version = "0.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b4d0aa0c82e0fc36214c77b4dabe00750b3c41be45055baf2631cbbb7769b8ca" dependencies = [ "prost-build", "prost-reflect", ] [[package]] name = "prost-reflect-derive" version = "0.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "172da1212c02be2c94901440cb27183cd92bff00ebacca5c323bf7520b8f9c04" dependencies = [ "proc-macro2", "quote", "syn", ] [[package]] name = "prost-types" version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "193898f59edcf43c26227dcd4c8427f00d99d61e95dcde58dabd49fa291d470e" dependencies = [ "prost", ] [[package]] name = "quote" version = "1.0.35" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "291ec9ab5efd934aaf503a6466c5d5251535d108ee747472c3977cc5acc868ef" dependencies = [ "proc-macro2", ] [[package]] name = "redox_syscall" version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa" dependencies = [ "bitflags 1.3.2", ] [[package]] name = "regex" version = "1.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b62dbe01f0b06f9d8dc7d49e05a0785f153b00b2c227856282f671e0318c9b15" dependencies = [ "aho-corasick", "memchr", "regex-automata", "regex-syntax", ] [[package]] name = "regex-automata" version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5bb987efffd3c6d0d8f5f89510bb458559eab11e4f869acb20bf845e016259cd" dependencies = [ "aho-corasick", "memchr", "regex-syntax", ] [[package]] name = "regex-syntax" version = "0.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f" [[package]] name = "rustix" version = "0.38.30" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "322394588aaf33c24007e8bb3238ee3e4c5c09c084ab32bc73890b99ff326bca" dependencies = [ "bitflags 2.4.2", "errno", "libc", "linux-raw-sys", "windows-sys", ] [[package]] name = "ryu" version = "1.0.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1ad4cc8da4ef723ed60bced201181d83791ad433213d8c24efffda1eec85d741" [[package]] name = "serde" version = "1.0.171" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "30e27d1e4fd7659406c492fd6cfaf2066ba8773de45ca75e855590f856dc34a9" dependencies = [ "serde_derive", ] [[package]] name = "serde-value" version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f3a1a3341211875ef120e117ea7fd5228530ae7e7036a779fdc9117be6b3282c" dependencies = [ "ordered-float", "serde", ] [[package]] name = "serde_derive" version = "1.0.171" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "389894603bd18c46fa56231694f8d827779c0951a667087194cf9de94ed24682" dependencies = [ "proc-macro2", "quote", "syn", ] [[package]] name = "serde_json" version = "1.0.103" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "d03b412469450d4404fe8499a268edd7f8b79fecb074b0d812ad64ca21f4031b" dependencies = [ "itoa", "ryu", "serde", ] [[package]] name = "sigstore_protobuf_specs" version = "0.1.0-rc.2" dependencies = [ "anyhow", "glob", "prost", "prost-build", "prost-reflect", "prost-reflect-build", "prost-types", "serde", "serde_json", "which 6.0.0", ] [[package]] name = "syn" version = "2.0.48" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0f3531638e407dfc0814761abb7c00a5b54992b849452a0646b7f65c9f770f3f" dependencies = [ "proc-macro2", "quote", "unicode-ident", ] [[package]] name = "tempfile" version = "3.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "01ce4141aa927a6d1bd34a041795abd0db1cccba5d5f24b009f694bdf3a1f3fa" dependencies = [ "cfg-if", "fastrand", "redox_syscall", "rustix", "windows-sys", ] [[package]] name = "unicode-ident" version = "1.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "301abaae475aa91687eb82514b328ab47a211a533026cb25fc3e519b86adfc3c" [[package]] name = "which" version = "4.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "87ba24419a2078cd2b0f2ede2691b6c66d8e47836da3b6db8265ebad47afbfc7" dependencies = [ "either", "home", "once_cell", "rustix", ] [[package]] name = "which" version = "6.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7fa5e0c10bf77f44aac573e498d1a82d5fbd5e91f6fc0a99e7be4b38e85e101c" dependencies = [ "either", "home", "once_cell", "rustix", "windows-sys", ] [[package]] name = "windows-sys" version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d" dependencies = [ "windows-targets", ] [[package]] name = "windows-targets" version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8a18201040b24831fbb9e4eb208f8892e1f50a37feb53cc7ff887feb8f50e7cd" dependencies = [ "windows_aarch64_gnullvm", "windows_aarch64_msvc", "windows_i686_gnu", "windows_i686_msvc", "windows_x86_64_gnu", "windows_x86_64_gnullvm", "windows_x86_64_msvc", ] [[package]] name = "windows_aarch64_gnullvm" version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cb7764e35d4db8a7921e09562a0304bf2f93e0a51bfccee0bd0bb0b666b015ea" [[package]] name = "windows_aarch64_msvc" version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbaa0368d4f1d2aaefc55b6fcfee13f41544ddf36801e793edbbfd7d7df075ef" [[package]] name = "windows_i686_gnu" version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a28637cb1fa3560a16915793afb20081aba2c92ee8af57b4d5f28e4b3e7df313" [[package]] name = "windows_i686_msvc" version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ffe5e8e31046ce6230cc7215707b816e339ff4d4d67c65dffa206fd0f7aa7b9a" [[package]] name = "windows_x86_64_gnu" version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3d6fa32db2bc4a2f5abeacf2b69f7992cd09dca97498da74a151a3132c26befd" [[package]] name = "windows_x86_64_gnullvm" version = "0.52.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1a657e1e9d3f514745a572a6846d3c7aa7dbe1658c056ed9c3344c4109a6949e" [[package]] name = "windows_x86_64_msvc" version = "0.52.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "dff9641d1cd4be8d1a070daf9e3773c5f67e78b4d9d42263020c057706765c04" protobuf-specs-0.4.1/gen/pb-rust/sigstore-protobuf-specs/Cargo.toml000066400000000000000000000015511477352757300254220ustar00rootroot00000000000000[package] name = "sigstore_protobuf_specs" version = "0.4.0" authors = ["Sigstore Authors "] edition = "2021" homepage = "https://github.com/sigstore/protobuf-specs" repository = "https://github.com/sigstore/protobuf-specs" description = "A library for serializing and deserializing Sigstore messages" readme = "../README.md" license = "Apache-2.0" keywords = ["sigstore"] categories = ["encoding", "parser-implementations"] [dependencies] prost-types = "0.13.4" prost = "0.13.5" prost-reflect = { version = "0.14.6", features = ["serde", "derive"] } serde = {version = "1.0", features = ["derive"]} serde_json = "1.0" sigstore-protobuf-specs-derive = { version = "0.0.1", path = "../sigstore-protobuf-specs-derive" } [build-dependencies] anyhow = "1.0.97" glob = "0.3" prost-build = "0.13.4" prost-reflect-build = "0.14.0" which = "7.0.1" protobuf-specs-0.4.1/gen/pb-rust/sigstore-protobuf-specs/assets/000077500000000000000000000000001477352757300247725ustar00rootroot00000000000000protobuf-specs-0.4.1/gen/pb-rust/sigstore-protobuf-specs/assets/a.txt000066400000000000000000000001571477352757300257560ustar00rootroot00000000000000DO NOT MODIFY ME! this is "a.txt", a sample input for sigstore-protobuf-specs' test suite. DO NOT MODIFY ME! protobuf-specs-0.4.1/gen/pb-rust/sigstore-protobuf-specs/assets/a.txt.sigstore000066400000000000000000000123161477352757300276140ustar00rootroot00000000000000{"mediaType": "application/vnd.dev.sigstore.bundle+json;version=0.2", "verificationMaterial": {"x509CertificateChain": {"certificates": [{"rawBytes": "MIICyjCCAk+gAwIBAgIUShApN6D/p2nhkAUYXANZuDspU40wCgYIKoZIzj0EAwMwNzEVMBMGA1UEChMMc2lnc3RvcmUuZGV2MR4wHAYDVQQDExVzaWdzdG9yZS1pbnRlcm1lZGlhdGUwHhcNMjQwMTI2MTkzNTI5WhcNMjQwMTI2MTk0NTI5WjAAMFkwEwYHKoZIzj0CAQYIKoZIzj0DAQcDQgAETlg64yErozlmXokHJcyN7OjHDBfIS1BXvukXd9PNxYTDkp1j5NdQnm+yH6HqvYLcylvga5iIK7KSprRX6M99I6OCAW4wggFqMA4GA1UdDwEB/wQEAwIHgDATBgNVHSUEDDAKBggrBgEFBQcDAzAdBgNVHQ4EFgQUeMzvd2GyzazwDGhInM+jtU130QAwHwYDVR0jBBgwFoAU39Ppz1YkEZb5qNjpKFWixi4YZD8wGAYDVR0RAQH/BA4wDIEKYUB0bnkudG93bjAsBgorBgEEAYO/MAEBBB5odHRwczovL2dpdGh1Yi5jb20vbG9naW4vb2F1dGgwLgYKKwYBBAGDvzABCAQgDB5odHRwczovL2dpdGh1Yi5jb20vbG9naW4vb2F1dGgwgYoGCisGAQQB1nkCBAIEfAR6AHgAdgDdPTBqxscRMmMZHhyZZzcCokpeuN48rf+HinKALynujgAAAY1HRSMSAAAEAwBHMEUCIQDODo1nxR9++rHfAZP+AyqwwmikJ27VcHPNPU+Gnq3S5wIgRjGJri32fkFxwf405Kmp3zNcx+s7kEdqV3Q6IUxTxQEwCgYIKoZIzj0EAwMDaQAwZgIxAMBcoQCOXt24cBBo5kCzF3j/SInrNCb4YivLyWrj5/rC5ych+Rygw/FgInM6kOROvAIxAJMiU4OFWWWAjaed8IS1DhG9YFNZnGWdwy7FFhLwwOa6qf4QsXAlUj+YPyrRkwfdng=="}]}, "tlogEntries": [{"logIndex": "66794718", "logId": {"keyId": "wNI9atQGlz+VWfO6LRygH4QUfY/8W4RFwiT5i5WRgB0="}, "kindVersion": {"kind": "hashedrekord", "version": "0.0.1"}, "integratedTime": "1706297730", "inclusionPromise": {"signedEntryTimestamp": "MEQCIA8KjI3qM1FojdnBSPXyII/7Q8NUgRQ0ji86ZNNWT1XqAiAA0msqxS4rN9xCo6jKcjGaKwFuHEwa5Mw1JCwBzLt1gw=="}, "inclusionProof": {"logIndex": "62631287", "rootHash": "1fx8bMb9/1d0q/PdLBgr5EVIs5kz2Shwpy4TFo8Uhis=", "treeSize": "62631288", "hashes": ["A6hYJrNwNazA1eoJIpV498CX76QaBgJWNoCRt1X74JE=", "f9+1RSu6Acof0xeSFOubv4ka3FdHBtpSVrdSbIAjMsQ=", "3ooji9Ujxw5HG1h56HHfj87vS4MOVVFUjVGuvJtW81M=", "HEgnXDufRCuJISdHCQjKnv3wP0PRUtE+AiYjdvZWaxw=", "/FEizqX7NOhA4OdohRvVtM2N5URHa6uesg3p4vEoQ4E=", 
"WoINPf5XzzezzULe1uVrKF5yQxRALb2KxRHOKi7Dttk=", "FpQhnaN+UmxzFqCood81DHl9WxyOOSpBMfD2FpNVk3k=", "WPXbPb4ACE/BbpP8q1dpTjRmTu4OFOse4d5YHP34YjA=", "+eTYHIbql8gaQnVj1zBqRSbN8d5uLSwQCZSNEu1IEQc=", "Dl6tJTXUpFc8TLlVlAbs+hrhujOBSxEW6PE/3+PwQIc=", "AGGlRS/pLuSZMVaGq6mY5uZswBtCoNSuaHM6P5twGuE=", "8v5YV3W9gmSnYBkC5JADJ4j3NA7GuFPPkPXA9OPNmTg=", "GgcbvbmxENRIPRbgqtWIgdwahX7JwKNl+o6XN+NdICM=", "v6TgT0lJE8lEEO1hEJGAUugTK5CNAqqixlVK80tmkb0=", "HjoTzYu7nFqxAa9lTSDZxoA4a1wJ4P8BT2/QyLM8PH4=", "IsLbMqrjdeHhyZ6XODgAs95aU12MJIbe9XB6kXaMDYw=", "UeXYBoLMUKvbOS7ToMsaoblG4fS/8QPQTTFGIBVeE70=", "mMSG/rXYcJKnikbEtb4EhoZUkAr/wuhv+yAHTcc6iDo=", "aWnEm9c/Gb8operqvTMd3WBQLe+yzT2W4Xt0HICt7Gw="], "checkpoint": {"envelope": "rekor.sigstore.dev - 2605736670972794746\n62631288\n1fx8bMb9/1d0q/PdLBgr5EVIs5kz2Shwpy4TFo8Uhis=\nTimestamp: 1706297730413822848\n\n\u2014 rekor.sigstore.dev wNI9ajBEAiAncCOrkCPoSXfFZt5jqL654xXX/OK7spQ8tkP9NTkexwIgY1HfG6TWamNSwNslbt5TXjgp4cxLiAYBG+n1/fpzu1U=\n"}}, "canonicalizedBody": "eyJhcGlWZXJzaW9uIjoiMC4wLjEiLCJraW5kIjoiaGFzaGVkcmVrb3JkIiwic3BlYyI6eyJkYXRhIjp7Imhhc2giOnsiYWxnb3JpdGhtIjoic2hhMjU2IiwidmFsdWUiOiI2MzI1NzliNTE4M2Q0MThmZjNkYzQ0Mzk5NGZkMzVlMGUxYTJhNmNlODlhMWVlMjJmZGNhNTc3ZjhlOGJjOWMzIn19LCJzaWduYXR1cmUiOnsiY29udGVudCI6Ik1FVUNJUURVdWt0dTZjckpBVHRRZ29Ra2FIb0hxRld0K1h2RGQ0UHZKbERRNWFLbVhBSWdDS1VPOHFjdUxUSTA4UER3NkYwUlNsaEJVamdtQ01FbFgrWENlU2FDanBnPSIsInB1YmxpY0tleSI6eyJjb250ZW50IjoiTFMwdExTMUNSVWRKVGlCRFJWSlVTVVpKUTBGVVJTMHRMUzB0Q2sxSlNVTjVha05EUVdzclowRjNTVUpCWjBsVlUyaEJjRTQyUkM5d01tNW9hMEZWV1ZoQlRscDFSSE53VlRRd2QwTm5XVWxMYjFwSmVtb3dSVUYzVFhjS1RucEZWazFDVFVkQk1WVkZRMmhOVFdNeWJHNWpNMUoyWTIxVmRWcEhWakpOVWpSM1NFRlpSRlpSVVVSRmVGWjZZVmRrZW1SSE9YbGFVekZ3WW01U2JBcGpiVEZzV2tkc2FHUkhWWGRJYUdOT1RXcFJkMDFVU1RKTlZHdDZUbFJKTlZkb1kwNU5hbEYzVFZSSk1rMVVhekJPVkVrMVYycEJRVTFHYTNkRmQxbElDa3R2V2tsNmFqQkRRVkZaU1V0dldrbDZhakJFUVZGalJGRm5RVVZVYkdjMk5IbEZjbTk2YkcxWWIydElTbU41VGpkUGFraEVRbVpKVXpGQ1dIWjFhMWdLWkRsUVRuaFpWRVJyY0RGcU5VNWtVVzV0SzNsSU5raHhkbGxNWTNsc2RtZGhOV2xKU3pkTFUzQnlVbGcyVFRrNVNUWlBRMEZYTkhkblowWnhUVUUwUndwQk1WVmtSSGRGUWk5M1VVVkJkMGxJWjBSQlZFSm5UbFpJVTFWRlJFUkJTMEpuWjNKQ1owVkdRbEZqUkVGNlFXUkNaMDVXU0ZFMFJVWm5VVlZsVFhwMkNtUXlSM2w2WVhwM1JFZG9TVzVOSzJwMFZURXpNRkZCZDBoM1dVUldVakJxUWtKbmQwWnZRVlV6T1ZCd2VqRlphMFZhWWpWeFRtcHdTMFpYYVhocE5Ga0tXa1E0ZDBkQldVUldVakJTUVZGSUwwSkJOSGRFU1VWTFdWVkNNR0p1YTNWa1J6a3pZbXBCYzBKbmIzSkNaMFZGUVZsUEwwMUJSVUpDUWpWdlpFaFNkd3BqZW05MlRESmtjR1JIYURGWmFUVnFZakl3ZG1KSE9XNWhWelIyWWpKR01XUkhaM2RNWjFsTFMzZFpRa0pCUjBSMmVrRkNRMEZSWjBSQ05XOWtTRkozQ21ONmIzWk1NbVJ3WkVkb01WbHBOV3BpTWpCMllrYzVibUZYTkhaaU1rWXhaRWRuZDJkWmIwZERhWE5IUVZGUlFqRnVhME5DUVVsRlprRlNOa0ZJWjBFS1pHZEVaRkJVUW5GNGMyTlNUVzFOV2tob2VWcGFlbU5EYjJ0d1pYVk9ORGh5Wml0SWFXNUxRVXg1Ym5WcVowRkJRVmt4U0ZKVFRWTkJRVUZGUVhkQ1NBcE5SVlZEU1ZGRVQwUnZNVzU0VWprckszSklaa0ZhVUN0QmVYRjNkMjFwYTBveU4xWmpTRkJPVUZVclIyNXhNMU0xZDBsblVtcEhTbkpwTXpKbWEwWjRDbmRtTkRBMVMyMXdNM3BPWTNncmN6ZHJSV1J4VmpOUk5rbFZlRlI0VVVWM1EyZFpTVXR2V2tsNmFqQkZRWGROUkdGUlFYZGFaMGw0UVUxQ1kyOVJRMDhLV0hReU5HTkNRbTgxYTBONlJqTnFMMU5KYm5KT1EySTBXV2wyVEhsWGNtbzFMM0pETlhsamFDdFNlV2QzTDBablNXNU5ObXRQVWs5MlFVbDRRVXBOYVFwVk5FOUdWMWRYUVdwaFpXUTRTVk14UkdoSE9WbEdUbHB1UjFka2QzazNSa1pvVEhkM1QyRTJjV1kwVVhOWVFXeFZhaXRaVUhseVVtdDNabVJ1WnowOUNpMHRMUzB0UlU1RUlFTkZVbFJKUmtsRFFWUkZMUzB0TFMwSyJ9fX19"}]}, "messageSignature": {"messageDigest": {"algorithm": "SHA2_256", "digest": "YyV5tRg9QY/z3EQ5lP014OGips6Joe4i/cpXf46LycM="}, "signature": "MEUCIQDUuktu6crJATtQgoQkaHoHqFWt+XvDd4PvJlDQ5aKmXAIgCKUO8qcuLTI08PDw6F0RSlhBUjgmCMElX+XCeSaCjpg="}} 
protobuf-specs-0.4.1/gen/pb-rust/sigstore-protobuf-specs/src/000077500000000000000000000000001477352757300242575ustar00rootroot00000000000000protobuf-specs-0.4.1/gen/pb-rust/sigstore-protobuf-specs/src/generated/000077500000000000000000000000001477352757300262155ustar00rootroot00000000000000protobuf-specs-0.4.1/gen/pb-rust/sigstore-protobuf-specs/src/generated/dev.sigstore.bundle.v1.rs000066400000000000000000000242071477352757300330010ustar00rootroot00000000000000// This file is @generated by prost-build. /// Various timestamped counter signatures over the artifacts signature. /// Currently only RFC3161 signatures are provided. More formats may be added /// in the future. #[derive( sigstore_protobuf_specs_derive::Deserialize_proto, sigstore_protobuf_specs_derive::Serialize_proto )] #[derive(::prost_reflect::ReflectMessage)] #[prost_reflect(message_name = "dev.sigstore.bundle.v1.TimestampVerificationData")] #[prost_reflect(file_descriptor_set_bytes = "crate::FILE_DESCRIPTOR_SET_BYTES")] #[derive(Clone, PartialEq, ::prost::Message)] pub struct TimestampVerificationData { /// A list of RFC3161 signed timestamps provided by the user. /// This can be used when the entry has not been stored on a /// transparency log, or in conjunction for a stronger trust model. /// Clients MUST verify the hashed message in the message imprint /// against the signature in the bundle. #[prost(message, repeated, tag = "1")] pub rfc3161_timestamps: ::prost::alloc::vec::Vec< super::super::common::v1::Rfc3161SignedTimestamp, >, } /// VerificationMaterial captures details on the materials used to verify /// signatures. This message may be embedded in a DSSE envelope as a signature /// extension. Specifically, the `ext` field of the extension will expect this /// message when the signature extension is for Sigstore. This is identified by /// the `kind` field in the extension, which must be set to /// application/vnd.dev.sigstore.verificationmaterial;version=0.1 for Sigstore. /// When used as a DSSE extension, if the `public_key` field is used to indicate /// the key identifier, it MUST match the `keyid` field of the signature the /// extension is attached to. #[derive( sigstore_protobuf_specs_derive::Deserialize_proto, sigstore_protobuf_specs_derive::Serialize_proto )] #[derive(::prost_reflect::ReflectMessage)] #[prost_reflect(message_name = "dev.sigstore.bundle.v1.VerificationMaterial")] #[prost_reflect(file_descriptor_set_bytes = "crate::FILE_DESCRIPTOR_SET_BYTES")] #[derive(Clone, PartialEq, ::prost::Message)] pub struct VerificationMaterial { /// An inclusion proof and an optional signed timestamp from the log. /// Client verification libraries MAY provide an option to support v0.1 /// bundles for backwards compatibility, which may contain an inclusion /// promise and not an inclusion proof. In this case, the client MUST /// validate the promise. /// Verifiers SHOULD NOT allow v0.1 bundles if they're used in an /// ecosystem which never produced them. #[prost(message, repeated, tag = "3")] pub tlog_entries: ::prost::alloc::vec::Vec< super::super::rekor::v1::TransparencyLogEntry, >, /// Timestamp may also come from /// tlog_entries.inclusion_promise.signed_entry_timestamp. #[prost(message, optional, tag = "4")] pub timestamp_verification_data: ::core::option::Option, /// The key material for verification purposes. /// /// This allows key material to be conveyed in one of three forms: /// /// 1. 
An unspecified public key identifier, for retrieving a key /// from an out-of-band mechanism (such as a keyring); /// /// 2. A sequence of one or more X.509 certificates, of which the first member /// MUST be a leaf certificate conveying the signing key. Subsequent members /// SHOULD be in issuing order, meaning that `n + 1` should be an issuer for `n`. /// /// Signers MUST NOT include root CA certificates in bundles, and SHOULD NOT /// include intermediate CA certificates that appear in an independent root of trust /// (such as the Public Good Instance's trusted root). /// /// Verifiers MUST validate the chain carefully to ensure that it chains up /// to a CA certificate that they independently trust. Verifiers SHOULD /// handle old or non-complying bundles that have superfluous intermediate and/or /// root CA certificates by either ignoring them or explicitly considering them /// untrusted for the purposes of chain building. /// /// 3. A single X.509 certificate, which MUST be a leaf certificate conveying /// the signing key. /// /// When used with the Public Good Instance (PGI) of Sigstore for "keyless" signing /// via Fulcio, form (1) MUST NOT be used, regardless of bundle version. Form (1) /// MAY be used with the PGI for self-managed keys. /// /// When used in a `0.1` or `0.2` bundle with the PGI and "keyless" signing, /// form (2) MUST be used. /// /// When used in a `0.3` bundle with the PGI and "keyless" signing, /// form (3) MUST be used. #[prost(oneof = "verification_material::Content", tags = "1, 2, 5")] pub content: ::core::option::Option, } /// Nested message and enum types in `VerificationMaterial`. pub mod verification_material { /// The key material for verification purposes. /// /// This allows key material to be conveyed in one of three forms: /// /// 1. An unspecified public key identifier, for retrieving a key /// from an out-of-band mechanism (such as a keyring); /// /// 2. A sequence of one or more X.509 certificates, of which the first member /// MUST be a leaf certificate conveying the signing key. Subsequent members /// SHOULD be in issuing order, meaning that `n + 1` should be an issuer for `n`. /// /// Signers MUST NOT include root CA certificates in bundles, and SHOULD NOT /// include intermediate CA certificates that appear in an independent root of trust /// (such as the Public Good Instance's trusted root). /// /// Verifiers MUST validate the chain carefully to ensure that it chains up /// to a CA certificate that they independently trust. Verifiers SHOULD /// handle old or non-complying bundles that have superfluous intermediate and/or /// root CA certificates by either ignoring them or explicitly considering them /// untrusted for the purposes of chain building. /// /// 3. A single X.509 certificate, which MUST be a leaf certificate conveying /// the signing key. /// /// When used with the Public Good Instance (PGI) of Sigstore for "keyless" signing /// via Fulcio, form (1) MUST NOT be used, regardless of bundle version. Form (1) /// MAY be used with the PGI for self-managed keys. /// /// When used in a `0.1` or `0.2` bundle with the PGI and "keyless" signing, /// form (2) MUST be used. /// /// When used in a `0.3` bundle with the PGI and "keyless" signing, /// form (3) MUST be used. 
#[derive( sigstore_protobuf_specs_derive::Deserialize_proto, sigstore_protobuf_specs_derive::Serialize_proto )] #[derive(Clone, PartialEq, ::prost::Oneof)] pub enum Content { #[prost(message, tag = "1")] PublicKey(super::super::super::common::v1::PublicKeyIdentifier), #[prost(message, tag = "2")] X509CertificateChain(super::super::super::common::v1::X509CertificateChain), #[prost(message, tag = "5")] Certificate(super::super::super::common::v1::X509Certificate), } } #[derive( sigstore_protobuf_specs_derive::Deserialize_proto, sigstore_protobuf_specs_derive::Serialize_proto )] #[derive(::prost_reflect::ReflectMessage)] #[prost_reflect(message_name = "dev.sigstore.bundle.v1.Bundle")] #[prost_reflect(file_descriptor_set_bytes = "crate::FILE_DESCRIPTOR_SET_BYTES")] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Bundle { /// MUST be application/vnd.dev.sigstore.bundle.v0.3+json when /// when encoded as JSON. /// Clients must to be able to accept media type using the previously /// defined formats: /// * application/vnd.dev.sigstore.bundle+json;version=0.1 /// * application/vnd.dev.sigstore.bundle+json;version=0.2 /// * application/vnd.dev.sigstore.bundle+json;version=0.3 #[prost(string, tag = "1")] pub media_type: ::prost::alloc::string::String, /// When a signer is identified by a X.509 certificate, a verifier MUST /// verify that the signature was computed at the time the certificate /// was valid as described in the Sigstore client spec: "Verification /// using a Bundle". /// <> /// If the verification material contains a public key identifier /// (key hint) and the `content` is a DSSE envelope, the key hints /// MUST be exactly the same in the verification material and in the /// DSSE envelope. #[prost(message, optional, tag = "2")] pub verification_material: ::core::option::Option, #[prost(oneof = "bundle::Content", tags = "3, 4")] pub content: ::core::option::Option, } /// Nested message and enum types in `Bundle`. pub mod bundle { #[derive( sigstore_protobuf_specs_derive::Deserialize_proto, sigstore_protobuf_specs_derive::Serialize_proto )] #[derive(Clone, PartialEq, ::prost::Oneof)] pub enum Content { #[prost(message, tag = "3")] MessageSignature(super::super::super::common::v1::MessageSignature), /// A DSSE envelope can contain arbitrary payloads. /// Verifiers must verify that the payload type is a /// supported and expected type. This is part of the DSSE /// protocol which is defined here: /// <> /// DSSE envelopes in a bundle MUST have exactly one signature. /// This is a limitation from the DSSE spec, as it can contain /// multiple signatures. There are two primary reasons: /// 1. It simplifies the verification logic and policy /// 2. The bundle (currently) can only contain a single /// instance of the required verification materials /// During verification a client MUST reject an envelope if /// the number of signatures is not equal to one. #[prost(message, tag = "4")] DsseEnvelope(super::super::super::super::super::io::intoto::Envelope), } } protobuf-specs-0.4.1/gen/pb-rust/sigstore-protobuf-specs/src/generated/dev.sigstore.common.v1.rs000066400000000000000000000474441477352757300330300ustar00rootroot00000000000000// This file is @generated by prost-build. /// HashOutput captures a digest of a 'message' (generic octet sequence) /// and the corresponding hash algorithm used. 
#[derive( sigstore_protobuf_specs_derive::Deserialize_proto, sigstore_protobuf_specs_derive::Serialize_proto )] #[derive(::prost_reflect::ReflectMessage)] #[prost_reflect(message_name = "dev.sigstore.common.v1.HashOutput")] #[prost_reflect(file_descriptor_set_bytes = "crate::FILE_DESCRIPTOR_SET_BYTES")] #[derive(Clone, PartialEq, ::prost::Message)] pub struct HashOutput { #[prost(enumeration = "HashAlgorithm", tag = "1")] pub algorithm: i32, /// This is the raw octets of the message digest as computed by /// the hash algorithm. #[prost(bytes = "vec", tag = "2")] pub digest: ::prost::alloc::vec::Vec, } /// MessageSignature stores the computed signature over a message. #[derive( sigstore_protobuf_specs_derive::Deserialize_proto, sigstore_protobuf_specs_derive::Serialize_proto )] #[derive(::prost_reflect::ReflectMessage)] #[prost_reflect(message_name = "dev.sigstore.common.v1.MessageSignature")] #[prost_reflect(file_descriptor_set_bytes = "crate::FILE_DESCRIPTOR_SET_BYTES")] #[derive(Clone, PartialEq, ::prost::Message)] pub struct MessageSignature { /// Message digest can be used to identify the artifact. /// Clients MUST NOT attempt to use this digest to verify the associated /// signature; it is intended solely for identification. #[prost(message, optional, tag = "1")] pub message_digest: ::core::option::Option, /// The raw bytes as returned from the signature algorithm. /// The signature algorithm (and so the format of the signature bytes) /// are determined by the contents of the 'verification_material', /// either a key-pair or a certificate. If using a certificate, the /// certificate contains the required information on the signature /// algorithm. /// When using a key pair, the algorithm MUST be part of the public /// key, which MUST be communicated out-of-band. #[prost(bytes = "vec", tag = "2")] pub signature: ::prost::alloc::vec::Vec, } /// LogId captures the identity of a transparency log. #[derive( sigstore_protobuf_specs_derive::Deserialize_proto, sigstore_protobuf_specs_derive::Serialize_proto )] #[derive(::prost_reflect::ReflectMessage)] #[prost_reflect(message_name = "dev.sigstore.common.v1.LogId")] #[prost_reflect(file_descriptor_set_bytes = "crate::FILE_DESCRIPTOR_SET_BYTES")] #[derive(Clone, PartialEq, ::prost::Message)] pub struct LogId { /// The unique identity of the log, represented by its public key. #[prost(bytes = "vec", tag = "1")] pub key_id: ::prost::alloc::vec::Vec, } /// This message holds a RFC 3161 timestamp. #[derive( sigstore_protobuf_specs_derive::Deserialize_proto, sigstore_protobuf_specs_derive::Serialize_proto )] #[derive(::prost_reflect::ReflectMessage)] #[prost_reflect(message_name = "dev.sigstore.common.v1.RFC3161SignedTimestamp")] #[prost_reflect(file_descriptor_set_bytes = "crate::FILE_DESCRIPTOR_SET_BYTES")] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Rfc3161SignedTimestamp { /// Signed timestamp is the DER encoded TimeStampResponse. /// See #[prost(bytes = "vec", tag = "1")] pub signed_timestamp: ::prost::alloc::vec::Vec, } #[derive( sigstore_protobuf_specs_derive::Deserialize_proto, sigstore_protobuf_specs_derive::Serialize_proto )] #[derive(::prost_reflect::ReflectMessage)] #[prost_reflect(message_name = "dev.sigstore.common.v1.PublicKey")] #[prost_reflect(file_descriptor_set_bytes = "crate::FILE_DESCRIPTOR_SET_BYTES")] #[derive(Clone, PartialEq, ::prost::Message)] pub struct PublicKey { /// DER-encoded public key, encoding method is specified by the /// key_details attribute. 
#[prost(bytes = "vec", optional, tag = "1")] pub raw_bytes: ::core::option::Option<::prost::alloc::vec::Vec>, /// Key encoding and signature algorithm to use for this key. #[prost(enumeration = "PublicKeyDetails", tag = "2")] pub key_details: i32, /// Optional validity period for this key, *inclusive* of the endpoints. #[prost(message, optional, tag = "3")] pub valid_for: ::core::option::Option, } /// PublicKeyIdentifier can be used to identify an (out of band) delivered /// key, to verify a signature. #[derive( sigstore_protobuf_specs_derive::Deserialize_proto, sigstore_protobuf_specs_derive::Serialize_proto )] #[derive(::prost_reflect::ReflectMessage)] #[prost_reflect(message_name = "dev.sigstore.common.v1.PublicKeyIdentifier")] #[prost_reflect(file_descriptor_set_bytes = "crate::FILE_DESCRIPTOR_SET_BYTES")] #[derive(Clone, PartialEq, ::prost::Message)] pub struct PublicKeyIdentifier { /// Optional unauthenticated hint on which key to use. /// The format of the hint must be agreed upon out of band by the /// signer and the verifiers, and so is not subject to this /// specification. /// Example use-case is to specify the public key to use, from a /// trusted key-ring. /// Implementors are RECOMMENDED to derive the value from the public /// key as described in RFC 6962. /// See: <> #[prost(string, tag = "1")] pub hint: ::prost::alloc::string::String, } /// An ASN.1 OBJECT IDENTIFIER #[derive( sigstore_protobuf_specs_derive::Deserialize_proto, sigstore_protobuf_specs_derive::Serialize_proto )] #[derive(::prost_reflect::ReflectMessage)] #[prost_reflect(message_name = "dev.sigstore.common.v1.ObjectIdentifier")] #[prost_reflect(file_descriptor_set_bytes = "crate::FILE_DESCRIPTOR_SET_BYTES")] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ObjectIdentifier { #[prost(int32, repeated, packed = "false", tag = "1")] pub id: ::prost::alloc::vec::Vec, } /// An OID and the corresponding (byte) value. #[derive( sigstore_protobuf_specs_derive::Deserialize_proto, sigstore_protobuf_specs_derive::Serialize_proto )] #[derive(::prost_reflect::ReflectMessage)] #[prost_reflect(message_name = "dev.sigstore.common.v1.ObjectIdentifierValuePair")] #[prost_reflect(file_descriptor_set_bytes = "crate::FILE_DESCRIPTOR_SET_BYTES")] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ObjectIdentifierValuePair { #[prost(message, optional, tag = "1")] pub oid: ::core::option::Option, #[prost(bytes = "vec", tag = "2")] pub value: ::prost::alloc::vec::Vec, } #[derive( sigstore_protobuf_specs_derive::Deserialize_proto, sigstore_protobuf_specs_derive::Serialize_proto )] #[derive(::prost_reflect::ReflectMessage)] #[prost_reflect(message_name = "dev.sigstore.common.v1.DistinguishedName")] #[prost_reflect(file_descriptor_set_bytes = "crate::FILE_DESCRIPTOR_SET_BYTES")] #[derive(Clone, PartialEq, ::prost::Message)] pub struct DistinguishedName { #[prost(string, tag = "1")] pub organization: ::prost::alloc::string::String, #[prost(string, tag = "2")] pub common_name: ::prost::alloc::string::String, } #[derive( sigstore_protobuf_specs_derive::Deserialize_proto, sigstore_protobuf_specs_derive::Serialize_proto )] #[derive(::prost_reflect::ReflectMessage)] #[prost_reflect(message_name = "dev.sigstore.common.v1.X509Certificate")] #[prost_reflect(file_descriptor_set_bytes = "crate::FILE_DESCRIPTOR_SET_BYTES")] #[derive(Clone, PartialEq, ::prost::Message)] pub struct X509Certificate { /// DER-encoded X.509 certificate. 
#[prost(bytes = "vec", tag = "1")] pub raw_bytes: ::prost::alloc::vec::Vec, } #[derive( sigstore_protobuf_specs_derive::Deserialize_proto, sigstore_protobuf_specs_derive::Serialize_proto )] #[derive(::prost_reflect::ReflectMessage)] #[prost_reflect(message_name = "dev.sigstore.common.v1.SubjectAlternativeName")] #[prost_reflect(file_descriptor_set_bytes = "crate::FILE_DESCRIPTOR_SET_BYTES")] #[derive(Clone, PartialEq, ::prost::Message)] pub struct SubjectAlternativeName { #[prost(enumeration = "SubjectAlternativeNameType", tag = "1")] pub r#type: i32, #[prost(oneof = "subject_alternative_name::Identity", tags = "2, 3")] pub identity: ::core::option::Option, } /// Nested message and enum types in `SubjectAlternativeName`. pub mod subject_alternative_name { #[derive( sigstore_protobuf_specs_derive::Deserialize_proto, sigstore_protobuf_specs_derive::Serialize_proto )] #[derive(Clone, PartialEq, ::prost::Oneof)] pub enum Identity { /// A regular expression describing the expected value for /// the SAN. #[prost(string, tag = "2")] Regexp(::prost::alloc::string::String), /// The exact value to match against. #[prost(string, tag = "3")] Value(::prost::alloc::string::String), } } /// A collection of X.509 certificates. /// /// This "chain" can be used in multiple contexts, such as providing a root CA /// certificate within a TUF root of trust or multiple untrusted certificates for /// the purpose of chain building. #[derive( sigstore_protobuf_specs_derive::Deserialize_proto, sigstore_protobuf_specs_derive::Serialize_proto )] #[derive(::prost_reflect::ReflectMessage)] #[prost_reflect(message_name = "dev.sigstore.common.v1.X509CertificateChain")] #[prost_reflect(file_descriptor_set_bytes = "crate::FILE_DESCRIPTOR_SET_BYTES")] #[derive(Clone, PartialEq, ::prost::Message)] pub struct X509CertificateChain { /// One or more DER-encoded certificates. /// /// In some contexts (such as `VerificationMaterial.x509_certificate_chain`), this sequence /// has an imposed order. Unless explicitly specified, there is otherwise no /// guaranteed order. #[prost(message, repeated, tag = "1")] pub certificates: ::prost::alloc::vec::Vec, } /// The time range is closed and includes both the start and end times, /// (i.e., \[start, end\]). /// End is optional to be able to capture a period that has started but /// has no known end. #[derive( sigstore_protobuf_specs_derive::Deserialize_proto, sigstore_protobuf_specs_derive::Serialize_proto )] #[derive(::prost_reflect::ReflectMessage)] #[prost_reflect(message_name = "dev.sigstore.common.v1.TimeRange")] #[prost_reflect(file_descriptor_set_bytes = "crate::FILE_DESCRIPTOR_SET_BYTES")] #[derive(Clone, Copy, PartialEq, ::prost::Message)] pub struct TimeRange { #[prost(message, optional, tag = "1")] pub start: ::core::option::Option<::prost_types::Timestamp>, #[prost(message, optional, tag = "2")] pub end: ::core::option::Option<::prost_types::Timestamp>, } /// Only a subset of the secure hash standard algorithms are supported. /// See <> for more /// details. /// UNSPECIFIED SHOULD not be used, primary reason for inclusion is to force /// any proto JSON serialization to emit the used hash algorithm, as default /// option is to *omit* the default value of an enum (which is the first /// value, represented by '0'. 
#[derive( sigstore_protobuf_specs_derive::Deserialize_proto, sigstore_protobuf_specs_derive::Serialize_proto )] #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] #[repr(i32)] pub enum HashAlgorithm { Unspecified = 0, Sha2256 = 1, Sha2384 = 2, Sha2512 = 3, Sha3256 = 4, Sha3384 = 5, } impl HashAlgorithm { /// String value of the enum field names used in the ProtoBuf definition. /// /// The values are not transformed in any way and thus are considered stable /// (if the ProtoBuf definition does not change) and safe for programmatic use. pub fn as_str_name(&self) -> &'static str { match self { Self::Unspecified => "HASH_ALGORITHM_UNSPECIFIED", Self::Sha2256 => "SHA2_256", Self::Sha2384 => "SHA2_384", Self::Sha2512 => "SHA2_512", Self::Sha3256 => "SHA3_256", Self::Sha3384 => "SHA3_384", } } /// Creates an enum from field names used in the ProtoBuf definition. pub fn from_str_name(value: &str) -> ::core::option::Option { match value { "HASH_ALGORITHM_UNSPECIFIED" => Some(Self::Unspecified), "SHA2_256" => Some(Self::Sha2256), "SHA2_384" => Some(Self::Sha2384), "SHA2_512" => Some(Self::Sha2512), "SHA3_256" => Some(Self::Sha3256), "SHA3_384" => Some(Self::Sha3384), _ => None, } } } /// Details of a specific public key, capturing the the key encoding method, /// and signature algorithm. /// /// PublicKeyDetails captures the public key/hash algorithm combinations /// recommended in the Sigstore ecosystem. /// /// This is modelled as a linear set as we want to provide a small number of /// opinionated options instead of allowing every possible permutation. /// /// Any changes to this enum MUST be reflected in the algorithm registry. /// See: docs/algorithm-registry.md /// /// To avoid the possibility of contradicting formats such as PKCS1 with /// ED25519 the valid permutations are listed as a linear set instead of a /// cartesian set (i.e one combined variable instead of two, one for encoding /// and one for the signature algorithm). #[derive( sigstore_protobuf_specs_derive::Deserialize_proto, sigstore_protobuf_specs_derive::Serialize_proto )] #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] #[repr(i32)] pub enum PublicKeyDetails { Unspecified = 0, /// RSA /// /// See RFC8017 Pkcs1RsaPkcs1v5 = 1, /// See RFC8017 Pkcs1RsaPss = 2, PkixRsaPkcs1v5 = 3, PkixRsaPss = 4, /// RSA public key in PKIX format, PKCS#1v1.5 signature PkixRsaPkcs1v152048Sha256 = 9, PkixRsaPkcs1v153072Sha256 = 10, PkixRsaPkcs1v154096Sha256 = 11, /// RSA public key in PKIX format, RSASSA-PSS signature /// /// See RFC4055 PkixRsaPss2048Sha256 = 16, PkixRsaPss3072Sha256 = 17, PkixRsaPss4096Sha256 = 18, /// ECDSA /// /// See RFC6979 PkixEcdsaP256HmacSha256 = 6, /// See NIST FIPS 186-4 PkixEcdsaP256Sha256 = 5, PkixEcdsaP384Sha384 = 12, PkixEcdsaP521Sha512 = 13, /// Ed 25519 /// /// See RFC8032 PkixEd25519 = 7, PkixEd25519Ph = 8, /// These algorithms are deprecated and should not be used, but they /// were/are being used by most Sigstore clients implementations. PkixEcdsaP384Sha256 = 19, PkixEcdsaP521Sha256 = 20, /// LMS and LM-OTS /// /// These keys and signatures may be used by private Sigstore /// deployments, but are not currently supported by the public /// good instance. /// /// USER WARNING: LMS and LM-OTS are both stateful signature schemes. /// Using them correctly requires discretion and careful consideration /// to ensure that individual secret keys are not used more than once. 
/// In addition, LM-OTS is a single-use scheme, meaning that it /// MUST NOT be used for more than one signature per LM-OTS key. /// If you cannot maintain these invariants, you MUST NOT use these /// schemes. LmsSha256 = 14, LmotsSha256 = 15, } impl PublicKeyDetails { /// String value of the enum field names used in the ProtoBuf definition. /// /// The values are not transformed in any way and thus are considered stable /// (if the ProtoBuf definition does not change) and safe for programmatic use. pub fn as_str_name(&self) -> &'static str { match self { Self::Unspecified => "PUBLIC_KEY_DETAILS_UNSPECIFIED", Self::Pkcs1RsaPkcs1v5 => "PKCS1_RSA_PKCS1V5", Self::Pkcs1RsaPss => "PKCS1_RSA_PSS", Self::PkixRsaPkcs1v5 => "PKIX_RSA_PKCS1V5", Self::PkixRsaPss => "PKIX_RSA_PSS", Self::PkixRsaPkcs1v152048Sha256 => "PKIX_RSA_PKCS1V15_2048_SHA256", Self::PkixRsaPkcs1v153072Sha256 => "PKIX_RSA_PKCS1V15_3072_SHA256", Self::PkixRsaPkcs1v154096Sha256 => "PKIX_RSA_PKCS1V15_4096_SHA256", Self::PkixRsaPss2048Sha256 => "PKIX_RSA_PSS_2048_SHA256", Self::PkixRsaPss3072Sha256 => "PKIX_RSA_PSS_3072_SHA256", Self::PkixRsaPss4096Sha256 => "PKIX_RSA_PSS_4096_SHA256", Self::PkixEcdsaP256HmacSha256 => "PKIX_ECDSA_P256_HMAC_SHA_256", Self::PkixEcdsaP256Sha256 => "PKIX_ECDSA_P256_SHA_256", Self::PkixEcdsaP384Sha384 => "PKIX_ECDSA_P384_SHA_384", Self::PkixEcdsaP521Sha512 => "PKIX_ECDSA_P521_SHA_512", Self::PkixEd25519 => "PKIX_ED25519", Self::PkixEd25519Ph => "PKIX_ED25519_PH", Self::PkixEcdsaP384Sha256 => "PKIX_ECDSA_P384_SHA_256", Self::PkixEcdsaP521Sha256 => "PKIX_ECDSA_P521_SHA_256", Self::LmsSha256 => "LMS_SHA256", Self::LmotsSha256 => "LMOTS_SHA256", } } /// Creates an enum from field names used in the ProtoBuf definition. pub fn from_str_name(value: &str) -> ::core::option::Option { match value { "PUBLIC_KEY_DETAILS_UNSPECIFIED" => Some(Self::Unspecified), "PKCS1_RSA_PKCS1V5" => Some(Self::Pkcs1RsaPkcs1v5), "PKCS1_RSA_PSS" => Some(Self::Pkcs1RsaPss), "PKIX_RSA_PKCS1V5" => Some(Self::PkixRsaPkcs1v5), "PKIX_RSA_PSS" => Some(Self::PkixRsaPss), "PKIX_RSA_PKCS1V15_2048_SHA256" => Some(Self::PkixRsaPkcs1v152048Sha256), "PKIX_RSA_PKCS1V15_3072_SHA256" => Some(Self::PkixRsaPkcs1v153072Sha256), "PKIX_RSA_PKCS1V15_4096_SHA256" => Some(Self::PkixRsaPkcs1v154096Sha256), "PKIX_RSA_PSS_2048_SHA256" => Some(Self::PkixRsaPss2048Sha256), "PKIX_RSA_PSS_3072_SHA256" => Some(Self::PkixRsaPss3072Sha256), "PKIX_RSA_PSS_4096_SHA256" => Some(Self::PkixRsaPss4096Sha256), "PKIX_ECDSA_P256_HMAC_SHA_256" => Some(Self::PkixEcdsaP256HmacSha256), "PKIX_ECDSA_P256_SHA_256" => Some(Self::PkixEcdsaP256Sha256), "PKIX_ECDSA_P384_SHA_384" => Some(Self::PkixEcdsaP384Sha384), "PKIX_ECDSA_P521_SHA_512" => Some(Self::PkixEcdsaP521Sha512), "PKIX_ED25519" => Some(Self::PkixEd25519), "PKIX_ED25519_PH" => Some(Self::PkixEd25519Ph), "PKIX_ECDSA_P384_SHA_256" => Some(Self::PkixEcdsaP384Sha256), "PKIX_ECDSA_P521_SHA_256" => Some(Self::PkixEcdsaP521Sha256), "LMS_SHA256" => Some(Self::LmsSha256), "LMOTS_SHA256" => Some(Self::LmotsSha256), _ => None, } } } #[derive( sigstore_protobuf_specs_derive::Deserialize_proto, sigstore_protobuf_specs_derive::Serialize_proto )] #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] #[repr(i32)] pub enum SubjectAlternativeNameType { Unspecified = 0, Email = 1, Uri = 2, /// OID 1.3.6.1.4.1.57264.1.7 /// See /// for more details. OtherName = 3, } impl SubjectAlternativeNameType { /// String value of the enum field names used in the ProtoBuf definition. 
/// /// The values are not transformed in any way and thus are considered stable /// (if the ProtoBuf definition does not change) and safe for programmatic use. pub fn as_str_name(&self) -> &'static str { match self { Self::Unspecified => "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED", Self::Email => "EMAIL", Self::Uri => "URI", Self::OtherName => "OTHER_NAME", } } /// Creates an enum from field names used in the ProtoBuf definition. pub fn from_str_name(value: &str) -> ::core::option::Option { match value { "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED" => Some(Self::Unspecified), "EMAIL" => Some(Self::Email), "URI" => Some(Self::Uri), "OTHER_NAME" => Some(Self::OtherName), _ => None, } } } protobuf-specs-0.4.1/gen/pb-rust/sigstore-protobuf-specs/src/generated/dev.sigstore.events.v1.rs000066400000000000000000000075021477352757300330330ustar00rootroot00000000000000// This file is @generated by prost-build. #[derive( sigstore_protobuf_specs_derive::Deserialize_proto, sigstore_protobuf_specs_derive::Serialize_proto )] #[derive(::prost_reflect::ReflectMessage)] #[prost_reflect(message_name = "dev.sigstore.events.v1.CloudEvent")] #[prost_reflect(file_descriptor_set_bytes = "crate::FILE_DESCRIPTOR_SET_BYTES")] #[derive(Clone, PartialEq, ::prost::Message)] pub struct CloudEvent { /// Required Attributes #[prost(string, tag = "1")] pub id: ::prost::alloc::string::String, /// URI-reference #[prost(string, tag = "2")] pub source: ::prost::alloc::string::String, #[prost(string, tag = "3")] pub spec_version: ::prost::alloc::string::String, #[prost(string, tag = "4")] pub r#type: ::prost::alloc::string::String, /// Optional & Extension Attributes #[prost(map = "string, message", tag = "5")] pub attributes: ::std::collections::HashMap< ::prost::alloc::string::String, cloud_event::CloudEventAttributeValue, >, /// -- CloudEvent Data (Bytes, Text, or Proto) #[prost(oneof = "cloud_event::Data", tags = "6, 7, 8")] pub data: ::core::option::Option, } /// Nested message and enum types in `CloudEvent`. pub mod cloud_event { #[derive( sigstore_protobuf_specs_derive::Deserialize_proto, sigstore_protobuf_specs_derive::Serialize_proto )] #[derive(::prost_reflect::ReflectMessage)] #[prost_reflect( message_name = "dev.sigstore.events.v1.CloudEvent.CloudEventAttributeValue" )] #[prost_reflect(file_descriptor_set_bytes = "crate::FILE_DESCRIPTOR_SET_BYTES")] #[derive(Clone, PartialEq, ::prost::Message)] pub struct CloudEventAttributeValue { #[prost( oneof = "cloud_event_attribute_value::Attr", tags = "1, 2, 3, 4, 5, 6, 7" )] pub attr: ::core::option::Option, } /// Nested message and enum types in `CloudEventAttributeValue`. 
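// Editorial note (illustrative example, not part of the prost-build output):
// a `CloudEventAttributeValue` is a thin wrapper around the `Attr` oneof
// declared in the module below, so an entry for the `attributes` map of
// `CloudEvent` is built by choosing one variant, e.g.:
//
//     let value = CloudEventAttributeValue {
//         attr: Some(cloud_event_attribute_value::Attr::CeString(
//             "example".to_string(), // hypothetical attribute value
//         )),
//     };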
pub mod cloud_event_attribute_value { #[derive( sigstore_protobuf_specs_derive::Deserialize_proto, sigstore_protobuf_specs_derive::Serialize_proto )] #[derive(Clone, PartialEq, ::prost::Oneof)] pub enum Attr { #[prost(bool, tag = "1")] CeBoolean(bool), #[prost(int32, tag = "2")] CeInteger(i32), #[prost(string, tag = "3")] CeString(::prost::alloc::string::String), #[prost(bytes, tag = "4")] CeBytes(::prost::alloc::vec::Vec), #[prost(string, tag = "5")] CeUri(::prost::alloc::string::String), #[prost(string, tag = "6")] CeUriRef(::prost::alloc::string::String), #[prost(message, tag = "7")] CeTimestamp(::prost_types::Timestamp), } } /// -- CloudEvent Data (Bytes, Text, or Proto) #[derive( sigstore_protobuf_specs_derive::Deserialize_proto, sigstore_protobuf_specs_derive::Serialize_proto )] #[derive(Clone, PartialEq, ::prost::Oneof)] pub enum Data { #[prost(bytes, tag = "6")] BinaryData(::prost::alloc::vec::Vec), #[prost(string, tag = "7")] TextData(::prost::alloc::string::String), #[prost(message, tag = "8")] ProtoData(::prost_types::Any), } } #[derive( sigstore_protobuf_specs_derive::Deserialize_proto, sigstore_protobuf_specs_derive::Serialize_proto )] #[derive(::prost_reflect::ReflectMessage)] #[prost_reflect(message_name = "dev.sigstore.events.v1.CloudEventBatch")] #[prost_reflect(file_descriptor_set_bytes = "crate::FILE_DESCRIPTOR_SET_BYTES")] #[derive(Clone, PartialEq, ::prost::Message)] pub struct CloudEventBatch { #[prost(message, repeated, tag = "1")] pub events: ::prost::alloc::vec::Vec, } protobuf-specs-0.4.1/gen/pb-rust/sigstore-protobuf-specs/src/generated/dev.sigstore.rekor.v1.rs000066400000000000000000000207431477352757300326530ustar00rootroot00000000000000// This file is @generated by prost-build. /// KindVersion contains the entry's kind and api version. #[derive( sigstore_protobuf_specs_derive::Deserialize_proto, sigstore_protobuf_specs_derive::Serialize_proto )] #[derive(::prost_reflect::ReflectMessage)] #[prost_reflect(message_name = "dev.sigstore.rekor.v1.KindVersion")] #[prost_reflect(file_descriptor_set_bytes = "crate::FILE_DESCRIPTOR_SET_BYTES")] #[derive(Clone, PartialEq, ::prost::Message)] pub struct KindVersion { /// Kind is the type of entry being stored in the log. /// See here for a list: #[prost(string, tag = "1")] pub kind: ::prost::alloc::string::String, /// The specific api version of the type. #[prost(string, tag = "2")] pub version: ::prost::alloc::string::String, } /// The checkpoint MUST contain an origin string as a unique log identifier, /// the tree size, and the root hash. It MAY also be followed by optional data, /// and clients MUST NOT assume optional data. The checkpoint MUST also contain /// a signature over the root hash (tree head). The checkpoint MAY contain additional /// signatures, but the first SHOULD be the signature from the log. Checkpoint contents /// are concatenated with newlines into a single string. /// The checkpoint format is described in /// /// and /// An example implementation can be found in #[derive( sigstore_protobuf_specs_derive::Deserialize_proto, sigstore_protobuf_specs_derive::Serialize_proto )] #[derive(::prost_reflect::ReflectMessage)] #[prost_reflect(message_name = "dev.sigstore.rekor.v1.Checkpoint")] #[prost_reflect(file_descriptor_set_bytes = "crate::FILE_DESCRIPTOR_SET_BYTES")] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Checkpoint { #[prost(string, tag = "1")] pub envelope: ::prost::alloc::string::String, } /// InclusionProof is the proof returned from the transparency log. 
Can /// be used for offline or online verification against the log. #[derive( sigstore_protobuf_specs_derive::Deserialize_proto, sigstore_protobuf_specs_derive::Serialize_proto )] #[derive(::prost_reflect::ReflectMessage)] #[prost_reflect(message_name = "dev.sigstore.rekor.v1.InclusionProof")] #[prost_reflect(file_descriptor_set_bytes = "crate::FILE_DESCRIPTOR_SET_BYTES")] #[derive(Clone, PartialEq, ::prost::Message)] pub struct InclusionProof { /// The index of the entry in the tree it was written to. #[prost(int64, tag = "1")] pub log_index: i64, /// The hash digest stored at the root of the merkle tree at the time /// the proof was generated. #[prost(bytes = "vec", tag = "2")] pub root_hash: ::prost::alloc::vec::Vec, /// The size of the merkle tree at the time the proof was generated. #[prost(int64, tag = "3")] pub tree_size: i64, /// A list of hashes required to compute the inclusion proof, sorted /// in order from leaf to root. /// Note that leaf and root hashes are not included. /// The root hash is available separately in this message, and the /// leaf hash should be calculated by the client. #[prost(bytes = "vec", repeated, tag = "4")] pub hashes: ::prost::alloc::vec::Vec<::prost::alloc::vec::Vec>, /// Signature of the tree head, as of the time of this proof was /// generated. See above info on 'Checkpoint' for more details. #[prost(message, optional, tag = "5")] pub checkpoint: ::core::option::Option, } /// The inclusion promise is calculated by Rekor. It's calculated as a /// signature over a canonical JSON serialization of the persisted entry, the /// log ID, log index and the integration timestamp. /// See /// The format of the signature depends on the transparency log's public key. /// If the signature algorithm requires a hash function and/or a signature /// scheme (e.g. RSA) those has to be retrieved out-of-band from the log's /// operators, together with the public key. /// This is used to verify the integration timestamp's value and that the log /// has promised to include the entry. #[derive( sigstore_protobuf_specs_derive::Deserialize_proto, sigstore_protobuf_specs_derive::Serialize_proto )] #[derive(::prost_reflect::ReflectMessage)] #[prost_reflect(message_name = "dev.sigstore.rekor.v1.InclusionPromise")] #[prost_reflect(file_descriptor_set_bytes = "crate::FILE_DESCRIPTOR_SET_BYTES")] #[derive(Clone, PartialEq, ::prost::Message)] pub struct InclusionPromise { #[prost(bytes = "vec", tag = "1")] pub signed_entry_timestamp: ::prost::alloc::vec::Vec, } /// TransparencyLogEntry captures all the details required from Rekor to /// reconstruct an entry, given that the payload is provided via other means. /// This type can easily be created from the existing response from Rekor. /// Future iterations could rely on Rekor returning the minimal set of /// attributes (excluding the payload) that are required for verifying the /// inclusion promise. The inclusion promise (called SignedEntryTimestamp in /// the response from Rekor) is similar to a Signed Certificate Timestamp /// as described here #[derive( sigstore_protobuf_specs_derive::Deserialize_proto, sigstore_protobuf_specs_derive::Serialize_proto )] #[derive(::prost_reflect::ReflectMessage)] #[prost_reflect(message_name = "dev.sigstore.rekor.v1.TransparencyLogEntry")] #[prost_reflect(file_descriptor_set_bytes = "crate::FILE_DESCRIPTOR_SET_BYTES")] #[derive(Clone, PartialEq, ::prost::Message)] pub struct TransparencyLogEntry { /// The global index of the entry, used when querying the log by index. 
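// Editorial note (illustrative sketch, not part of the prost-build output):
// a verifier typically pulls the inclusion proof and its checkpoint out of a
// `TransparencyLogEntry` before recomputing the Merkle root, roughly:
//
//     if let Some(proof) = &entry.inclusion_proof {
//         let expected_root = &proof.root_hash;             // root at proof time
//         let checkpoint_text = proof
//             .checkpoint
//             .as_ref()
//             .map(|c| c.envelope.as_str());                // signed checkpoint
//         // Recompute the root from the leaf hash plus `proof.hashes`
//         // and compare it with `expected_root` (Merkle math not shown).
//     }
//
// `entry` above is a hypothetical `&TransparencyLogEntry`.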
#[prost(int64, tag = "1")] pub log_index: i64, /// The unique identifier of the log. #[prost(message, optional, tag = "2")] pub log_id: ::core::option::Option, /// The kind (type) and version of the object associated with this /// entry. These values are required to construct the entry during /// verification. #[prost(message, optional, tag = "3")] pub kind_version: ::core::option::Option, /// The UNIX timestamp from the log when the entry was persisted. /// The integration time MUST NOT be trusted if inclusion_promise /// is omitted. #[prost(int64, tag = "4")] pub integrated_time: i64, /// The inclusion promise/signed entry timestamp from the log. /// Required for v0.1 bundles, and MUST be verified. /// Optional for >= v0.2 bundles if another suitable source of /// time is present (such as another source of signed time, /// or the current system time for long-lived certificates). /// MUST be verified if no other suitable source of time is present, /// and SHOULD be verified otherwise. #[prost(message, optional, tag = "5")] pub inclusion_promise: ::core::option::Option, /// The inclusion proof can be used for offline or online verification /// that the entry was appended to the log, and that the log has not been /// altered. #[prost(message, optional, tag = "6")] pub inclusion_proof: ::core::option::Option, /// Optional. The canonicalized transparency log entry, used to /// reconstruct the Signed Entry Timestamp (SET) during verification. /// The contents of this field are the same as the `body` field in /// a Rekor response, meaning that it does **not** include the "full" /// canonicalized form (of log index, ID, etc.) which are /// exposed as separate fields. The verifier is responsible for /// combining the `canonicalized_body`, `log_index`, `log_id`, /// and `integrated_time` into the payload that the SET's signature /// is generated over. /// This field is intended to be used in cases where the SET cannot be /// produced determinisitically (e.g. inconsistent JSON field ordering, /// differing whitespace, etc). /// /// If set, clients MUST verify that the signature referenced in the /// `canonicalized_body` matches the signature provided in the /// `Bundle.content`. /// If not set, clients are responsible for constructing an equivalent /// payload from other sources to verify the signature. #[prost(bytes = "vec", tag = "7")] pub canonicalized_body: ::prost::alloc::vec::Vec, } protobuf-specs-0.4.1/gen/pb-rust/sigstore-protobuf-specs/src/generated/dev.sigstore.trustroot.v1.rs000066400000000000000000000440511477352757300336140ustar00rootroot00000000000000// This file is @generated by prost-build. /// TransparencyLogInstance describes the immutable parameters from a /// transparency log. /// See /// for more details. /// The included parameters are the minimal set required to identify a log, /// and verify an inclusion proof/promise. #[derive( sigstore_protobuf_specs_derive::Deserialize_proto, sigstore_protobuf_specs_derive::Serialize_proto )] #[derive(::prost_reflect::ReflectMessage)] #[prost_reflect(message_name = "dev.sigstore.trustroot.v1.TransparencyLogInstance")] #[prost_reflect(file_descriptor_set_bytes = "crate::FILE_DESCRIPTOR_SET_BYTES")] #[derive(Clone, PartialEq, ::prost::Message)] pub struct TransparencyLogInstance { /// The base URL at which can be used to URLs for the client. #[prost(string, tag = "1")] pub base_url: ::prost::alloc::string::String, /// The hash algorithm used for the Merkle Tree. 
#[prost(enumeration = "super::super::common::v1::HashAlgorithm", tag = "2")] pub hash_algorithm: i32, /// The public key used to verify signatures generated by the log. /// This attribute contains the signature algorithm used by the log. #[prost(message, optional, tag = "3")] pub public_key: ::core::option::Option, /// The unique identifier for this transparency log. /// Represented as the SHA-256 hash of the log's public key, /// calculated over the DER encoding of the key represented as /// SubjectPublicKeyInfo. /// See #[prost(message, optional, tag = "4")] pub log_id: ::core::option::Option, /// The checkpoint key identifier for the log used in a checkpoint. /// Optional, not provided for logs that do not generate checkpoints. /// For logs that do generate checkpoints, if not set, assume /// log_id equals checkpoint_key_id. /// Follows the specification described here /// for ECDSA and Ed25519 signatures: /// /// For RSA signatures, the key ID will match the ECDSA format, the /// hashed DER-encoded SPKI public key. Publicly witnessed logs MUST NOT /// use RSA-signed checkpoints, since witnesses do not support /// RSA signatures. /// This is provided for convenience. Clients can also calculate the /// checkpoint key ID given the log's public key. /// SHOULD be set for logs generating Ed25519 signatures. /// SHOULD be 4 bytes long, as a truncated hash. #[prost(message, optional, tag = "5")] pub checkpoint_key_id: ::core::option::Option, } /// CertificateAuthority enlists the information required to identify which /// CA to use and perform signature verification. #[derive( sigstore_protobuf_specs_derive::Deserialize_proto, sigstore_protobuf_specs_derive::Serialize_proto )] #[derive(::prost_reflect::ReflectMessage)] #[prost_reflect(message_name = "dev.sigstore.trustroot.v1.CertificateAuthority")] #[prost_reflect(file_descriptor_set_bytes = "crate::FILE_DESCRIPTOR_SET_BYTES")] #[derive(Clone, PartialEq, ::prost::Message)] pub struct CertificateAuthority { /// The root certificate MUST be self-signed, and so the subject and /// issuer are the same. #[prost(message, optional, tag = "1")] pub subject: ::core::option::Option, /// The URI identifies the certificate authority. /// /// It is RECOMMENDED that the URI is the base URL for the certificate /// authority, that can be provided to any SDK/client provided /// by the certificate authority to interact with the certificate /// authority. #[prost(string, tag = "2")] pub uri: ::prost::alloc::string::String, /// The certificate chain for this CA. The last certificate in the chain /// MUST be the trust anchor. The trust anchor MAY be a self-signed root /// CA certificate or MAY be an intermediate CA certificate. #[prost(message, optional, tag = "3")] pub cert_chain: ::core::option::Option< super::super::common::v1::X509CertificateChain, >, /// The time the *entire* chain was valid. This is at max the /// longest interval when *all* certificates in the chain were valid, /// but it MAY be shorter. Clients MUST check timestamps against *both* /// the `valid_for` time range *and* the entire certificate chain. /// /// The TimeRange should be considered valid *inclusive* of the /// endpoints. #[prost(message, optional, tag = "4")] pub valid_for: ::core::option::Option, } /// TrustedRoot describes the client's complete set of trusted entities. /// How the TrustedRoot is populated is not specified, but can be a /// combination of many sources such as TUF repositories, files on disk etc. 
/// /// The TrustedRoot is not meant to be used for any artifact verification, only /// to capture the complete/global set of trusted verification materials. /// When verifying an artifact, based on the artifact and policies, a selection /// of keys/authorities are expected to be extracted and provided to the /// verification function. This way the set of keys/authorities can be kept to /// a minimal set by the policy to gain better control over what signatures /// that are allowed. /// /// The embedded transparency logs, CT logs, CAs and TSAs MUST include any /// previously used instance -- otherwise signatures made in the past cannot /// be verified. /// /// All the listed instances SHOULD be sorted by the 'valid_for' in ascending /// order, that is, the oldest instance first. Only the last instance is /// allowed to have their 'end' timestamp unset. All previous instances MUST /// have a closed interval of validity. The last instance MAY have a closed /// interval. Clients MUST accept instances that overlaps in time, if not /// clients may experience problems during rotations of verification /// materials. /// /// To be able to manage planned rotations of either transparency logs or /// certificate authorities, clienst MUST accept lists of instances where /// the last instance have a 'valid_for' that belongs to the future. /// This should not be a problem as clients SHOULD first seek the trust root /// for a suitable instance before creating a per artifact trust root (that /// is, a sub-set of the complete trust root) that is used for verification. #[derive( sigstore_protobuf_specs_derive::Deserialize_proto, sigstore_protobuf_specs_derive::Serialize_proto )] #[derive(::prost_reflect::ReflectMessage)] #[prost_reflect(message_name = "dev.sigstore.trustroot.v1.TrustedRoot")] #[prost_reflect(file_descriptor_set_bytes = "crate::FILE_DESCRIPTOR_SET_BYTES")] #[derive(Clone, PartialEq, ::prost::Message)] pub struct TrustedRoot { /// MUST be application/vnd.dev.sigstore.trustedroot.v0.1+json /// when encoded as JSON. /// Clients MUST be able to process and parse content with the media /// type defined in the old format: /// application/vnd.dev.sigstore.trustedroot+json;version=0.1 #[prost(string, tag = "1")] pub media_type: ::prost::alloc::string::String, /// A set of trusted Rekor servers. #[prost(message, repeated, tag = "2")] pub tlogs: ::prost::alloc::vec::Vec, /// A set of trusted certificate authorities (e.g Fulcio), and any /// intermediate certificates they provide. /// If a CA is issuing multiple intermediate certificate, each /// combination shall be represented as separate chain. I.e, a single /// root cert may appear in multiple chains but with different /// intermediate and/or leaf certificates. /// The certificates are intended to be used for verifying artifact /// signatures. #[prost(message, repeated, tag = "3")] pub certificate_authorities: ::prost::alloc::vec::Vec, /// A set of trusted certificate transparency logs. #[prost(message, repeated, tag = "4")] pub ctlogs: ::prost::alloc::vec::Vec, /// A set of trusted timestamping authorities. #[prost(message, repeated, tag = "5")] pub timestamp_authorities: ::prost::alloc::vec::Vec, } /// SigningConfig represents the trusted entities/state needed by Sigstore /// signing. In particular, it primarily contains service URLs that a Sigstore /// signer may need to connect to for the online aspects of signing. 
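// Editorial note (illustrative sketch, not part of the prost-build output):
// the `Serialize_proto`/`Deserialize_proto` derives on these types are intended
// to wire them up to serde using protobuf JSON field names, so -- assuming
// `serde_json` is available -- a trusted root document would typically be
// parsed along these lines:
//
//     let trusted_root: TrustedRoot = serde_json::from_str(trusted_root_json)?;
//     assert!(trusted_root
//         .media_type
//         .starts_with("application/vnd.dev.sigstore.trustedroot"));
//
// `trusted_root_json` above is a hypothetical `&str` holding the JSON document.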
#[derive( sigstore_protobuf_specs_derive::Deserialize_proto, sigstore_protobuf_specs_derive::Serialize_proto )] #[derive(::prost_reflect::ReflectMessage)] #[prost_reflect(message_name = "dev.sigstore.trustroot.v1.SigningConfig")] #[prost_reflect(file_descriptor_set_bytes = "crate::FILE_DESCRIPTOR_SET_BYTES")] #[derive(Clone, PartialEq, ::prost::Message)] pub struct SigningConfig { /// MUST be application/vnd.dev.sigstore.signingconfig.v0.2+json /// Clients MAY choose to also support /// application/vnd.dev.sigstore.signingconfig.v0.1+json #[prost(string, tag = "5")] pub media_type: ::prost::alloc::string::String, /// URLs to Fulcio-compatible CAs, capable of receiving /// Certificate Signing Requests (CSRs) and responding with /// issued certificates. /// /// These URLs MUST be the "base" URL for the CAs, which clients /// should construct an appropriate CSR endpoint on top of. /// For example, if a CA URL is ` then /// the client MAY construct the CSR endpoint as /// ` /// /// Clients MUST select only one Service with the highest API version /// that the client is compatible with, that is within its /// validity period, and has the newest validity start date. /// Client SHOULD select the first Service that meets this requirement. /// All listed Services SHOULD be sorted by the `valid_for` window in /// descending order, with the newest instance first. #[prost(message, repeated, tag = "6")] pub ca_urls: ::prost::alloc::vec::Vec, /// URLs to OpenID Connect identity providers. /// /// These URLs MUST be the "base" URLs for the OIDC IdPs, which clients /// should perform well-known OpenID Connect discovery against. /// /// Clients MUST select only one Service with the highest API version /// that the client is compatible with, that is within its /// validity period, and has the newest validity start date. /// Client SHOULD select the first Service that meets this requirement. /// All listed Services SHOULD be sorted by the `valid_for` window in /// descending order, with the newest instance first. #[prost(message, repeated, tag = "7")] pub oidc_urls: ::prost::alloc::vec::Vec, /// URLs to Rekor transparency logs. /// /// These URL MUST be the "base" URLs for the transparency logs, /// which clients should construct appropriate API endpoints on top of. /// /// Clients MUST select Services with the highest API version /// that the client is compatible with, that are within its /// validity period, and have the newest validity start dates. /// All listed Services SHOULD be sorted by the `valid_for` window in /// descending order, with the newest instance first. /// /// Clients MUST select Services based on the selector value of /// `rekor_tlog_config`. #[prost(message, repeated, tag = "8")] pub rekor_tlog_urls: ::prost::alloc::vec::Vec, /// Specifies how a client should select the set of Rekor transparency /// logs to write to. #[prost(message, optional, tag = "9")] pub rekor_tlog_config: ::core::option::Option, /// URLs to RFC 3161 Time Stamping Authorities (TSA). /// /// These URLs MUST be the *full* URL for the TSA, meaning that it /// should be suitable for submitting Time Stamp Requests (TSRs) to /// via HTTP, per RFC 3161. /// /// Clients MUST select Services with the highest API version /// that the client is compatible with, that are within its /// validity period, and have the newest validity start dates. /// All listed Services SHOULD be sorted by the `valid_for` window in /// descending order, with the newest instance first. 
/// /// Clients MUST select Services based on the selector value of /// `tsa_config`. #[prost(message, repeated, tag = "10")] pub tsa_urls: ::prost::alloc::vec::Vec, /// Specifies how a client should select the set of TSAs to request /// signed timestamps from. #[prost(message, optional, tag = "11")] pub tsa_config: ::core::option::Option, } /// Service represents an instance of a service that is a part of Sigstore infrastructure. /// Clients MUST use the API version hint to determine the service with the /// highest API version that the client is compatible with. Clients MUST also /// only connect to services within the specified validity period and that has the /// newest validity start date. #[derive( sigstore_protobuf_specs_derive::Deserialize_proto, sigstore_protobuf_specs_derive::Serialize_proto )] #[derive(::prost_reflect::ReflectMessage)] #[prost_reflect(message_name = "dev.sigstore.trustroot.v1.Service")] #[prost_reflect(file_descriptor_set_bytes = "crate::FILE_DESCRIPTOR_SET_BYTES")] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Service { /// URL of the service. MUST include scheme and authority. MAY include path. #[prost(string, tag = "1")] pub url: ::prost::alloc::string::String, /// Specifies the major API version. A value of 0 represents a service that /// has not yet been released. #[prost(uint32, tag = "2")] pub major_api_version: u32, /// Validity period of a service. A service that has only a start date /// SHOULD be considered the most recent instance of that service, but /// the client MUST NOT assume there is only one valid instance. /// The TimeRange MUST be considered valid *inclusive* of the /// endpoints. #[prost(message, optional, tag = "3")] pub valid_for: ::core::option::Option, } /// ServiceConfiguration specifies how a client should select a set of /// Services to connect to, along with a count when a specific number /// of Services is requested. #[derive( sigstore_protobuf_specs_derive::Deserialize_proto, sigstore_protobuf_specs_derive::Serialize_proto )] #[derive(::prost_reflect::ReflectMessage)] #[prost_reflect(message_name = "dev.sigstore.trustroot.v1.ServiceConfiguration")] #[prost_reflect(file_descriptor_set_bytes = "crate::FILE_DESCRIPTOR_SET_BYTES")] #[derive(Clone, Copy, PartialEq, ::prost::Message)] pub struct ServiceConfiguration { /// How a client should select a set of Services to connect to. #[prost(enumeration = "ServiceSelector", tag = "1")] pub selector: i32, /// count specifies the number of Services the client should use. /// Only used when selector is set to EXACT, and count MUST be greater /// than 0. count MUST be less than or equal to the number of Services. #[prost(uint32, tag = "2")] pub count: u32, } /// ClientTrustConfig describes the complete state needed by a client /// to perform both signing and verification operations against a particular /// instance of Sigstore. #[derive( sigstore_protobuf_specs_derive::Deserialize_proto, sigstore_protobuf_specs_derive::Serialize_proto )] #[derive(::prost_reflect::ReflectMessage)] #[prost_reflect(message_name = "dev.sigstore.trustroot.v1.ClientTrustConfig")] #[prost_reflect(file_descriptor_set_bytes = "crate::FILE_DESCRIPTOR_SET_BYTES")] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ClientTrustConfig { /// MUST be application/vnd.dev.sigstore.clienttrustconfig.v0.1+json #[prost(string, tag = "1")] pub media_type: ::prost::alloc::string::String, /// The root of trust, which MUST be present. 
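// Editorial note (illustrative example, not part of the prost-build output):
// a client usually splits a `ClientTrustConfig` into its verification and
// signing halves up front; both fields are optional in the generated type even
// though the spec says they MUST be present:
//
//     let trusted_root = config.trusted_root.expect("trusted_root MUST be present");
//     let signing_config = config.signing_config.expect("signing_config MUST be present");
//
// `config` above is a hypothetical `ClientTrustConfig` value.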
#[prost(message, optional, tag = "2")] pub trusted_root: ::core::option::Option, /// Configuration for signing clients, which MUST be present. #[prost(message, optional, tag = "3")] pub signing_config: ::core::option::Option, } /// ServiceSelector specifies how a client SHOULD select a set of /// Services to connect to. A client SHOULD throw an error if /// the value is SERVICE_SELECTOR_UNDEFINED. #[derive( sigstore_protobuf_specs_derive::Deserialize_proto, sigstore_protobuf_specs_derive::Serialize_proto )] #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] #[repr(i32)] pub enum ServiceSelector { Undefined = 0, /// Clients SHOULD select all Services based on supported API version /// and validity window. All = 1, /// Clients SHOULD select one Service based on supported API version /// and validity window. It is up to the client implementation to /// decide how to select the Service, e.g. random or round-robin. Any = 2, /// Clients SHOULD select a specific number of Services based on /// supported API version and validity window, using the provided /// `count`. It is up to the client implementation to decide how to /// select the Service, e.g. random or round-robin. Exact = 3, } impl ServiceSelector { /// String value of the enum field names used in the ProtoBuf definition. /// /// The values are not transformed in any way and thus are considered stable /// (if the ProtoBuf definition does not change) and safe for programmatic use. pub fn as_str_name(&self) -> &'static str { match self { Self::Undefined => "SERVICE_SELECTOR_UNDEFINED", Self::All => "ALL", Self::Any => "ANY", Self::Exact => "EXACT", } } /// Creates an enum from field names used in the ProtoBuf definition. pub fn from_str_name(value: &str) -> ::core::option::Option { match value { "SERVICE_SELECTOR_UNDEFINED" => Some(Self::Undefined), "ALL" => Some(Self::All), "ANY" => Some(Self::Any), "EXACT" => Some(Self::Exact), _ => None, } } } dev.sigstore.verification.v1.rs000066400000000000000000000316121477352757300341310ustar00rootroot00000000000000protobuf-specs-0.4.1/gen/pb-rust/sigstore-protobuf-specs/src/generated// This file is @generated by prost-build. /// The identity of a X.509 Certificate signer. #[derive( sigstore_protobuf_specs_derive::Deserialize_proto, sigstore_protobuf_specs_derive::Serialize_proto )] #[derive(::prost_reflect::ReflectMessage)] #[prost_reflect(message_name = "dev.sigstore.verification.v1.CertificateIdentity")] #[prost_reflect(file_descriptor_set_bytes = "crate::FILE_DESCRIPTOR_SET_BYTES")] #[derive(Clone, PartialEq, ::prost::Message)] pub struct CertificateIdentity { /// The X.509v3 issuer extension (OID 1.3.6.1.4.1.57264.1.1) #[prost(string, tag = "1")] pub issuer: ::prost::alloc::string::String, #[prost(message, optional, tag = "2")] pub san: ::core::option::Option, /// An unordered list of OIDs that must be verified. /// All OID/values provided in this list MUST exactly match against /// the values in the certificate for verification to be successful. 
#[prost(message, repeated, tag = "3")] pub oids: ::prost::alloc::vec::Vec< super::super::common::v1::ObjectIdentifierValuePair, >, } #[derive( sigstore_protobuf_specs_derive::Deserialize_proto, sigstore_protobuf_specs_derive::Serialize_proto )] #[derive(::prost_reflect::ReflectMessage)] #[prost_reflect(message_name = "dev.sigstore.verification.v1.CertificateIdentities")] #[prost_reflect(file_descriptor_set_bytes = "crate::FILE_DESCRIPTOR_SET_BYTES")] #[derive(Clone, PartialEq, ::prost::Message)] pub struct CertificateIdentities { #[prost(message, repeated, tag = "1")] pub identities: ::prost::alloc::vec::Vec, } #[derive( sigstore_protobuf_specs_derive::Deserialize_proto, sigstore_protobuf_specs_derive::Serialize_proto )] #[derive(::prost_reflect::ReflectMessage)] #[prost_reflect(message_name = "dev.sigstore.verification.v1.PublicKeyIdentities")] #[prost_reflect(file_descriptor_set_bytes = "crate::FILE_DESCRIPTOR_SET_BYTES")] #[derive(Clone, PartialEq, ::prost::Message)] pub struct PublicKeyIdentities { #[prost(message, repeated, tag = "1")] pub public_keys: ::prost::alloc::vec::Vec, } /// A light-weight set of options/policies for identifying trusted signers, /// used during verification of a single artifact. #[derive( sigstore_protobuf_specs_derive::Deserialize_proto, sigstore_protobuf_specs_derive::Serialize_proto )] #[derive(::prost_reflect::ReflectMessage)] #[prost_reflect( message_name = "dev.sigstore.verification.v1.ArtifactVerificationOptions" )] #[prost_reflect(file_descriptor_set_bytes = "crate::FILE_DESCRIPTOR_SET_BYTES")] #[derive(Clone, PartialEq, ::prost::Message)] pub struct ArtifactVerificationOptions { /// Optional options for artifact transparency log verification. /// If none is provided, the default verification options are: /// Threshold: 1 /// Online verification: false /// Disable: false #[prost(message, optional, tag = "3")] pub tlog_options: ::core::option::Option, /// Optional options for certificate transparency log verification. /// If none is provided, the default verification options are: /// Threshold: 1 /// Disable: false #[prost(message, optional, tag = "4")] pub ctlog_options: ::core::option::Option< artifact_verification_options::CtlogOptions, >, /// Optional options for certificate signed timestamp verification. /// If none is provided, the default verification options are: /// Threshold: 0 /// Disable: true #[prost(message, optional, tag = "5")] pub tsa_options: ::core::option::Option< artifact_verification_options::TimestampAuthorityOptions, >, /// Optional options for integrated timestamp verification. /// If none is provided, the default verification options are: /// Threshold: 0 /// Disable: true #[prost(message, optional, tag = "6")] pub integrated_ts_options: ::core::option::Option< artifact_verification_options::TlogIntegratedTimestampOptions, >, /// Optional options for observed timestamp verification. /// If none is provided, the default verification options are: /// Threshold 1 /// Disable: false #[prost(message, optional, tag = "7")] pub observer_options: ::core::option::Option< artifact_verification_options::ObserverTimestampOptions, >, /// At least one identity MUST be provided. Providing zero identities /// is an error. If at least one provided identity is found as a /// signer, the verification is considered successful. #[prost(oneof = "artifact_verification_options::Signers", tags = "1, 2")] pub signers: ::core::option::Option, } /// Nested message and enum types in `ArtifactVerificationOptions`. 
pub mod artifact_verification_options { #[derive( sigstore_protobuf_specs_derive::Deserialize_proto, sigstore_protobuf_specs_derive::Serialize_proto )] #[derive(::prost_reflect::ReflectMessage)] #[prost_reflect( message_name = "dev.sigstore.verification.v1.ArtifactVerificationOptions.TlogOptions" )] #[prost_reflect(file_descriptor_set_bytes = "crate::FILE_DESCRIPTOR_SET_BYTES")] #[derive(Clone, Copy, PartialEq, ::prost::Message)] pub struct TlogOptions { /// Number of transparency logs the entry must appear on. #[prost(int32, tag = "1")] pub threshold: i32, /// Perform an online inclusion proof. #[prost(bool, tag = "2")] pub perform_online_verification: bool, /// Disable verification for transparency logs. #[prost(bool, tag = "3")] pub disable: bool, } #[derive( sigstore_protobuf_specs_derive::Deserialize_proto, sigstore_protobuf_specs_derive::Serialize_proto )] #[derive(::prost_reflect::ReflectMessage)] #[prost_reflect( message_name = "dev.sigstore.verification.v1.ArtifactVerificationOptions.CtlogOptions" )] #[prost_reflect(file_descriptor_set_bytes = "crate::FILE_DESCRIPTOR_SET_BYTES")] #[derive(Clone, Copy, PartialEq, ::prost::Message)] pub struct CtlogOptions { /// The number of ct transparency logs the certificate must /// appear on. #[prost(int32, tag = "1")] pub threshold: i32, /// Disable ct transparency log verification #[prost(bool, tag = "3")] pub disable: bool, } #[derive( sigstore_protobuf_specs_derive::Deserialize_proto, sigstore_protobuf_specs_derive::Serialize_proto )] #[derive(::prost_reflect::ReflectMessage)] #[prost_reflect( message_name = "dev.sigstore.verification.v1.ArtifactVerificationOptions.TimestampAuthorityOptions" )] #[prost_reflect(file_descriptor_set_bytes = "crate::FILE_DESCRIPTOR_SET_BYTES")] #[derive(Clone, Copy, PartialEq, ::prost::Message)] pub struct TimestampAuthorityOptions { /// The number of signed timestamps that are expected. #[prost(int32, tag = "1")] pub threshold: i32, /// Disable signed timestamp verification. #[prost(bool, tag = "2")] pub disable: bool, } #[derive( sigstore_protobuf_specs_derive::Deserialize_proto, sigstore_protobuf_specs_derive::Serialize_proto )] #[derive(::prost_reflect::ReflectMessage)] #[prost_reflect( message_name = "dev.sigstore.verification.v1.ArtifactVerificationOptions.TlogIntegratedTimestampOptions" )] #[prost_reflect(file_descriptor_set_bytes = "crate::FILE_DESCRIPTOR_SET_BYTES")] #[derive(Clone, Copy, PartialEq, ::prost::Message)] pub struct TlogIntegratedTimestampOptions { /// The number of integrated timestamps that are expected. #[prost(int32, tag = "1")] pub threshold: i32, /// Disable integrated timestamp verification. #[prost(bool, tag = "2")] pub disable: bool, } #[derive( sigstore_protobuf_specs_derive::Deserialize_proto, sigstore_protobuf_specs_derive::Serialize_proto )] #[derive(::prost_reflect::ReflectMessage)] #[prost_reflect( message_name = "dev.sigstore.verification.v1.ArtifactVerificationOptions.ObserverTimestampOptions" )] #[prost_reflect(file_descriptor_set_bytes = "crate::FILE_DESCRIPTOR_SET_BYTES")] #[derive(Clone, Copy, PartialEq, ::prost::Message)] pub struct ObserverTimestampOptions { /// The number of external observers of the timestamp. /// This is a union of RFC3161 signed timestamps, and /// integrated timestamps from a transparency log, that /// could include additional timestamp sources in the /// future. #[prost(int32, tag = "1")] pub threshold: i32, /// Disable observer timestamp verification. #[prost(bool, tag = "2")] pub disable: bool, } /// At least one identity MUST be provided. 
Providing zero identities /// is an error. If at least one provided identity is found as a /// signer, the verification is considered successful. #[derive( sigstore_protobuf_specs_derive::Deserialize_proto, sigstore_protobuf_specs_derive::Serialize_proto )] #[derive(Clone, PartialEq, ::prost::Oneof)] pub enum Signers { #[prost(message, tag = "1")] CertificateIdentities(super::CertificateIdentities), /// To simplify verification implementation, the logic for /// bundle verification should be implemented as a /// higher-order function, where one of argument should be an /// interface over the set of trusted public keys, like this: /// `Verify(bytes artifact, bytes signature, string key_id)`. /// This way the caller is in full control of mapping the /// identified (or hinted) key in the bundle to one of the /// trusted keys, as this process is inherently application /// specific. #[prost(message, tag = "2")] PublicKeys(super::PublicKeyIdentities), } } #[derive( sigstore_protobuf_specs_derive::Deserialize_proto, sigstore_protobuf_specs_derive::Serialize_proto )] #[derive(::prost_reflect::ReflectMessage)] #[prost_reflect(message_name = "dev.sigstore.verification.v1.Artifact")] #[prost_reflect(file_descriptor_set_bytes = "crate::FILE_DESCRIPTOR_SET_BYTES")] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Artifact { #[prost(oneof = "artifact::Data", tags = "1, 2, 3")] pub data: ::core::option::Option, } /// Nested message and enum types in `Artifact`. pub mod artifact { #[derive( sigstore_protobuf_specs_derive::Deserialize_proto, sigstore_protobuf_specs_derive::Serialize_proto )] #[derive(Clone, PartialEq, ::prost::Oneof)] pub enum Data { /// Location of the artifact #[prost(string, tag = "1")] ArtifactUri(::prost::alloc::string::String), /// The raw bytes of the artifact #[prost(bytes, tag = "2")] Artifact(::prost::alloc::vec::Vec), /// Digest of the artifact. SHOULD NOT be used when verifying an /// in-toto attestation as the subject digest cannot be /// reconstructed. This option will not work with Ed25519 /// signatures, use Ed25519Ph or another algorithm instead. #[prost(message, tag = "3")] ArtifactDigest(super::super::super::common::v1::HashOutput), } } /// Input captures all that is needed to call the bundle verification method, /// to verify a single artifact referenced by the bundle. #[derive( sigstore_protobuf_specs_derive::Deserialize_proto, sigstore_protobuf_specs_derive::Serialize_proto )] #[derive(::prost_reflect::ReflectMessage)] #[prost_reflect(message_name = "dev.sigstore.verification.v1.Input")] #[prost_reflect(file_descriptor_set_bytes = "crate::FILE_DESCRIPTOR_SET_BYTES")] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Input { /// The verification materials provided during a bundle verification. /// The running process is usually preloaded with a "global" /// dev.sisgtore.trustroot.TrustedRoot.v1 instance. Prior to /// verifying an artifact (i.e a bundle), and/or based on current /// policy, some selection is expected to happen, to filter out the /// exact certificate authority to use, which transparency logs are /// relevant etc. The result should b ecaptured in the /// `artifact_trust_root`. 
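// Editorial note (illustrative sketch, not part of the prost-build output):
// assembling the verification input for a single bundle, assuming the
// per-artifact trust root, options, and bundle have already been selected
// (`artifact_trust_root`, `options`, and `bundle` are hypothetical variables):
//
//     let input = Input {
//         artifact_trust_root: Some(artifact_trust_root),
//         artifact_verification_options: Some(options),
//         bundle: Some(bundle),
//         artifact: Some(Artifact {
//             data: Some(artifact::Data::ArtifactUri(
//                 "file:///path/to/artifact".to_string(), // hypothetical location
//             )),
//         }),
//     };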
#[prost(message, optional, tag = "1")] pub artifact_trust_root: ::core::option::Option< super::super::trustroot::v1::TrustedRoot, >, #[prost(message, optional, tag = "2")] pub artifact_verification_options: ::core::option::Option< ArtifactVerificationOptions, >, #[prost(message, optional, tag = "3")] pub bundle: ::core::option::Option, /// If the bundle contains a message signature, the artifact must be /// provided. #[prost(message, optional, tag = "4")] pub artifact: ::core::option::Option, } protobuf-specs-0.4.1/gen/pb-rust/sigstore-protobuf-specs/src/generated/file_descriptor_set.bin000066400000000000000000004341301477352757300327440ustar00rootroot00000000000000 µ envelope.proto io.intoto"| Envelope payload ( Rpayload payloadType ( R payloadType4 signatures ( 2.io.intoto.SignatureR signatures"3 Signature sig ( Rsig keyid ( RkeyidBDZ1github.com/sigstore/protobuf-specs/gen/pb-go/dsseêSigstore::DSSEJ– 3 •  2s https://raw.githubusercontent.com/secure-systems-lab/dsse/9c813476bd36de70a5738c72e784f123ecea16af/envelope.proto 2• Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.    H  H  ' -' 9 )- An authenticated message of arbitrary type.  U H Message to be signed. (In JSON, this is encoded as base64.) REQUIRED.    T G String unambiguously identifying how to interpret payload. REQUIRED.     § ($™ Signature over: PAE(type, payload) Where PAE is defined as: PAE(type, payload) = "DSSEv1" + SP + LEN(type) + SP + type + SP + LEN(payload) + SP + payload + = concatenation SP = ASCII space [0x20] "DSSEv1" = ASCII [0x44, 0x53, 0x53, 0x45, 0x76, 0x31] LEN(s) = ASCII decimal encoding of the byte length of s, with no leading zeros REQUIRED (length >= 1). ( (  ( ("# +3 + Q .D Signature itself. (In JSON, this is encoded as base64.) REQUIRED. . . . W 2J *Unauthenticated* hint identifying which public key was used. OPTIONAL. 2 2  2bproto3 Ä. google/protobuf/any.protogoogle.protobuf"6 Any type_url ( RtypeUrl value ( RvalueBv com.google.protobufBAnyProtoPZ,google.golang.org/protobuf/types/known/anypb¢GPBªGoogle.Protobuf.WellKnownTypesJÝ, ¡ Ì  2Á Protocol Buffers - Google's data interchange format Copyright 2008 Google Inc. All rights reserved. https://developers.google.com/protocol-buffers/ Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of Google Inc. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. 
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.     "C  "C  #, #,  $) $)  %"  %"  &! $&!  '; %'; ü ¡î `Any` contains an arbitrary serialized protocol buffer message along with a URL that describes the type of the serialized message. Protobuf library provides support to pack/unpack Any values in the form of utility functions or additional generated methods of the Any type. Example 1: Pack and unpack a message in C++. Foo foo = ...; Any any; any.PackFrom(foo); ... if (any.UnpackTo(&foo)) { ... } Example 2: Pack and unpack a message in Java. Foo foo = ...; Any any = Any.pack(foo); ... if (any.is(Foo.class)) { foo = any.unpack(Foo.class); } // or ... if (any.isSameTypeAs(Foo.getDefaultInstance())) { foo = any.unpack(Foo.getDefaultInstance()); } Example 3: Pack and unpack a message in Python. foo = Foo(...) any = Any() any.Pack(foo) ... if any.Is(Foo.DESCRIPTOR): any.Unpack(foo) ... Example 4: Pack and unpack a message in Go foo := &pb.Foo{...} any, err := anypb.New(foo) if err != nil { ... } ... foo := &pb.Foo{} if err := any.UnmarshalTo(foo); err != nil { ... } The pack methods provided by protobuf library will by default use 'type.googleapis.com/full.type.name' as the type URL and the unpack methods only use the fully qualified type name after the last '/' in the type URL, for example "foo.bar.com/x/y.z" will yield type name "y.z". JSON ==== The JSON representation of an `Any` value uses the regular representation of the deserialized, embedded message, with an additional field `@type` which contains the type URL. Example: package google.profile; message Person { string first_name = 1; string last_name = 2; } { "@type": "type.googleapis.com/google.profile.Person", "firstName": , "lastName": } If the embedded message type is well-known and has a custom JSON representation, that representation will be embedded adding a field `value` which holds the custom JSON in addition to the `@type` field. Example (for message [google.protobuf.Duration][]): { "@type": "type.googleapis.com/google.protobuf.Duration", "value": "1.212s" }  ¼ ­ A URL/resource name that uniquely identifies the type of the serialized protocol buffer message. This string must contain at least one "/" character. The last segment of the URL's path must represent the fully qualified name of the type (as in `path/google.protobuf.Duration`). The name should be in a canonical form (e.g., leading "." is not accepted). In practice, teams usually precompile into the binary all types that they expect it to use in the context of Any. However, for URLs which use the scheme `http`, `https`, or no scheme, one can optionally set up a type server that maps type URLs to message definitions as follows: * If no scheme is provided, `https` is assumed. * An HTTP GET on the URL must yield a [google.protobuf.Type][] value in binary format, or produce an error. 
* Applications are allowed to cache lookup results based on the URL, or have them precompiled into a binary to avoid any lookup. Therefore, binary compatibility needs to be preserved on changes to types. (Use versioned type names to manage breaking changes.) Note: this functionality is not currently available in the official protobuf release, and it is not used for type URLs beginning with type.googleapis.com. As of May 2023, there are no widely used type server implementations and no plans to implement one. Schemes other than `http`, `https` (or the empty scheme) might be used with implementation specific semantics.     W  I Must be a valid serialized protocol buffer of the above specified type.      bproto3 Ã1 google/protobuf/timestamp.protogoogle.protobuf"; Timestamp seconds (Rseconds nanos (RnanosB… com.google.protobufBTimestampProtoPZ2google.golang.org/protobuf/types/known/timestamppbø¢GPBªGoogle.Protobuf.WellKnownTypesJÁ/  Ì  2Á Protocol Buffers - Google's data interchange format Copyright 2008 Google Inc. All rights reserved. https://developers.google.com/protocol-buffers/ Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of Google Inc. nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.     " "  #I  #I  $, $,  %/ %/  &"  &"  '! $'!  (; %(; Ú „Ë A Timestamp represents a point in time independent of any time zone or local calendar, encoded as a count of seconds and fractions of seconds at nanosecond resolution. The count is relative to an epoch at UTC midnight on January 1, 1970, in the proleptic Gregorian calendar which extends the Gregorian calendar backwards to year one. All minutes are 60 seconds long. Leap seconds are "smeared" so that no leap second table is needed for interpretation, using a [24-hour linear smear](https://developers.google.com/time/smear). The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By restricting to that range, we ensure that we can convert to and from [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) date strings. # Examples Example 1: Compute Timestamp from POSIX `time()`. Timestamp timestamp; timestamp.set_seconds(time(NULL)); timestamp.set_nanos(0); Example 2: Compute Timestamp from POSIX `gettimeofday()`. 
struct timeval tv; gettimeofday(&tv, NULL); Timestamp timestamp; timestamp.set_seconds(tv.tv_sec); timestamp.set_nanos(tv.tv_usec * 1000); Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`. FILETIME ft; GetSystemTimeAsFileTime(&ft); UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime; // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z. Timestamp timestamp; timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL)); timestamp.set_nanos((INT32) ((ticks % 10000000) * 100)); Example 4: Compute Timestamp from Java `System.currentTimeMillis()`. long millis = System.currentTimeMillis(); Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000) .setNanos((int) ((millis % 1000) * 1000000)).build(); Example 5: Compute Timestamp from Java `Instant.now()`. Instant now = Instant.now(); Timestamp timestamp = Timestamp.newBuilder().setSeconds(now.getEpochSecond()) .setNanos(now.getNano()).build(); Example 6: Compute Timestamp from current time in Python. timestamp = Timestamp() timestamp.GetCurrentTime() # JSON Mapping In JSON format, the Timestamp type is encoded as a string in the [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z" where {year} is always expressed using four digits while {month}, {day}, {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution), are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone is required. A proto3 JSON serializer should always use UTC (as indicated by "Z") when printing the Timestamp type and a proto3 JSON parser should be able to accept both UTC and other timezones (as indicated by an offset). For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past 01:30 UTC on January 15, 2017. In JavaScript, one can convert a Date object to this format using the standard [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString) method. In Python, a standard `datetime.datetime` object can be converted to this format using [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one can use the Joda Time's [`ISODateTimeFormat.dateTime()`]( http://joda-time.sourceforge.net/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime() ) to obtain a formatter capable of generating timestamps in this format. „  ˆŽ Represents seconds of UTC time since Unix epoch 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive. ˆ ˆ ˆ å ŽÖ Non-negative fractions of a second at nanosecond resolution. Negative second values with fractions must still have non-negative nanos values that count forward in time. Must be from 0 to 999,999,999 inclusive. Ž Ž Žbproto3 ´ events.protodev.sigstore.events.v1google/protobuf/any.protogoogle/protobuf/timestamp.proto"Ù CloudEvent id ( Rid source ( Rsource! spec_version ( R specVersion type ( RtypeR attributes ( 22.dev.sigstore.events.v1.CloudEvent.AttributesEntryR attributes! 
binary_data ( HR binaryData text_data ( HRtextData5 proto_data ( 2.google.protobuf.AnyHR protoDataz AttributesEntry key ( RkeyQ value ( 2;.dev.sigstore.events.v1.CloudEvent.CloudEventAttributeValueRvalue:8š CloudEventAttributeValue ce_boolean (HR ceBoolean ce_integer (HR ceInteger ce_string ( HRceString ce_bytes ( HRceBytes ce_uri ( HRceUri ce_uri_ref ( HRceUriRef? ce_timestamp ( 2.google.protobuf.TimestampHR ceTimestampB attrB data"M CloudEventBatch: events ( 2".dev.sigstore.events.v1.CloudEventReventsBk dev.sigstore.proto.events.v1PZ6github.com/sigstore/protobuf-specs/gen/pb-go/events/v1êSigstore::EventsJ¯ P ä  2W https://github.com/cloudevents/spec/blob/v1.0.2/cloudevents/formats/cloudevents.proto 2• Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 2è* CloudEvent Protobuf Format - Required context attributes are explicity represented. - Optional and Extension context attributes are carried in a map structure. - Data may be represented as binary, text, or protobuf messages.   $ *  N  N  6 6  #  #   * - * "G "  F ' Required Attributes 2" -- CloudEvent Context Attributes ' ' '  (" URI-reference ( (  ( ) ) )  ) * * *  * . -8! Optional & Extension Attributes -( -)3 -67 : 04, -- CloudEvent Data (Bytes, Text, or Proto) 0 1 1 1  1 2 2 2  2 3( 3 3# 3&' V ;F2H* The CloudEvent specification defines seven attribute value types... ; #  =E  =  >  >  >   > ?  ?  ?   ? 
[Binary descriptor residue: an embedded copy of google/protobuf/descriptor.proto (package google.protobuf, Copyright 2008 Google Inc., BSD-style license, originally authored by Kenton Varda). It defines the messages that describe .proto files: FileDescriptorSet, FileDescriptorProto, DescriptorProto (with ExtensionRange and ReservedRange), ExtensionRangeOptions, FieldDescriptorProto (with the Type and Label enums), OneofDescriptorProto, EnumDescriptorProto, EnumValueDescriptorProto, ServiceDescriptorProto, MethodDescriptorProto, the *Options messages (FileOptions, MessageOptions, FieldOptions, OneofOptions, EnumOptions, EnumValueOptions, ServiceOptions, MethodOptions), UninterpretedOption, FeatureSet and FeatureSetDefaults, SourceCodeInfo, GeneratedCodeInfo, and the Edition enum. The embedded comments explain, among other things, that custom options are declared as extensions of the *Options messages (field numbers 50000 through 99999 are for options used within a single application or organization, while publicly shared options should be registered via protobuf-global-extension-registry@google.com), that unresolved option values are parked in uninterpreted_option until all descriptors are built, and that SourceCodeInfo.Location identifies a definition by a path of field numbers and indexes (for example, [4, 3, 2, 7, 1] means file.message_type(3).field(7).name()) plus a span of zero-based line and column offsets.]
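The custom-options guidance summarized above can be illustrated with a small, hypothetical proto3 sketch. Everything in it is invented for illustration (the package example.options, the option name my_codegen_hint, and the number 50123); only the mechanism, extending a *Options message with a field number in the application-local 50000 to 99999 range, comes from the embedded commentary.

```proto
// Hypothetical example of a custom file-level option, following the guidance
// embedded in descriptor.proto: extensions of google.protobuf.FileOptions with
// field numbers 50000-99999 are reserved for options used within a single
// application or organization.
syntax = "proto3";
package example.options;

import "google/protobuf/descriptor.proto";

extend google.protobuf.FileOptions {
  // 50123 is an arbitrary number chosen from the application-local range.
  string my_codegen_hint = 50123;
}
```

A file importing this definition could then set `option (example.options.my_codegen_hint) = "fast-path";` at file scope, and tooling would read the value back through the FileOptions extension (or through uninterpreted_option before the extension is resolved).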
[Binary descriptor residue: the tail of the embedded descriptor.proto (SourceCodeInfo comment-attachment rules and the GeneratedCodeInfo.Annotation message, which maps spans of generated code back to elements of the originating .proto file), followed by a serialized descriptor for google/api/field_behavior.proto (package google.api, Copyright 2025 Google LLC, Apache-2.0). That file defines the FieldBehavior enum (FIELD_BEHAVIOR_UNSPECIFIED, OPTIONAL, REQUIRED, OUTPUT_ONLY, INPUT_ONLY, IMMUTABLE, UNORDERED_LIST, NON_EMPTY_DEFAULT, IDENTIFIER) and a repeated field_behavior extension of google.protobuf.FieldOptions. The annotation only documents how API tooling should treat a field and does not change protocol buffer behavior itself: OPTIONAL is for emphasis, REQUIRED means the request must carry the field or fail (usually INVALID_ARGUMENT), OUTPUT_ONLY fields are ignored in requests, INPUT_ONLY fields are not returned in output, IMMUTABLE fields may be set once at creation, UNORDERED_LIST marks a repeated field with no guaranteed order, NON_EMPTY_DEFAULT marks a field that returns a non-empty default when unset, and IDENTIFIER marks the resource name field.]
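The extension is applied per field, as in the examples carried in the descriptor's own comments. The sketch below restates that usage; the message and field names (CreateWidgetRequest, name, state) are invented for illustration, while the annotation syntax and semantics come from the embedded documentation.

```proto
syntax = "proto3";
package example.api;

import "google/api/field_behavior.proto";

message CreateWidgetRequest {
  // REQUIRED: the request must provide this field, or the server responds
  // with an error (usually INVALID_ARGUMENT).
  string name = 1 [(google.api.field_behavior) = REQUIRED];

  // OUTPUT_ONLY: provided in responses, ignored if supplied in a request.
  string state = 2 [(google.api.field_behavior) = OUTPUT_ONLY];
}
```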
[Binary descriptor residue: serialized descriptor for sigstore_common.proto (package dev.sigstore.common.v1, Copyright 2022 The Sigstore Authors, Apache-2.0), the commonly used message types of the Sigstore community. Messages: HashOutput (algorithm, digest), MessageSignature (message_digest, signature), LogId (key_id), RFC3161SignedTimestamp (signed_timestamp, a DER-encoded TimeStampResponse per RFC 3161 section 2.4.2), PublicKey (raw_bytes, key_details, valid_for), PublicKeyIdentifier (hint, an unauthenticated out-of-band key hint), ObjectIdentifier and ObjectIdentifierValuePair, DistinguishedName (organization, common_name), X509Certificate (DER-encoded raw_bytes), SubjectAlternativeName (type plus an identity oneof of regexp or value), X509CertificateChain (one or more DER-encoded certificates, ordered only where a context such as VerificationMaterial.x509_certificate_chain imposes it), and TimeRange (a closed interval [start, end] with an optional end). Enums: HashAlgorithm (SHA2_256/384/512, SHA3_256/384), PublicKeyDetails (the registry of recommended key encoding and signature algorithm combinations, including PKIX RSA, ECDSA, and Ed25519 variants plus deprecated LMS/LM-OTS entries, modelled as one linear set rather than separate encoding and algorithm fields), and SubjectAlternativeNameType (EMAIL, URI, OTHER_NAME, with OTHER_NAME tied to OID 1.3.6.1.4.1.57264.1.7). The embedded comments stress that MessageSignature.message_digest only identifies the artifact and MUST NOT be used to verify the signature, that the signature algorithm is determined by the verification material, and that LogId.key_id is the unique identity of a transparency log represented by its public key.]
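For orientation, here is a proto3 sketch of a few of the core common types as they can be read back from the descriptor. Message and field names and the documented constraints are taken from the residue above; the field and enum numbers are assumptions, and the protos/sigstore_common.proto source in this repository is authoritative.

```proto
// Sketch reconstructed from the serialized descriptor; numbers are assumed.
syntax = "proto3";
package dev.sigstore.common.v1;

import "google/protobuf/timestamp.proto";

// Digest of a 'message' (generic octet sequence) plus the hash algorithm used.
message HashOutput {
  HashAlgorithm algorithm = 1;
  bytes digest = 2;  // raw octets of the message digest
}

// Computed signature over a message.
message MessageSignature {
  // Identifies the artifact only; MUST NOT be used to verify the signature.
  HashOutput message_digest = 1;
  // Raw signature bytes; the algorithm is determined by the verification
  // material (key pair or certificate), not by this message.
  bytes signature = 2;
}

// Identity of a transparency log, represented by its public key.
message LogId {
  bytes key_id = 1;
}

// Closed interval [start, end]; end may be absent for an open-ended period.
message TimeRange {
  google.protobuf.Timestamp start = 1;
  optional google.protobuf.Timestamp end = 2;
}

enum HashAlgorithm {
  HASH_ALGORITHM_UNSPECIFIED = 0;
  SHA2_256 = 1;
  SHA2_384 = 2;
  SHA2_512 = 3;
  SHA3_256 = 4;
  SHA3_384 = 5;
}
```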
[Binary descriptor residue: serialized descriptor for sigstore_rekor.proto (package dev.sigstore.rekor.v1, Copyright 2022 The Sigstore Authors, Apache-2.0). Messages: KindVersion (kind, version: the Rekor entry type and its API version), Checkpoint (envelope: an origin string, tree size, root hash, optional data, and one or more signatures, concatenated with newlines in the format described by transparency-dev/formats and the C2SP tlog-checkpoint spec), InclusionProof (log_index, root_hash, tree_size, hashes ordered from leaf to root and excluding both, checkpoint), InclusionPromise (signed_entry_timestamp: Rekor's signature over a canonical JSON serialization of the persisted entry, log ID, log index, and integration timestamp), and TransparencyLogEntry (log_index, log_id, kind_version, integrated_time, inclusion_promise, inclusion_proof, canonicalized_body). The embedded comments require that integrated_time not be trusted unless the inclusion promise or another suitable source of signed time is verified, that the inclusion promise MUST be verified for v0.1 bundles, and that canonicalized_body, when set, carry the same contents as Rekor's body field and match the signature provided in Bundle.content.]
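A proto3 sketch of the log-entry types described above follows. Names and the documented semantics are taken from the descriptor residue; field numbers are assumptions, and protos/sigstore_rekor.proto in this repository is authoritative.

```proto
// Sketch reconstructed from the serialized descriptor; numbers are assumed.
syntax = "proto3";
package dev.sigstore.rekor.v1;

import "sigstore_common.proto";

message KindVersion {
  string kind = 1;     // the type of entry stored in the log
  string version = 2;  // the API version of that type
}

message Checkpoint {
  // Origin string, tree size, root hash and signatures, newline-concatenated.
  string envelope = 1;
}

message InclusionProof {
  int64 log_index = 1;
  bytes root_hash = 2;        // Merkle tree root at the time of the proof
  int64 tree_size = 3;
  repeated bytes hashes = 4;  // leaf-to-root path, excluding leaf and root
  Checkpoint checkpoint = 5;  // signed tree head
}

message InclusionPromise {
  // Rekor's signature over a canonical JSON serialization of the persisted
  // entry, log ID, log index and integration timestamp.
  bytes signed_entry_timestamp = 1;
}

message TransparencyLogEntry {
  int64 log_index = 1;
  dev.sigstore.common.v1.LogId log_id = 2;
  KindVersion kind_version = 3;
  // MUST NOT be trusted unless the inclusion promise (or another source of
  // signed time) has been verified.
  int64 integrated_time = 4;
  InclusionPromise inclusion_promise = 5;
  InclusionProof inclusion_proof = 6;
  bytes canonicalized_body = 7;
}
```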
[Binary descriptor residue: serialized descriptor for sigstore_bundle.proto (package dev.sigstore.bundle.v1, Copyright 2022 The Sigstore Authors, Apache-2.0). The Bundle message MUST be versioned through its media_type field using a major/minor scheme; the version specified by this file is application/vnd.dev.sigstore.bundle.v0.3+json, i.e. semantic version 0.3. Messages: TimestampVerificationData (rfc3161_timestamps: RFC 3161 counter signatures over the artifact signature, whose hashed message imprint clients MUST verify), VerificationMaterial (a content oneof of public_key, x509_certificate_chain, or a single leaf certificate, plus tlog_entries and timestamp_verification_data), and Bundle (media_type, verification_material, and a content oneof of message_signature or dsse_envelope). The embedded comments specify that certificate chains MUST lead with the leaf certificate, MUST NOT include root CA certificates, and SHOULD NOT include intermediates present in an independent root of trust; that form (3), the single certificate, MUST be used for 0.3 bundles with "keyless" Public Good Instance signing while form (2) applies to 0.1/0.2 bundles and form (1) is reserved for self-managed keys; that a public key hint MUST match the keyid of the DSSE signature it accompanies; and that a DSSE envelope in a bundle MUST contain exactly one signature and a verified payload type. VerificationMaterial may also be embedded in a DSSE envelope as a signature extension identified by application/vnd.dev.sigstore.verificationmaterial;version=0.1.]
oidc_urls ( 2".dev.sigstore.trustroot.v1.ServiceRoidcUrlsJ rekor_tlog_urls ( 2".dev.sigstore.trustroot.v1.ServiceR rekorTlogUrls[ rekor_tlog_config ( 2/.dev.sigstore.trustroot.v1.ServiceConfigurationRrekorTlogConfig= tsa_urls ( 2".dev.sigstore.trustroot.v1.ServiceRtsaUrlsN tsa_config ( 2/.dev.sigstore.trustroot.v1.ServiceConfigurationR tsaConfigJ"‡ Service url ( Rurl* major_api_version ( RmajorApiVersion> valid_for ( 2!.dev.sigstore.common.v1.TimeRangeRvalidFor"t ServiceConfigurationF selector (2*.dev.sigstore.trustroot.v1.ServiceSelectorRselector count ( Rcount"Ø ClientTrustConfig media_type ( R mediaTypeN trusted_root ( 2&.dev.sigstore.trustroot.v1.TrustedRootBàAR trustedRootT signing_config ( 2(.dev.sigstore.trustroot.v1.SigningConfigBàAR signingConfig*N ServiceSelector SERVICE_SELECTOR_UNDEFINED ALL ANY EXACTBˆ dev.sigstore.proto.trustroot.v1BTrustRootProtoPZ9github.com/sigstore/protobuf-specs/gen/pb-go/trustroot/v1êSigstore::TrustRoot::V1J›i ¦ Ç  2¼ Copyright 2022 The Sigstore Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.  " )   P  P  8 8  "  "  / /  0 -0 ¬  >Ÿ TransparencyLogInstance describes the immutable parameters from a transparency log. See https://www.rfc-editor.org/rfc/rfc9162.html#name-log-parameters for more details. The included parameters are the minimal set required to identify a log, and verify an inclusion proof/promise.   H "; The base URL at which can be used to URLs for the client. " " " ; $@. The hash algorithm used for the Merkle Tree. $, $-; $>?  '8‚ The public key used to verify signatures generated by the log. This attribute contains the signature algorithm used by the log. '( ')3 '67 … -0÷ The unique identifier for this transparency log. Represented as the SHA-256 hash of the log's public key, calculated over the DER encoding of the key represented as SubjectPublicKeyInfo. See https://www.rfc-editor.org/rfc/rfc6962#section-3.2 -$ -%+ -./ ¨ =;š The checkpoint key identifier for the log used in a checkpoint. Optional, not provided for logs that do not generate checkpoints. For logs that do generate checkpoints, if not set, assume log_id equals checkpoint_key_id. Follows the specification described here for ECDSA and Ed25519 signatures: https://github.com/C2SP/C2SP/blob/main/signed-note.md#signatures For RSA signatures, the key ID will match the ECDSA format, the hashed DER-encoded SPKI public key. Publicly witnessed logs MUST NOT use RSA-signed checkpoints, since witnesses do not support RSA signatures. This is provided for convenience. Clients can also calculate the checkpoint key ID given the log's public key. SHOULD be set for logs generating Ed25519 signatures. SHOULD be 4 bytes long, as a truncated hash. =$ =%6 =9: „ BYx CertificateAuthority enlists the information required to identify which CA to use and perform signature verification. B e E=X The root certificate MUST be self-signed, and so the subject and issuer are the same. E0 E18 E;< ‰ Lû The URI identifies the certificate authority. 
It is RECOMMENDED that the URI is the base URL for the certificate authority, that can be provided to any SDK/client provided by the certificate authority to interact with the certificate authority. L L L Ô PCÆ The certificate chain for this CA. The last certificate in the chain MUST be the trust anchor. The trust anchor MAY be a self-signed root CA certificate or MAY be an intermediate CA certificate. P3 P4> PAB Û X7Í The time the *entire* chain was valid. This is at max the longest interval when *all* certificates in the chain were valid, but it MAY be shorter. Clients MUST check timestamps against *both* the `valid_for` time range *and* the entire certificate chain. The TimeRange should be considered valid *inclusive* of the endpoints. X( X)2 X56 ¸ yª TrustedRoot describes the client's complete set of trusted entities. How the TrustedRoot is populated is not specified, but can be a combination of many sources such as TUF repositories, files on disk etc. The TrustedRoot is not meant to be used for any artifact verification, only to capture the complete/global set of trusted verification materials. When verifying an artifact, based on the artifact and policies, a selection of keys/authorities are expected to be extracted and provided to the verification function. This way the set of keys/authorities can be kept to a minimal set by the policy to gain better control over what signatures that are allowed. The embedded transparency logs, CT logs, CAs and TSAs MUST include any previously used instance -- otherwise signatures made in the past cannot be verified. All the listed instances SHOULD be sorted by the 'valid_for' in ascending order, that is, the oldest instance first. Only the last instance is allowed to have their 'end' timestamp unset. All previous instances MUST have a closed interval of validity. The last instance MAY have a closed interval. Clients MUST accept instances that overlaps in time, if not clients may experience problems during rotations of verification materials. To be able to manage planned rotations of either transparency logs or certificate authorities, clienst MUST accept lists of instances where the last instance have a 'valid_for' that belongs to the future. This should not be a problem as clients SHOULD first seek the trust root for a suitable instance before creating a per artifact trust root (that is, a sub-set of the complete trust root) that is used for verification. y ÿ ñ MUST be application/vnd.dev.sigstore.trustedroot.v0.1+json when encoded as JSON. Clients MUST be able to process and parse content with the media type defined in the old format: application/vnd.dev.sigstore.trustedroot+json;version=0.1    / 3! A set of trusted Rekor servers.  ( ). 12 © ŠBš A set of trusted certificate authorities (e.g Fulcio), and any intermediate certificates they provide. If a CA is issuing multiple intermediate certificate, each combination shall be represented as separate chain. I.e, a single root cert may appear in multiple chains but with different intermediate and/or leaf certificates. The certificates are intended to be used for verifying artifact signatures. Š Š% Š&= Š@A ? Œ41 A set of trusted certificate transparency logs. Œ Œ( Œ)/ Œ23 : Ž@, A set of trusted timestamping authorities. Ž Ž% Ž&; Ž>? å •ãÖ SigningConfig represents the trusted entities/state needed by Sigstore signing. In particular, it primarily contains service URLs that a Sigstore signer may need to connect to for the online aspects of signing. 
• § ™˜ MUST be application/vnd.dev.sigstore.signingconfig.v0.2+json Clients MAY choose to also support application/vnd.dev.sigstore.signingconfig.v0.1+json ™ ™ ™ ” «%… URLs to Fulcio-compatible CAs, capable of receiving Certificate Signing Requests (CSRs) and responding with issued certificates. These URLs MUST be the "base" URL for the CAs, which clients should construct an appropriate CSR endpoint on top of. For example, if a CA URL is `https://example.com/ca`, then the client MAY construct the CSR endpoint as `https://example.com/ca/api/v2/signingCert`. Clients MUST select only one Service with the highest API version that the client is compatible with, that is within its validity period, and has the newest validity start date. Client SHOULD select the first Service that meets this requirement. All listed Services SHOULD be sorted by the `valid_for` window in descending order, with the newest instance first. « « « «#$ ¯ ¸'  URLs to OpenID Connect identity providers. These URLs MUST be the "base" URLs for the OIDC IdPs, which clients should perform well-known OpenID Connect discovery against. Clients MUST select only one Service with the highest API version that the client is compatible with, that is within its validity period, and has the newest validity start date. Client SHOULD select the first Service that meets this requirement. All listed Services SHOULD be sorted by the `valid_for` window in descending order, with the newest instance first. ¸ ¸ ¸" ¸%& ° Ç-¡ URLs to Rekor transparency logs. These URL MUST be the "base" URLs for the transparency logs, which clients should construct appropriate API endpoints on top of. Clients MUST select Services with the highest API version that the client is compatible with, that are within its validity period, and have the newest validity start dates. All listed Services SHOULD be sorted by the `valid_for` window in descending order, with the newest instance first. Clients MUST select Services based on the selector value of `rekor_tlog_config`. Ç Ç Ç( Ç+, e Ë3W Specifies how a client should select the set of Rekor transparency logs to write to. Ë Ë. Ë12 Ñ Û' URLs to RFC 3161 Time Stamping Authorities (TSA). These URLs MUST be the *full* URL for the TSA, meaning that it should be suitable for submitting Time Stamp Requests (TSRs) to via HTTP, per RFC 3161. Clients MUST select Services with the highest API version that the client is compatible with, that are within its validity period, and have the newest validity start dates. All listed Services SHOULD be sorted by the `valid_for` window in descending order, with the newest instance first. Clients MUST select Services based on the selector value of `tsa_config`. Û Û Û! Û$& h ß-Z Specifies how a client should select the set of TSAs to request signed timestamps from. ß ß' ß*, F  â9 Reserved tags for previously defined service URL fields  â  â  â è êøÙ Service represents an instance of a service that is a part of Sigstore infrastructure. Clients MUST use the API version hint to determine the service with the highest API version that the client is compatible with. Clients MUST also only connect to services within the specified validity period and that has the newest validity start date. ê X ìJ URL of the service. MUST include scheme and authority. MAY include path. ì ì ì s ð%e Specifies the major API version. A value of 0 represents a service that has not yet been released. ð ð ð#$ œ ÷7 Validity period of a service. 
A service that has only a start date SHOULD be considered the most recent instance of that service, but the client MUST NOT assume there is only one valid instance. The TimeRange MUST be considered valid *inclusive* of the endpoints. ÷( ÷)2 ÷56 ³ ý‹¤ ServiceSelector specifies how a client SHOULD select a set of Services to connect to. A client SHOULD throw an error if the value is SERVICE_SELECTOR_UNDEFINED. ý þ' þ" þ%& g Y Clients SHOULD select all Services based on supported API version and validity window.   Ð …Á Clients SHOULD select one Service based on supported API version and validity window. It is up to the client implementation to decide how to select the Service, e.g. random or round-robin. … … ÿ Šð Clients SHOULD select a specific number of Services based on supported API version and validity window, using the provided `count`. It is up to the client implementation to decide how to select the Service, e.g. random or round-robin. Š Š ± ˜¢ ServiceConfiguration specifies how a client should select a set of Services to connect to, along with a count when a specific number of Services is requested.  K ’%= How a client should select a set of Services to connect to. ’ ’ ’#$ × —È count specifies the number of Services the client should use. Only used when selector is set to EXACT, and count MUST be greater than 0. count MUST be less than or equal to the number of Services. — — — ³ ¦¤ ClientTrustConfig describes the complete state needed by a client to perform both signing and verification operations against a particular instance of Sigstore.  P ŸB MUST be application/vnd.dev.sigstore.clienttrustconfig.v0.1+json Ÿ Ÿ Ÿ 9 ¢N+ The root of trust, which MUST be present. ¢ ¢ ¢#$ ¢%M  œ¢&L I ¥R; Configuration for signing clients, which MUST be present. 
¥ ¥$ ¥'( ¥)Q  œ¥*Pbproto3 ÉH sigstore_verification.protodev.sigstore.verification.v1sigstore_common.protosigstore_trustroot.protosigstore_bundle.proto"¶ CertificateIdentity issuer ( Rissuer@ san ( 2..dev.sigstore.common.v1.SubjectAlternativeNameRsanE oids ( 21.dev.sigstore.common.v1.ObjectIdentifierValuePairRoids"j CertificateIdentitiesQ identities ( 21.dev.sigstore.verification.v1.CertificateIdentityR identities"Y PublicKeyIdentitiesB public_keys ( 2!.dev.sigstore.common.v1.PublicKeyR publicKeys"œ ArtifactVerificationOptionsl certificate_identities ( 23.dev.sigstore.verification.v1.CertificateIdentitiesHRcertificateIdentitiesT public_keys ( 21.dev.sigstore.verification.v1.PublicKeyIdentitiesHR publicKeysm tlog_options ( 2E.dev.sigstore.verification.v1.ArtifactVerificationOptions.TlogOptionsHR tlogOptionsˆp ctlog_options ( 2F.dev.sigstore.verification.v1.ArtifactVerificationOptions.CtlogOptionsHR ctlogOptionsˆy tsa_options ( 2S.dev.sigstore.verification.v1.ArtifactVerificationOptions.TimestampAuthorityOptionsHR tsaOptionsˆ‘ integrated_ts_options ( 2X.dev.sigstore.verification.v1.ArtifactVerificationOptions.TlogIntegratedTimestampOptionsHRintegratedTsOptionsˆ‚ observer_options ( 2R.dev.sigstore.verification.v1.ArtifactVerificationOptions.ObserverTimestampOptionsHRobserverOptionsˆ… TlogOptions threshold (R threshold> perform_online_verification (RperformOnlineVerification disable (RdisableL CtlogOptions threshold (R threshold disable (RdisableJS TimestampAuthorityOptions threshold (R threshold disable (RdisableX TlogIntegratedTimestampOptions threshold (R threshold disable (RdisableR ObserverTimestampOptions threshold (R threshold disable (RdisableB signersB _tlog_optionsB _ctlog_optionsB _tsa_optionsB _integrated_ts_optionsB _observer_options"¤ Artifact# artifact_uri ( HR artifactUri artifact ( HRartifactM artifact_digest ( 2".dev.sigstore.common.v1.HashOutputHRartifactDigestB data"ì InputV artifact_trust_root ( 2&.dev.sigstore.trustroot.v1.TrustedRootRartifactTrustRoot} artifact_verification_options ( 29.dev.sigstore.verification.v1.ArtifactVerificationOptionsRartifactVerificationOptions6 bundle ( 2.dev.sigstore.bundle.v1.BundleRbundleG artifact ( 2&.dev.sigstore.verification.v1.ArtifactHRartifactˆB _artifactB” "dev.sigstore.proto.verification.v1BVerificationProtoPZ 5 69 <= à #Kµ An unordered list of OIDs that must be verified. All OID/values provided in this list MUST exactly match against the values in the certificate for verification to be successful. # #A #BF #IJ &( & '4 ' '$ '%/ '23 *, * +B + +1 +2= +@A † 0‚y A light-weight set of options/policies for identifying trusted signers, used during verification of a single artifact. 0# 18 1 F 3$7 Number of transparency logs the entry must appear on.  3  3  3"# 3 55$ Perform an online inclusion proof.  5  50  534 < 7!- Disable verification for transparency logs.  7  7  7 9@ 9 T <$E The number of ct transparency logs the certificate must appear on.  <  <  <"# 5  ="' Deprecated: Support for detached SCTs  =   =   = 9 ?!* Disable ct transparency log verification  ?  ?  ? AF A) C C$4 The number of signed timestamps that are expected.  C  C  C"# 7 E!( Disable signed timestamp verification.  E  E  E GL G. G I$8 The number of integrated timestamps that are expected.  I  I  I"# ; K!, Disable integrated timestamp verification.  K  K  K MV M( è S$Ø The number of external observers of the timestamp. 
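The service-selection rule recoverable above (pick a Service with the highest API version the client supports, within its validity window, with the newest validity start date) can be expressed directly in code. The sketch below uses a simplified, hypothetical `ServiceCandidate` type rather than the generated `dev.sigstore.trustroot.v1.Service` message, and it ignores the `ServiceSelector`/`count` handling that a real client would also apply.

```rust
use std::time::SystemTime;

// Illustrative stand-in for dev.sigstore.trustroot.v1.Service.
#[derive(Clone)]
struct ServiceCandidate {
    url: String,
    major_api_version: u32,
    valid_start: SystemTime,
    valid_end: Option<SystemTime>, // None means the validity window is still open.
}

// Filter to supported API versions and currently-valid windows, then take the
// highest version, breaking ties by the newest validity start date.
fn select_service(
    services: &[ServiceCandidate],
    supported_versions: &[u32],
    now: SystemTime,
) -> Option<ServiceCandidate> {
    services
        .iter()
        .filter(|s| supported_versions.contains(&s.major_api_version))
        .filter(|s| s.valid_start <= now && s.valid_end.map_or(true, |end| now <= end))
        .max_by_key(|s| (s.major_api_version, s.valid_start))
        .cloned()
}
```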
[binary content: remainder of the compiled sigstore_verification.proto descriptor, with doc comments for the observer timestamp options, the certificate identity and public key trust requirements, and the Artifact and Input messages used by the bundle verification method.]
protobuf-specs-0.4.1/gen/pb-rust/sigstore-protobuf-specs/src/generated/google.api.rs000066400000000000000000000110271477352757300306100ustar00rootroot00000000000000// This file is @generated by prost-build. /// An indicator of the behavior of a given field (for example, that a field /// is required in requests, or given as output but ignored as input). /// This **does not** change the behavior in protocol buffers itself; it only /// denotes the behavior and may affect how API tooling handles the field. /// /// Note: This enum **may** receive new values in the future.
#[derive( sigstore_protobuf_specs_derive::Deserialize_proto, sigstore_protobuf_specs_derive::Serialize_proto )] #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, ::prost::Enumeration)] #[repr(i32)] pub enum FieldBehavior { /// Conventional default for enums. Do not use this. Unspecified = 0, /// Specifically denotes a field as optional. /// While all fields in protocol buffers are optional, this may be specified /// for emphasis if appropriate. Optional = 1, /// Denotes a field as required. /// This indicates that the field **must** be provided as part of the request, /// and failure to do so will cause an error (usually `INVALID_ARGUMENT`). Required = 2, /// Denotes a field as output only. /// This indicates that the field is provided in responses, but including the /// field in a request does nothing (the server *must* ignore it and /// *must not* throw an error as a result of the field's presence). OutputOnly = 3, /// Denotes a field as input only. /// This indicates that the field is provided in requests, and the /// corresponding field is not included in output. InputOnly = 4, /// Denotes a field as immutable. /// This indicates that the field may be set once in a request to create a /// resource, but may not be changed thereafter. Immutable = 5, /// Denotes that a (repeated) field is an unordered list. /// This indicates that the service may provide the elements of the list /// in any arbitrary order, rather than the order the user originally /// provided. Additionally, the list's order may or may not be stable. UnorderedList = 6, /// Denotes that this field returns a non-empty default value if not set. /// This indicates that if the user provides the empty value in a request, /// a non-empty value will be returned. The user will not be aware of what /// non-empty value to expect. NonEmptyDefault = 7, /// Denotes that the field in a resource (a message annotated with /// google.api.resource) is used in the resource name to uniquely identify the /// resource. For AIP-compliant APIs, this should only be applied to the /// `name` field on the resource. /// /// This behavior should not be applied to references to other resources within /// the message. /// /// The identifier field of resources often have different field behavior /// depending on the request it is embedded in (e.g. for Create methods name /// is optional and unused, while for Update methods it is required). Instead /// of method-specific annotations, only `IDENTIFIER` is required. Identifier = 8, } impl FieldBehavior { /// String value of the enum field names used in the ProtoBuf definition. /// /// The values are not transformed in any way and thus are considered stable /// (if the ProtoBuf definition does not change) and safe for programmatic use. pub fn as_str_name(&self) -> &'static str { match self { Self::Unspecified => "FIELD_BEHAVIOR_UNSPECIFIED", Self::Optional => "OPTIONAL", Self::Required => "REQUIRED", Self::OutputOnly => "OUTPUT_ONLY", Self::InputOnly => "INPUT_ONLY", Self::Immutable => "IMMUTABLE", Self::UnorderedList => "UNORDERED_LIST", Self::NonEmptyDefault => "NON_EMPTY_DEFAULT", Self::Identifier => "IDENTIFIER", } } /// Creates an enum from field names used in the ProtoBuf definition. 
pub fn from_str_name(value: &str) -> ::core::option::Option<Self> { match value { "FIELD_BEHAVIOR_UNSPECIFIED" => Some(Self::Unspecified), "OPTIONAL" => Some(Self::Optional), "REQUIRED" => Some(Self::Required), "OUTPUT_ONLY" => Some(Self::OutputOnly), "INPUT_ONLY" => Some(Self::InputOnly), "IMMUTABLE" => Some(Self::Immutable), "UNORDERED_LIST" => Some(Self::UnorderedList), "NON_EMPTY_DEFAULT" => Some(Self::NonEmptyDefault), "IDENTIFIER" => Some(Self::Identifier), _ => None, } } } protobuf-specs-0.4.1/gen/pb-rust/sigstore-protobuf-specs/src/generated/io.intoto.rs000066400000000000000000000025501477352757300305070ustar00rootroot00000000000000// This file is @generated by prost-build. #[derive( sigstore_protobuf_specs_derive::Deserialize_proto, sigstore_protobuf_specs_derive::Serialize_proto )] #[derive(::prost_reflect::ReflectMessage)] #[prost_reflect(message_name = "io.intoto.Envelope")] #[prost_reflect(file_descriptor_set_bytes = "crate::FILE_DESCRIPTOR_SET_BYTES")] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Envelope { #[prost(bytes = "vec", tag = "1")] pub payload: ::prost::alloc::vec::Vec<u8>, #[prost(string, tag = "2")] pub payload_type: ::prost::alloc::string::String, #[prost(message, repeated, tag = "3")] pub signatures: ::prost::alloc::vec::Vec<Signature>, } #[derive( sigstore_protobuf_specs_derive::Deserialize_proto, sigstore_protobuf_specs_derive::Serialize_proto )] #[derive(::prost_reflect::ReflectMessage)] #[prost_reflect(message_name = "io.intoto.Signature")] #[prost_reflect(file_descriptor_set_bytes = "crate::FILE_DESCRIPTOR_SET_BYTES")] #[derive(Clone, PartialEq, ::prost::Message)] pub struct Signature { /// Signature itself. (In JSON, this is encoded as base64.) /// REQUIRED. #[prost(bytes = "vec", tag = "1")] pub sig: ::prost::alloc::vec::Vec<u8>, /// *Unauthenticated* hint identifying which public key was used. /// OPTIONAL. #[prost(string, tag = "2")] pub keyid: ::prost::alloc::string::String, } protobuf-specs-0.4.1/gen/pb-rust/sigstore-protobuf-specs/src/generated/mod.rs000066400000000000000000000020101477352757300273430ustar00rootroot00000000000000// This file is @generated by prost-build. pub mod dev { pub mod sigstore { pub mod bundle { pub mod v1 { include!("dev.sigstore.bundle.v1.rs"); } } pub mod common { pub mod v1 { include!("dev.sigstore.common.v1.rs"); } } pub mod events { pub mod v1 { include!("dev.sigstore.events.v1.rs"); } } pub mod rekor { pub mod v1 { include!("dev.sigstore.rekor.v1.rs"); } } pub mod trustroot { pub mod v1 { include!("dev.sigstore.trustroot.v1.rs"); } } pub mod verification { pub mod v1 { include!("dev.sigstore.verification.v1.rs"); } } } } pub mod google { pub mod api { include!("google.api.rs"); } } pub mod io { pub mod intoto { include!("io.intoto.rs"); } } protobuf-specs-0.4.1/gen/pb-rust/sigstore-protobuf-specs/src/lib.rs000066400000000000000000000003011477352757300253650ustar00rootroot00000000000000static FILE_DESCRIPTOR_SET_BYTES: &'static [u8] = include_bytes!(concat!( env!("CARGO_MANIFEST_DIR"), "/src/generated/file_descriptor_set.bin" )); mod generated; pub use generated::*; protobuf-specs-0.4.1/gen/pb-rust/sigstore-protobuf-specs/tests/000077500000000000000000000000001477352757300246325ustar00rootroot00000000000000protobuf-specs-0.4.1/gen/pb-rust/sigstore-protobuf-specs/tests/integration.rs000066400000000000000000000042151477352757300275250ustar00rootroot00000000000000use std::io; use sigstore_protobuf_specs::dev::sigstore::bundle::v1::Bundle; macro_rules!
include_asset { ($path:literal) => { include_str!(concat!(env!("CARGO_MANIFEST_DIR"), "/assets/", $path)) }; } struct SpaceSeparatorFormatter; impl serde_json::ser::Formatter for SpaceSeparatorFormatter { fn begin_array_value<W>(&mut self, writer: &mut W, first: bool) -> io::Result<()> where W: ?Sized + io::Write, { if first { Ok(()) } else { writer.write_all(b", ") } } fn begin_object_value<W>(&mut self, writer: &mut W) -> io::Result<()> where W: ?Sized + io::Write, { writer.write_all(b": ") } fn begin_object_key<W>(&mut self, writer: &mut W, first: bool) -> io::Result<()> where W: ?Sized + io::Write, { if first { Ok(()) } else { writer.write_all(b", ") } } fn write_string_fragment<W>(&mut self, writer: &mut W, fragment: &str) -> io::Result<()> where W: ?Sized + io::Write, { // Replace em-dashes with a unicode escape. serde_json unescapes it. writer.write_all(fragment.replace("\u{2014}", "\\u2014").as_bytes()) } } /// Test re-serializing a known-good bundle from sigstore-python. #[test] fn bundle_roundtrip() { // Deserialize bundle, trimming trailing whitespace. let input = include_asset!("a.txt.sigstore").trim_end(); let bundle: Bundle = serde_json::from_str(input).expect("failed to deserialize Bundle!"); // Re-serialize bundle with our python-like formatter. let formatter = SpaceSeparatorFormatter {}; let mut result = Vec::new(); let mut ser = serde_json::Serializer::with_formatter(&mut result, formatter); serde::Serialize::serialize(&bundle, &mut ser).expect("failed to re-serialize Bundle!"); // Notwithstanding the workarounds above, our serialized bundle should be // byte-for-byte identical to the input bundle. let result = std::str::from_utf8(&result).unwrap(); assert_eq!( input, &result[..], "re-serialized Bundle does not match original!" ); } protobuf-specs-0.4.1/gen/pb-rust/sigstore-protobuf-specs/tests/unit.rs000066400000000000000000000040031477352757300261540ustar00rootroot00000000000000use sigstore_protobuf_specs::dev::sigstore::common::v1::{HashOutput, LogId, MessageSignature}; /// HashOutput, a structure using only primitive types #[test] fn primitives() { let hash_output_json = r#"{"digest":"AQID"}"#; let hash_output_rs = HashOutput { algorithm: 0i32, digest: vec![1u8, 2u8, 3u8], }; let serialize = serde_json::to_string(&hash_output_rs); assert!(serialize.is_ok()); assert_eq!(serialize.unwrap(), hash_output_json); let deserialize = serde_json::from_str::<HashOutput>(hash_output_json); assert!(deserialize.is_ok()); assert_eq!(deserialize.unwrap(), hash_output_rs); } /// LogId, a structure with a field using camelCase #[test] fn camel_case() { let log_id_json = r#"{"keyId":"AA=="}"#; let log_id_rs = LogId { key_id: vec![0] }; let serialize = serde_json::to_string(&log_id_rs); assert!(serialize.is_ok()); assert_eq!(serialize.unwrap(), log_id_json); let deserialize = serde_json::from_str::<LogId>(log_id_json); assert!(deserialize.is_ok()); assert_eq!(deserialize.unwrap(), log_id_rs); } /// MessageSignature, nested structure #[test] fn nested() { let message_signature_json = r#"{ "messageDigest": { "algorithm": "SHA2_256", "digest": "AQID" }, "signature": "AQ==" }"#; let message_signature_rs = MessageSignature { message_digest: Some(HashOutput { algorithm: 1i32, digest: vec![1u8, 2u8, 3u8], }), signature: vec![1u8], }; let serialize = serde_json::to_string(&message_signature_rs); assert!(serialize.is_ok()); assert_eq!( serialize.unwrap(), message_signature_json .chars() .filter(|c| !c.is_whitespace()) .collect::<String>() ); let deserialize = serde_json::from_str::<MessageSignature>(&message_signature_json);
assert!(deserialize.is_ok()); assert_eq!(deserialize.unwrap(), message_signature_rs); } protobuf-specs-0.4.1/gen/pb-typescript/000077500000000000000000000000001477352757300200715ustar00rootroot00000000000000protobuf-specs-0.4.1/gen/pb-typescript/.gitignore000066400000000000000000000000241477352757300220550ustar00rootroot00000000000000dist/ node_modules/ protobuf-specs-0.4.1/gen/pb-typescript/LICENSE000066400000000000000000000261271477352757300211060ustar00rootroot00000000000000 Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." 
"Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. 
Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. Copyright 2023 The Sigstore Authors Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. 
You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. protobuf-specs-0.4.1/gen/pb-typescript/README.md000066400000000000000000000002711477352757300213500ustar00rootroot00000000000000# @sigstore/protobuf-specs TypeScript language bindings for Sigstore's protobuf specs. See the [repository's README](https://github.com/sigstore/protobuf-specs) for more information. protobuf-specs-0.4.1/gen/pb-typescript/package-lock.json000066400000000000000000000056731477352757300233200ustar00rootroot00000000000000{ "name": "@sigstore/protobuf-specs", "version": "0.4.0", "lockfileVersion": 2, "requires": true, "packages": { "": { "name": "@sigstore/protobuf-specs", "version": "0.4.0", "license": "Apache-2.0", "devDependencies": { "@tsconfig/node18": "^18.2.4", "@types/node": "^18.14.0", "typescript": "^5.7.2" }, "engines": { "node": "^18.17.0 || >=20.5.0" } }, "node_modules/@tsconfig/node18": { "version": "18.2.4", "resolved": "https://registry.npmjs.org/@tsconfig/node18/-/node18-18.2.4.tgz", "integrity": "sha512-5xxU8vVs9/FNcvm3gE07fPbn9tl6tqGGWA9tSlwsUEkBxtRnTsNmwrV8gasZ9F/EobaSv9+nu8AxUKccw77JpQ==", "dev": true }, "node_modules/@types/node": { "version": "18.19.70", "resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.70.tgz", "integrity": "sha512-RE+K0+KZoEpDUbGGctnGdkrLFwi1eYKTlIHNl2Um98mUkGsm1u2Ff6Ltd0e8DktTtC98uy7rSj+hO8t/QuLoVQ==", "dev": true, "dependencies": { "undici-types": "~5.26.4" } }, "node_modules/typescript": { "version": "5.7.3", "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.7.3.tgz", "integrity": "sha512-84MVSjMEHP+FQRPy3pX9sTVV/INIex71s9TL2Gm5FG/WG1SqXeKyZ0k7/blY/4FdOzI12CBy1vGc4og/eus0fw==", "dev": true, "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" }, "engines": { "node": ">=14.17" } }, "node_modules/undici-types": { "version": "5.26.5", "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz", "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==", "dev": true } }, "dependencies": { "@tsconfig/node18": { "version": "18.2.4", "resolved": "https://registry.npmjs.org/@tsconfig/node18/-/node18-18.2.4.tgz", "integrity": "sha512-5xxU8vVs9/FNcvm3gE07fPbn9tl6tqGGWA9tSlwsUEkBxtRnTsNmwrV8gasZ9F/EobaSv9+nu8AxUKccw77JpQ==", "dev": true }, "@types/node": { "version": "18.19.70", "resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.70.tgz", "integrity": "sha512-RE+K0+KZoEpDUbGGctnGdkrLFwi1eYKTlIHNl2Um98mUkGsm1u2Ff6Ltd0e8DktTtC98uy7rSj+hO8t/QuLoVQ==", "dev": true, "requires": { "undici-types": "~5.26.4" } }, "typescript": { "version": "5.7.3", "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.7.3.tgz", "integrity": "sha512-84MVSjMEHP+FQRPy3pX9sTVV/INIex71s9TL2Gm5FG/WG1SqXeKyZ0k7/blY/4FdOzI12CBy1vGc4og/eus0fw==", "dev": true }, "undici-types": { "version": "5.26.5", "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz", "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==", "dev": true } } } protobuf-specs-0.4.1/gen/pb-typescript/package.json000066400000000000000000000013311477352757300223550ustar00rootroot00000000000000{ "name": 
"@sigstore/protobuf-specs", "version": "0.4.0", "description": "code-signing for npm packages", "main": "dist/index.js", "types": "dist/index.d.ts", "scripts": { "build": "tsc" }, "repository": { "type": "git", "url": "git+https://github.com/sigstore/protobuf-specs.git" }, "files": [ "dist" ], "author": "bdehamer@github.com", "license": "Apache-2.0", "bugs": { "url": "https://github.com/sigstore/protobuf-specs/issues" }, "homepage": "https://github.com/sigstore/protobuf-specs#readme", "devDependencies": { "@tsconfig/node18": "^18.2.4", "@types/node": "^18.14.0", "typescript": "^5.7.2" }, "engines": { "node": "^18.17.0 || >=20.5.0" } } protobuf-specs-0.4.1/gen/pb-typescript/src/000077500000000000000000000000001477352757300206605ustar00rootroot00000000000000protobuf-specs-0.4.1/gen/pb-typescript/src/__generated__/000077500000000000000000000000001477352757300234125ustar00rootroot00000000000000protobuf-specs-0.4.1/gen/pb-typescript/src/__generated__/envelope.ts000066400000000000000000000056671477352757300256150ustar00rootroot00000000000000// Code generated by protoc-gen-ts_proto. DO NOT EDIT. // versions: // protoc-gen-ts_proto v2.6.1 // protoc v5.29.4 // source: envelope.proto /* eslint-disable */ /** An authenticated message of arbitrary type. */ export interface Envelope { /** * Message to be signed. (In JSON, this is encoded as base64.) * REQUIRED. */ payload: Buffer; /** * String unambiguously identifying how to interpret payload. * REQUIRED. */ payloadType: string; /** * Signature over: * PAE(type, payload) * Where PAE is defined as: * PAE(type, payload) = "DSSEv1" + SP + LEN(type) + SP + type + SP + LEN(payload) + SP + payload * + = concatenation * SP = ASCII space [0x20] * "DSSEv1" = ASCII [0x44, 0x53, 0x53, 0x45, 0x76, 0x31] * LEN(s) = ASCII decimal encoding of the byte length of s, with no leading zeros * REQUIRED (length >= 1). */ signatures: Signature[]; } export interface Signature { /** * Signature itself. (In JSON, this is encoded as base64.) * REQUIRED. */ sig: Buffer; /** * Unauthenticated* hint identifying which public key was used. * OPTIONAL. */ keyid: string; } export const Envelope: MessageFns = { fromJSON(object: any): Envelope { return { payload: isSet(object.payload) ? Buffer.from(bytesFromBase64(object.payload)) : Buffer.alloc(0), payloadType: isSet(object.payloadType) ? globalThis.String(object.payloadType) : "", signatures: globalThis.Array.isArray(object?.signatures) ? object.signatures.map((e: any) => Signature.fromJSON(e)) : [], }; }, toJSON(message: Envelope): unknown { const obj: any = {}; if (message.payload.length !== 0) { obj.payload = base64FromBytes(message.payload); } if (message.payloadType !== "") { obj.payloadType = message.payloadType; } if (message.signatures?.length) { obj.signatures = message.signatures.map((e) => Signature.toJSON(e)); } return obj; }, }; export const Signature: MessageFns = { fromJSON(object: any): Signature { return { sig: isSet(object.sig) ? Buffer.from(bytesFromBase64(object.sig)) : Buffer.alloc(0), keyid: isSet(object.keyid) ? 
globalThis.String(object.keyid) : "", }; }, toJSON(message: Signature): unknown { const obj: any = {}; if (message.sig.length !== 0) { obj.sig = base64FromBytes(message.sig); } if (message.keyid !== "") { obj.keyid = message.keyid; } return obj; }, }; function bytesFromBase64(b64: string): Uint8Array { return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); } function base64FromBytes(arr: Uint8Array): string { return globalThis.Buffer.from(arr).toString("base64"); } function isSet(value: any): boolean { return value !== null && value !== undefined; } interface MessageFns<T> { fromJSON(object: any): T; toJSON(message: T): unknown; } protobuf-specs-0.4.1/gen/pb-typescript/src/__generated__/events.ts000066400000000000000000000162611477352757300252740ustar00rootroot00000000000000// Code generated by protoc-gen-ts_proto. DO NOT EDIT. // versions: // protoc-gen-ts_proto v2.6.1 // protoc v5.29.4 // source: events.proto /* eslint-disable */ import { Any } from "./google/protobuf/any"; import { Timestamp } from "./google/protobuf/timestamp"; export interface CloudEvent { /** Required Attributes */ id: string; /** URI-reference */ source: string; specVersion: string; type: string; /** Optional & Extension Attributes */ attributes: { [key: string]: CloudEvent_CloudEventAttributeValue }; /** -- CloudEvent Data (Bytes, Text, or Proto) */ data?: { $case: "binaryData"; binaryData: Buffer } | { $case: "textData"; textData: string } | { $case: "protoData"; protoData: Any; } | undefined; } export interface CloudEvent_AttributesEntry { key: string; value: CloudEvent_CloudEventAttributeValue | undefined; } export interface CloudEvent_CloudEventAttributeValue { attr?: | { $case: "ceBoolean"; ceBoolean: boolean } | { $case: "ceInteger"; ceInteger: number } | { $case: "ceString"; ceString: string } | { $case: "ceBytes"; ceBytes: Buffer } | { $case: "ceUri"; ceUri: string } | { $case: "ceUriRef"; ceUriRef: string } | { $case: "ceTimestamp"; ceTimestamp: Date } | undefined; } export interface CloudEventBatch { events: CloudEvent[]; } export const CloudEvent: MessageFns<CloudEvent> = { fromJSON(object: any): CloudEvent { return { id: isSet(object.id) ? globalThis.String(object.id) : "", source: isSet(object.source) ? globalThis.String(object.source) : "", specVersion: isSet(object.specVersion) ? globalThis.String(object.specVersion) : "", type: isSet(object.type) ? globalThis.String(object.type) : "", attributes: isObject(object.attributes) ? Object.entries(object.attributes).reduce<{ [key: string]: CloudEvent_CloudEventAttributeValue }>( (acc, [key, value]) => { acc[key] = CloudEvent_CloudEventAttributeValue.fromJSON(value); return acc; }, {}, ) : {}, data: isSet(object.binaryData) ? { $case: "binaryData", binaryData: Buffer.from(bytesFromBase64(object.binaryData)) } : isSet(object.textData) ? { $case: "textData", textData: globalThis.String(object.textData) } : isSet(object.protoData) ?
{ $case: "protoData", protoData: Any.fromJSON(object.protoData) } : undefined, }; }, toJSON(message: CloudEvent): unknown { const obj: any = {}; if (message.id !== "") { obj.id = message.id; } if (message.source !== "") { obj.source = message.source; } if (message.specVersion !== "") { obj.specVersion = message.specVersion; } if (message.type !== "") { obj.type = message.type; } if (message.attributes) { const entries = Object.entries(message.attributes); if (entries.length > 0) { obj.attributes = {}; entries.forEach(([k, v]) => { obj.attributes[k] = CloudEvent_CloudEventAttributeValue.toJSON(v); }); } } if (message.data?.$case === "binaryData") { obj.binaryData = base64FromBytes(message.data.binaryData); } else if (message.data?.$case === "textData") { obj.textData = message.data.textData; } else if (message.data?.$case === "protoData") { obj.protoData = Any.toJSON(message.data.protoData); } return obj; }, }; export const CloudEvent_AttributesEntry: MessageFns<CloudEvent_AttributesEntry> = { fromJSON(object: any): CloudEvent_AttributesEntry { return { key: isSet(object.key) ? globalThis.String(object.key) : "", value: isSet(object.value) ? CloudEvent_CloudEventAttributeValue.fromJSON(object.value) : undefined, }; }, toJSON(message: CloudEvent_AttributesEntry): unknown { const obj: any = {}; if (message.key !== "") { obj.key = message.key; } if (message.value !== undefined) { obj.value = CloudEvent_CloudEventAttributeValue.toJSON(message.value); } return obj; }, }; export const CloudEvent_CloudEventAttributeValue: MessageFns<CloudEvent_CloudEventAttributeValue> = { fromJSON(object: any): CloudEvent_CloudEventAttributeValue { return { attr: isSet(object.ceBoolean) ? { $case: "ceBoolean", ceBoolean: globalThis.Boolean(object.ceBoolean) } : isSet(object.ceInteger) ? { $case: "ceInteger", ceInteger: globalThis.Number(object.ceInteger) } : isSet(object.ceString) ? { $case: "ceString", ceString: globalThis.String(object.ceString) } : isSet(object.ceBytes) ? { $case: "ceBytes", ceBytes: Buffer.from(bytesFromBase64(object.ceBytes)) } : isSet(object.ceUri) ? { $case: "ceUri", ceUri: globalThis.String(object.ceUri) } : isSet(object.ceUriRef) ? { $case: "ceUriRef", ceUriRef: globalThis.String(object.ceUriRef) } : isSet(object.ceTimestamp) ? { $case: "ceTimestamp", ceTimestamp: fromJsonTimestamp(object.ceTimestamp) } : undefined, }; }, toJSON(message: CloudEvent_CloudEventAttributeValue): unknown { const obj: any = {}; if (message.attr?.$case === "ceBoolean") { obj.ceBoolean = message.attr.ceBoolean; } else if (message.attr?.$case === "ceInteger") { obj.ceInteger = Math.round(message.attr.ceInteger); } else if (message.attr?.$case === "ceString") { obj.ceString = message.attr.ceString; } else if (message.attr?.$case === "ceBytes") { obj.ceBytes = base64FromBytes(message.attr.ceBytes); } else if (message.attr?.$case === "ceUri") { obj.ceUri = message.attr.ceUri; } else if (message.attr?.$case === "ceUriRef") { obj.ceUriRef = message.attr.ceUriRef; } else if (message.attr?.$case === "ceTimestamp") { obj.ceTimestamp = message.attr.ceTimestamp.toISOString(); } return obj; }, }; export const CloudEventBatch: MessageFns<CloudEventBatch> = { fromJSON(object: any): CloudEventBatch { return { events: globalThis.Array.isArray(object?.events) ?
object.events.map((e: any) => CloudEvent.fromJSON(e)) : [], }; }, toJSON(message: CloudEventBatch): unknown { const obj: any = {}; if (message.events?.length) { obj.events = message.events.map((e) => CloudEvent.toJSON(e)); } return obj; }, }; function bytesFromBase64(b64: string): Uint8Array { return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); } function base64FromBytes(arr: Uint8Array): string { return globalThis.Buffer.from(arr).toString("base64"); } function fromTimestamp(t: Timestamp): Date { let millis = (globalThis.Number(t.seconds) || 0) * 1_000; millis += (t.nanos || 0) / 1_000_000; return new globalThis.Date(millis); } function fromJsonTimestamp(o: any): Date { if (o instanceof globalThis.Date) { return o; } else if (typeof o === "string") { return new globalThis.Date(o); } else { return fromTimestamp(Timestamp.fromJSON(o)); } } function isObject(value: any): boolean { return typeof value === "object" && value !== null; } function isSet(value: any): boolean { return value !== null && value !== undefined; } interface MessageFns<T> { fromJSON(object: any): T; toJSON(message: T): unknown; } protobuf-specs-0.4.1/gen/pb-typescript/src/__generated__/google/000077500000000000000000000000001477352757300246665ustar00rootroot00000000000000protobuf-specs-0.4.1/gen/pb-typescript/src/__generated__/google/api/000077500000000000000000000000001477352757300254375ustar00rootroot00000000000000protobuf-specs-0.4.1/gen/pb-typescript/src/__generated__/google/api/field_behavior.ts000066400000000000000000000115051477352757300307530ustar00rootroot00000000000000// Code generated by protoc-gen-ts_proto. DO NOT EDIT. // versions: // protoc-gen-ts_proto v2.6.1 // protoc v5.29.4 // source: google/api/field_behavior.proto /* eslint-disable */ /** * An indicator of the behavior of a given field (for example, that a field * is required in requests, or given as output but ignored as input). * This **does not** change the behavior in protocol buffers itself; it only * denotes the behavior and may affect how API tooling handles the field. * * Note: This enum **may** receive new values in the future. */ export enum FieldBehavior { /** FIELD_BEHAVIOR_UNSPECIFIED - Conventional default for enums. Do not use this. */ FIELD_BEHAVIOR_UNSPECIFIED = 0, /** * OPTIONAL - Specifically denotes a field as optional. * While all fields in protocol buffers are optional, this may be specified * for emphasis if appropriate. */ OPTIONAL = 1, /** * REQUIRED - Denotes a field as required. * This indicates that the field **must** be provided as part of the request, * and failure to do so will cause an error (usually `INVALID_ARGUMENT`). */ REQUIRED = 2, /** * OUTPUT_ONLY - Denotes a field as output only. * This indicates that the field is provided in responses, but including the * field in a request does nothing (the server *must* ignore it and * *must not* throw an error as a result of the field's presence). */ OUTPUT_ONLY = 3, /** * INPUT_ONLY - Denotes a field as input only. * This indicates that the field is provided in requests, and the * corresponding field is not included in output. */ INPUT_ONLY = 4, /** * IMMUTABLE - Denotes a field as immutable. * This indicates that the field may be set once in a request to create a * resource, but may not be changed thereafter. */ IMMUTABLE = 5, /** * UNORDERED_LIST - Denotes that a (repeated) field is an unordered list. * This indicates that the service may provide the elements of the list * in any arbitrary order, rather than the order the user originally * provided.
Additionally, the list's order may or may not be stable. */ UNORDERED_LIST = 6, /** * NON_EMPTY_DEFAULT - Denotes that this field returns a non-empty default value if not set. * This indicates that if the user provides the empty value in a request, * a non-empty value will be returned. The user will not be aware of what * non-empty value to expect. */ NON_EMPTY_DEFAULT = 7, /** * IDENTIFIER - Denotes that the field in a resource (a message annotated with * google.api.resource) is used in the resource name to uniquely identify the * resource. For AIP-compliant APIs, this should only be applied to the * `name` field on the resource. * * This behavior should not be applied to references to other resources within * the message. * * The identifier field of resources often have different field behavior * depending on the request it is embedded in (e.g. for Create methods name * is optional and unused, while for Update methods it is required). Instead * of method-specific annotations, only `IDENTIFIER` is required. */ IDENTIFIER = 8, } export function fieldBehaviorFromJSON(object: any): FieldBehavior { switch (object) { case 0: case "FIELD_BEHAVIOR_UNSPECIFIED": return FieldBehavior.FIELD_BEHAVIOR_UNSPECIFIED; case 1: case "OPTIONAL": return FieldBehavior.OPTIONAL; case 2: case "REQUIRED": return FieldBehavior.REQUIRED; case 3: case "OUTPUT_ONLY": return FieldBehavior.OUTPUT_ONLY; case 4: case "INPUT_ONLY": return FieldBehavior.INPUT_ONLY; case 5: case "IMMUTABLE": return FieldBehavior.IMMUTABLE; case 6: case "UNORDERED_LIST": return FieldBehavior.UNORDERED_LIST; case 7: case "NON_EMPTY_DEFAULT": return FieldBehavior.NON_EMPTY_DEFAULT; case 8: case "IDENTIFIER": return FieldBehavior.IDENTIFIER; default: throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldBehavior"); } } export function fieldBehaviorToJSON(object: FieldBehavior): string { switch (object) { case FieldBehavior.FIELD_BEHAVIOR_UNSPECIFIED: return "FIELD_BEHAVIOR_UNSPECIFIED"; case FieldBehavior.OPTIONAL: return "OPTIONAL"; case FieldBehavior.REQUIRED: return "REQUIRED"; case FieldBehavior.OUTPUT_ONLY: return "OUTPUT_ONLY"; case FieldBehavior.INPUT_ONLY: return "INPUT_ONLY"; case FieldBehavior.IMMUTABLE: return "IMMUTABLE"; case FieldBehavior.UNORDERED_LIST: return "UNORDERED_LIST"; case FieldBehavior.NON_EMPTY_DEFAULT: return "NON_EMPTY_DEFAULT"; case FieldBehavior.IDENTIFIER: return "IDENTIFIER"; default: throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldBehavior"); } } protobuf-specs-0.4.1/gen/pb-typescript/src/__generated__/google/protobuf/000077500000000000000000000000001477352757300265265ustar00rootroot00000000000000protobuf-specs-0.4.1/gen/pb-typescript/src/__generated__/google/protobuf/any.ts000066400000000000000000000122771477352757300276760ustar00rootroot00000000000000// Code generated by protoc-gen-ts_proto. DO NOT EDIT. // versions: // protoc-gen-ts_proto v2.6.1 // protoc v5.29.4 // source: google/protobuf/any.proto /* eslint-disable */ /** * `Any` contains an arbitrary serialized protocol buffer message along with a * URL that describes the type of the serialized message. * * Protobuf library provides support to pack/unpack Any values in the form * of utility functions or additional generated methods of the Any type. * * Example 1: Pack and unpack a message in C++. * * Foo foo = ...; * Any any; * any.PackFrom(foo); * ... * if (any.UnpackTo(&foo)) { * ... * } * * Example 2: Pack and unpack a message in Java. * * Foo foo = ...; * Any any = Any.pack(foo); * ... 
* if (any.is(Foo.class)) { * foo = any.unpack(Foo.class); * } * // or ... * if (any.isSameTypeAs(Foo.getDefaultInstance())) { * foo = any.unpack(Foo.getDefaultInstance()); * } * * Example 3: Pack and unpack a message in Python. * * foo = Foo(...) * any = Any() * any.Pack(foo) * ... * if any.Is(Foo.DESCRIPTOR): * any.Unpack(foo) * ... * * Example 4: Pack and unpack a message in Go * * foo := &pb.Foo{...} * any, err := anypb.New(foo) * if err != nil { * ... * } * ... * foo := &pb.Foo{} * if err := any.UnmarshalTo(foo); err != nil { * ... * } * * The pack methods provided by protobuf library will by default use * 'type.googleapis.com/full.type.name' as the type URL and the unpack * methods only use the fully qualified type name after the last '/' * in the type URL, for example "foo.bar.com/x/y.z" will yield type * name "y.z". * * JSON * ==== * The JSON representation of an `Any` value uses the regular * representation of the deserialized, embedded message, with an * additional field `@type` which contains the type URL. Example: * * package google.profile; * message Person { * string first_name = 1; * string last_name = 2; * } * * { * "@type": "type.googleapis.com/google.profile.Person", * "firstName": , * "lastName": * } * * If the embedded message type is well-known and has a custom JSON * representation, that representation will be embedded adding a field * `value` which holds the custom JSON in addition to the `@type` * field. Example (for message [google.protobuf.Duration][]): * * { * "@type": "type.googleapis.com/google.protobuf.Duration", * "value": "1.212s" * } */ export interface Any { /** * A URL/resource name that uniquely identifies the type of the serialized * protocol buffer message. This string must contain at least * one "/" character. The last segment of the URL's path must represent * the fully qualified name of the type (as in * `path/google.protobuf.Duration`). The name should be in a canonical form * (e.g., leading "." is not accepted). * * In practice, teams usually precompile into the binary all types that they * expect it to use in the context of Any. However, for URLs which use the * scheme `http`, `https`, or no scheme, one can optionally set up a type * server that maps type URLs to message definitions as follows: * * * If no scheme is provided, `https` is assumed. * * An HTTP GET on the URL must yield a [google.protobuf.Type][] * value in binary format, or produce an error. * * Applications are allowed to cache lookup results based on the * URL, or have them precompiled into a binary to avoid any * lookup. Therefore, binary compatibility needs to be preserved * on changes to types. (Use versioned type names to manage * breaking changes.) * * Note: this functionality is not currently available in the official * protobuf release, and it is not used for type URLs beginning with * type.googleapis.com. As of May 2023, there are no widely used type server * implementations and no plans to implement one. * * Schemes other than `http`, `https` (or the empty scheme) might be * used with implementation specific semantics. */ typeUrl: string; /** Must be a valid serialized protocol buffer of the above specified type. */ value: Buffer; } export const Any: MessageFns = { fromJSON(object: any): Any { return { typeUrl: isSet(object.typeUrl) ? globalThis.String(object.typeUrl) : "", value: isSet(object.value) ? 
Buffer.from(bytesFromBase64(object.value)) : Buffer.alloc(0), }; }, toJSON(message: Any): unknown { const obj: any = {}; if (message.typeUrl !== "") { obj.typeUrl = message.typeUrl; } if (message.value.length !== 0) { obj.value = base64FromBytes(message.value); } return obj; }, }; function bytesFromBase64(b64: string): Uint8Array { return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); } function base64FromBytes(arr: Uint8Array): string { return globalThis.Buffer.from(arr).toString("base64"); } function isSet(value: any): boolean { return value !== null && value !== undefined; } interface MessageFns { fromJSON(object: any): T; toJSON(message: T): unknown; } protobuf-specs-0.4.1/gen/pb-typescript/src/__generated__/google/protobuf/descriptor.ts000066400000000000000000003444341477352757300312700ustar00rootroot00000000000000// Code generated by protoc-gen-ts_proto. DO NOT EDIT. // versions: // protoc-gen-ts_proto v2.6.1 // protoc v5.29.4 // source: google/protobuf/descriptor.proto /* eslint-disable */ /** The full set of known editions. */ export enum Edition { /** EDITION_UNKNOWN - A placeholder for an unknown edition value. */ EDITION_UNKNOWN = 0, /** * EDITION_LEGACY - A placeholder edition for specifying default behaviors *before* a feature * was first introduced. This is effectively an "infinite past". */ EDITION_LEGACY = 900, /** * EDITION_PROTO2 - Legacy syntax "editions". These pre-date editions, but behave much like * distinct editions. These can't be used to specify the edition of proto * files, but feature definitions must supply proto2/proto3 defaults for * backwards compatibility. */ EDITION_PROTO2 = 998, EDITION_PROTO3 = 999, /** * EDITION_2023 - Editions that have been released. The specific values are arbitrary and * should not be depended on, but they will always be time-ordered for easy * comparison. */ EDITION_2023 = 1000, EDITION_2024 = 1001, /** * EDITION_1_TEST_ONLY - Placeholder editions for testing feature resolution. These should not be * used or relied on outside of tests. */ EDITION_1_TEST_ONLY = 1, EDITION_2_TEST_ONLY = 2, EDITION_99997_TEST_ONLY = 99997, EDITION_99998_TEST_ONLY = 99998, EDITION_99999_TEST_ONLY = 99999, /** * EDITION_MAX - Placeholder for specifying unbounded edition support. This should only * ever be used by plugins that can expect to never require any changes to * support a new edition. 
*/ EDITION_MAX = 2147483647, } export function editionFromJSON(object: any): Edition { switch (object) { case 0: case "EDITION_UNKNOWN": return Edition.EDITION_UNKNOWN; case 900: case "EDITION_LEGACY": return Edition.EDITION_LEGACY; case 998: case "EDITION_PROTO2": return Edition.EDITION_PROTO2; case 999: case "EDITION_PROTO3": return Edition.EDITION_PROTO3; case 1000: case "EDITION_2023": return Edition.EDITION_2023; case 1001: case "EDITION_2024": return Edition.EDITION_2024; case 1: case "EDITION_1_TEST_ONLY": return Edition.EDITION_1_TEST_ONLY; case 2: case "EDITION_2_TEST_ONLY": return Edition.EDITION_2_TEST_ONLY; case 99997: case "EDITION_99997_TEST_ONLY": return Edition.EDITION_99997_TEST_ONLY; case 99998: case "EDITION_99998_TEST_ONLY": return Edition.EDITION_99998_TEST_ONLY; case 99999: case "EDITION_99999_TEST_ONLY": return Edition.EDITION_99999_TEST_ONLY; case 2147483647: case "EDITION_MAX": return Edition.EDITION_MAX; default: throw new globalThis.Error("Unrecognized enum value " + object + " for enum Edition"); } } export function editionToJSON(object: Edition): string { switch (object) { case Edition.EDITION_UNKNOWN: return "EDITION_UNKNOWN"; case Edition.EDITION_LEGACY: return "EDITION_LEGACY"; case Edition.EDITION_PROTO2: return "EDITION_PROTO2"; case Edition.EDITION_PROTO3: return "EDITION_PROTO3"; case Edition.EDITION_2023: return "EDITION_2023"; case Edition.EDITION_2024: return "EDITION_2024"; case Edition.EDITION_1_TEST_ONLY: return "EDITION_1_TEST_ONLY"; case Edition.EDITION_2_TEST_ONLY: return "EDITION_2_TEST_ONLY"; case Edition.EDITION_99997_TEST_ONLY: return "EDITION_99997_TEST_ONLY"; case Edition.EDITION_99998_TEST_ONLY: return "EDITION_99998_TEST_ONLY"; case Edition.EDITION_99999_TEST_ONLY: return "EDITION_99999_TEST_ONLY"; case Edition.EDITION_MAX: return "EDITION_MAX"; default: throw new globalThis.Error("Unrecognized enum value " + object + " for enum Edition"); } } /** * The protocol compiler can output a FileDescriptorSet containing the .proto * files it parses. */ export interface FileDescriptorSet { file: FileDescriptorProto[]; } /** Describes a complete .proto file. */ export interface FileDescriptorProto { /** file name, relative to root of source tree */ name?: | string | undefined; /** e.g. "foo", "foo.bar", etc. */ package?: | string | undefined; /** Names of files imported by this file. */ dependency: string[]; /** Indexes of the public imported files in the dependency list above. */ publicDependency: number[]; /** * Indexes of the weak imported files in the dependency list. * For Google-internal migration only. Do not use. */ weakDependency: number[]; /** All top-level definitions in this file. */ messageType: DescriptorProto[]; enumType: EnumDescriptorProto[]; service: ServiceDescriptorProto[]; extension: FieldDescriptorProto[]; options?: | FileOptions | undefined; /** * This field contains optional information about the original source code. * You may safely remove this entire field without harming runtime * functionality of the descriptors -- the information is needed only by * development tools. */ sourceCodeInfo?: | SourceCodeInfo | undefined; /** * The syntax of the proto file. * The supported values are "proto2", "proto3", and "editions". * * If `edition` is present, this value must be "editions". */ syntax?: | string | undefined; /** The edition of the proto file. */ edition?: Edition | undefined; } /** Describes a message type. 
*/ export interface DescriptorProto { name?: string | undefined; field: FieldDescriptorProto[]; extension: FieldDescriptorProto[]; nestedType: DescriptorProto[]; enumType: EnumDescriptorProto[]; extensionRange: DescriptorProto_ExtensionRange[]; oneofDecl: OneofDescriptorProto[]; options?: MessageOptions | undefined; reservedRange: DescriptorProto_ReservedRange[]; /** * Reserved field names, which may not be used by fields in the same message. * A given name may only be reserved once. */ reservedName: string[]; } export interface DescriptorProto_ExtensionRange { /** Inclusive. */ start?: | number | undefined; /** Exclusive. */ end?: number | undefined; options?: ExtensionRangeOptions | undefined; } /** * Range of reserved tag numbers. Reserved tag numbers may not be used by * fields or extension ranges in the same message. Reserved ranges may * not overlap. */ export interface DescriptorProto_ReservedRange { /** Inclusive. */ start?: | number | undefined; /** Exclusive. */ end?: number | undefined; } export interface ExtensionRangeOptions { /** The parser stores options it doesn't recognize here. See above. */ uninterpretedOption: UninterpretedOption[]; /** * For external users: DO NOT USE. We are in the process of open sourcing * extension declaration and executing internal cleanups before it can be * used externally. */ declaration: ExtensionRangeOptions_Declaration[]; /** Any features defined in the specific edition. */ features?: | FeatureSet | undefined; /** * The verification state of the range. * TODO: flip the default to DECLARATION once all empty ranges * are marked as UNVERIFIED. */ verification?: ExtensionRangeOptions_VerificationState | undefined; } /** The verification state of the extension range. */ export enum ExtensionRangeOptions_VerificationState { /** DECLARATION - All the extensions of the range must be declared. */ DECLARATION = 0, UNVERIFIED = 1, } export function extensionRangeOptions_VerificationStateFromJSON(object: any): ExtensionRangeOptions_VerificationState { switch (object) { case 0: case "DECLARATION": return ExtensionRangeOptions_VerificationState.DECLARATION; case 1: case "UNVERIFIED": return ExtensionRangeOptions_VerificationState.UNVERIFIED; default: throw new globalThis.Error( "Unrecognized enum value " + object + " for enum ExtensionRangeOptions_VerificationState", ); } } export function extensionRangeOptions_VerificationStateToJSON(object: ExtensionRangeOptions_VerificationState): string { switch (object) { case ExtensionRangeOptions_VerificationState.DECLARATION: return "DECLARATION"; case ExtensionRangeOptions_VerificationState.UNVERIFIED: return "UNVERIFIED"; default: throw new globalThis.Error( "Unrecognized enum value " + object + " for enum ExtensionRangeOptions_VerificationState", ); } } export interface ExtensionRangeOptions_Declaration { /** The extension number declared within the extension range. */ number?: | number | undefined; /** * The fully-qualified name of the extension field. There must be a leading * dot in front of the full name. */ fullName?: | string | undefined; /** * The fully-qualified type name of the extension field. Unlike * Metadata.type, Declaration.type must have a leading dot for messages * and enums. */ type?: | string | undefined; /** * If true, indicates that the number is reserved in the extension range, * and any extension field with the number will fail to compile. Set this * when a declared extension field is deleted. 
*/ reserved?: | boolean | undefined; /** * If true, indicates that the extension must be defined as repeated. * Otherwise the extension must be defined as optional. */ repeated?: boolean | undefined; } /** Describes a field within a message. */ export interface FieldDescriptorProto { name?: string | undefined; number?: number | undefined; label?: | FieldDescriptorProto_Label | undefined; /** * If type_name is set, this need not be set. If both this and type_name * are set, this must be one of TYPE_ENUM, TYPE_MESSAGE or TYPE_GROUP. */ type?: | FieldDescriptorProto_Type | undefined; /** * For message and enum types, this is the name of the type. If the name * starts with a '.', it is fully-qualified. Otherwise, C++-like scoping * rules are used to find the type (i.e. first the nested types within this * message are searched, then within the parent, on up to the root * namespace). */ typeName?: | string | undefined; /** * For extensions, this is the name of the type being extended. It is * resolved in the same manner as type_name. */ extendee?: | string | undefined; /** * For numeric types, contains the original text representation of the value. * For booleans, "true" or "false". * For strings, contains the default text contents (not escaped in any way). * For bytes, contains the C escaped value. All bytes >= 128 are escaped. */ defaultValue?: | string | undefined; /** * If set, gives the index of a oneof in the containing type's oneof_decl * list. This field is a member of that oneof. */ oneofIndex?: | number | undefined; /** * JSON name of this field. The value is set by protocol compiler. If the * user has set a "json_name" option on this field, that option's value * will be used. Otherwise, it's deduced from the field's name by converting * it to camelCase. */ jsonName?: string | undefined; options?: | FieldOptions | undefined; /** * If true, this is a proto3 "optional". When a proto3 field is optional, it * tracks presence regardless of field type. * * When proto3_optional is true, this field must belong to a oneof to signal * to old proto3 clients that presence is tracked for this field. This oneof * is known as a "synthetic" oneof, and this field must be its sole member * (each proto3 optional field gets its own synthetic oneof). Synthetic oneofs * exist in the descriptor only, and do not generate any API. Synthetic oneofs * must be ordered after all "real" oneofs. * * For message fields, proto3_optional doesn't create any semantic change, * since non-repeated message fields always track presence. However it still * indicates the semantic detail of whether the user wrote "optional" or not. * This can be useful for round-tripping the .proto file. For consistency we * give message fields a synthetic oneof also, even though it is not required * to track presence. This is especially important because the parser can't * tell if a field is a message or an enum, so it must always create a * synthetic oneof. * * Proto2 optional fields do not set this flag, because they already indicate * optional with `LABEL_OPTIONAL`. */ proto3Optional?: boolean | undefined; } export enum FieldDescriptorProto_Type { /** * TYPE_DOUBLE - 0 is reserved for errors. * Order is weird for historical reasons. */ TYPE_DOUBLE = 1, TYPE_FLOAT = 2, /** * TYPE_INT64 - Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if * negative values are likely. */ TYPE_INT64 = 3, TYPE_UINT64 = 4, /** * TYPE_INT32 - Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if * negative values are likely. 
*/ TYPE_INT32 = 5, TYPE_FIXED64 = 6, TYPE_FIXED32 = 7, TYPE_BOOL = 8, TYPE_STRING = 9, /** * TYPE_GROUP - Tag-delimited aggregate. * Group type is deprecated and not supported after google.protobuf. However, Proto3 * implementations should still be able to parse the group wire format and * treat group fields as unknown fields. In Editions, the group wire format * can be enabled via the `message_encoding` feature. */ TYPE_GROUP = 10, /** TYPE_MESSAGE - Length-delimited aggregate. */ TYPE_MESSAGE = 11, /** TYPE_BYTES - New in version 2. */ TYPE_BYTES = 12, TYPE_UINT32 = 13, TYPE_ENUM = 14, TYPE_SFIXED32 = 15, TYPE_SFIXED64 = 16, /** TYPE_SINT32 - Uses ZigZag encoding. */ TYPE_SINT32 = 17, /** TYPE_SINT64 - Uses ZigZag encoding. */ TYPE_SINT64 = 18, } export function fieldDescriptorProto_TypeFromJSON(object: any): FieldDescriptorProto_Type { switch (object) { case 1: case "TYPE_DOUBLE": return FieldDescriptorProto_Type.TYPE_DOUBLE; case 2: case "TYPE_FLOAT": return FieldDescriptorProto_Type.TYPE_FLOAT; case 3: case "TYPE_INT64": return FieldDescriptorProto_Type.TYPE_INT64; case 4: case "TYPE_UINT64": return FieldDescriptorProto_Type.TYPE_UINT64; case 5: case "TYPE_INT32": return FieldDescriptorProto_Type.TYPE_INT32; case 6: case "TYPE_FIXED64": return FieldDescriptorProto_Type.TYPE_FIXED64; case 7: case "TYPE_FIXED32": return FieldDescriptorProto_Type.TYPE_FIXED32; case 8: case "TYPE_BOOL": return FieldDescriptorProto_Type.TYPE_BOOL; case 9: case "TYPE_STRING": return FieldDescriptorProto_Type.TYPE_STRING; case 10: case "TYPE_GROUP": return FieldDescriptorProto_Type.TYPE_GROUP; case 11: case "TYPE_MESSAGE": return FieldDescriptorProto_Type.TYPE_MESSAGE; case 12: case "TYPE_BYTES": return FieldDescriptorProto_Type.TYPE_BYTES; case 13: case "TYPE_UINT32": return FieldDescriptorProto_Type.TYPE_UINT32; case 14: case "TYPE_ENUM": return FieldDescriptorProto_Type.TYPE_ENUM; case 15: case "TYPE_SFIXED32": return FieldDescriptorProto_Type.TYPE_SFIXED32; case 16: case "TYPE_SFIXED64": return FieldDescriptorProto_Type.TYPE_SFIXED64; case 17: case "TYPE_SINT32": return FieldDescriptorProto_Type.TYPE_SINT32; case 18: case "TYPE_SINT64": return FieldDescriptorProto_Type.TYPE_SINT64; default: throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Type"); } } export function fieldDescriptorProto_TypeToJSON(object: FieldDescriptorProto_Type): string { switch (object) { case FieldDescriptorProto_Type.TYPE_DOUBLE: return "TYPE_DOUBLE"; case FieldDescriptorProto_Type.TYPE_FLOAT: return "TYPE_FLOAT"; case FieldDescriptorProto_Type.TYPE_INT64: return "TYPE_INT64"; case FieldDescriptorProto_Type.TYPE_UINT64: return "TYPE_UINT64"; case FieldDescriptorProto_Type.TYPE_INT32: return "TYPE_INT32"; case FieldDescriptorProto_Type.TYPE_FIXED64: return "TYPE_FIXED64"; case FieldDescriptorProto_Type.TYPE_FIXED32: return "TYPE_FIXED32"; case FieldDescriptorProto_Type.TYPE_BOOL: return "TYPE_BOOL"; case FieldDescriptorProto_Type.TYPE_STRING: return "TYPE_STRING"; case FieldDescriptorProto_Type.TYPE_GROUP: return "TYPE_GROUP"; case FieldDescriptorProto_Type.TYPE_MESSAGE: return "TYPE_MESSAGE"; case FieldDescriptorProto_Type.TYPE_BYTES: return "TYPE_BYTES"; case FieldDescriptorProto_Type.TYPE_UINT32: return "TYPE_UINT32"; case FieldDescriptorProto_Type.TYPE_ENUM: return "TYPE_ENUM"; case FieldDescriptorProto_Type.TYPE_SFIXED32: return "TYPE_SFIXED32"; case FieldDescriptorProto_Type.TYPE_SFIXED64: return "TYPE_SFIXED64"; case FieldDescriptorProto_Type.TYPE_SINT32: return "TYPE_SINT32"; 
case FieldDescriptorProto_Type.TYPE_SINT64: return "TYPE_SINT64"; default: throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Type"); } } export enum FieldDescriptorProto_Label { /** LABEL_OPTIONAL - 0 is reserved for errors */ LABEL_OPTIONAL = 1, LABEL_REPEATED = 3, /** * LABEL_REQUIRED - The required label is only allowed in google.protobuf. In proto3 and Editions * it's explicitly prohibited. In Editions, the `field_presence` feature * can be used to get this behavior. */ LABEL_REQUIRED = 2, } export function fieldDescriptorProto_LabelFromJSON(object: any): FieldDescriptorProto_Label { switch (object) { case 1: case "LABEL_OPTIONAL": return FieldDescriptorProto_Label.LABEL_OPTIONAL; case 3: case "LABEL_REPEATED": return FieldDescriptorProto_Label.LABEL_REPEATED; case 2: case "LABEL_REQUIRED": return FieldDescriptorProto_Label.LABEL_REQUIRED; default: throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Label"); } } export function fieldDescriptorProto_LabelToJSON(object: FieldDescriptorProto_Label): string { switch (object) { case FieldDescriptorProto_Label.LABEL_OPTIONAL: return "LABEL_OPTIONAL"; case FieldDescriptorProto_Label.LABEL_REPEATED: return "LABEL_REPEATED"; case FieldDescriptorProto_Label.LABEL_REQUIRED: return "LABEL_REQUIRED"; default: throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldDescriptorProto_Label"); } } /** Describes a oneof. */ export interface OneofDescriptorProto { name?: string | undefined; options?: OneofOptions | undefined; } /** Describes an enum type. */ export interface EnumDescriptorProto { name?: string | undefined; value: EnumValueDescriptorProto[]; options?: | EnumOptions | undefined; /** * Range of reserved numeric values. Reserved numeric values may not be used * by enum values in the same enum declaration. Reserved ranges may not * overlap. */ reservedRange: EnumDescriptorProto_EnumReservedRange[]; /** * Reserved enum value names, which may not be reused. A given name may only * be reserved once. */ reservedName: string[]; } /** * Range of reserved numeric values. Reserved values may not be used by * entries in the same enum. Reserved ranges may not overlap. * * Note that this is distinct from DescriptorProto.ReservedRange in that it * is inclusive such that it can appropriately represent the entire int32 * domain. */ export interface EnumDescriptorProto_EnumReservedRange { /** Inclusive. */ start?: | number | undefined; /** Inclusive. */ end?: number | undefined; } /** Describes a value within an enum. */ export interface EnumValueDescriptorProto { name?: string | undefined; number?: number | undefined; options?: EnumValueOptions | undefined; } /** Describes a service. */ export interface ServiceDescriptorProto { name?: string | undefined; method: MethodDescriptorProto[]; options?: ServiceOptions | undefined; } /** Describes a method of a service. */ export interface MethodDescriptorProto { name?: | string | undefined; /** * Input and output type names. These are resolved in the same way as * FieldDescriptorProto.type_name, but must refer to a message type. 
*/ inputType?: string | undefined; outputType?: string | undefined; options?: | MethodOptions | undefined; /** Identifies if client streams multiple client messages */ clientStreaming?: | boolean | undefined; /** Identifies if server streams multiple server messages */ serverStreaming?: boolean | undefined; } export interface FileOptions { /** * Sets the Java package where classes generated from this .proto will be * placed. By default, the proto package is used, but this is often * inappropriate because proto packages do not normally start with backwards * domain names. */ javaPackage?: | string | undefined; /** * Controls the name of the wrapper Java class generated for the .proto file. * That class will always contain the .proto file's getDescriptor() method as * well as any top-level extensions defined in the .proto file. * If java_multiple_files is disabled, then all the other classes from the * .proto file will be nested inside the single wrapper outer class. */ javaOuterClassname?: | string | undefined; /** * If enabled, then the Java code generator will generate a separate .java * file for each top-level message, enum, and service defined in the .proto * file. Thus, these types will *not* be nested inside the wrapper class * named by java_outer_classname. However, the wrapper class will still be * generated to contain the file's getDescriptor() method as well as any * top-level extensions defined in the file. */ javaMultipleFiles?: | boolean | undefined; /** * This option does nothing. * * @deprecated */ javaGenerateEqualsAndHash?: | boolean | undefined; /** * A proto2 file can set this to true to opt in to UTF-8 checking for Java, * which will throw an exception if invalid UTF-8 is parsed from the wire or * assigned to a string field. * * TODO: clarify exactly what kinds of field types this option * applies to, and update these docs accordingly. * * Proto3 files already perform these checks. Setting the option explicitly to * false has no effect: it cannot be used to opt proto3 files out of UTF-8 * checks. */ javaStringCheckUtf8?: boolean | undefined; optimizeFor?: | FileOptions_OptimizeMode | undefined; /** * Sets the Go package where structs generated from this .proto will be * placed. If omitted, the Go package will be derived from the following: * - The basename of the package import path, if provided. * - Otherwise, the package statement in the .proto file, if present. * - Otherwise, the basename of the .proto file, without extension. */ goPackage?: | string | undefined; /** * Should generic services be generated in each language? "Generic" services * are not specific to any particular RPC system. They are generated by the * main code generators in each language (without additional plugins). * Generic services were the only kind of service generation supported by * early versions of google.protobuf. * * Generic services are now considered deprecated in favor of using plugins * that generate code specific to your particular RPC system. Therefore, * these default to false. Old code which depends on generic services should * explicitly set them to true. */ ccGenericServices?: boolean | undefined; javaGenericServices?: boolean | undefined; pyGenericServices?: | boolean | undefined; /** * Is this file deprecated? * Depending on the target platform, this can emit Deprecated annotations * for everything in the file, or it will be completely ignored; in the very * least, this is a formalization for deprecating files. 
*/ deprecated?: | boolean | undefined; /** * Enables the use of arenas for the proto messages in this file. This applies * only to generated classes for C++. */ ccEnableArenas?: | boolean | undefined; /** * Sets the objective c class prefix which is prepended to all objective c * generated classes from this .proto. There is no default. */ objcClassPrefix?: | string | undefined; /** Namespace for generated classes; defaults to the package. */ csharpNamespace?: | string | undefined; /** * By default Swift generators will take the proto package and CamelCase it * replacing '.' with underscore and use that to prefix the types/symbols * defined. When this options is provided, they will use this value instead * to prefix the types/symbols defined. */ swiftPrefix?: | string | undefined; /** * Sets the php class prefix which is prepended to all php generated classes * from this .proto. Default is empty. */ phpClassPrefix?: | string | undefined; /** * Use this option to change the namespace of php generated classes. Default * is empty. When this option is empty, the package name will be used for * determining the namespace. */ phpNamespace?: | string | undefined; /** * Use this option to change the namespace of php generated metadata classes. * Default is empty. When this option is empty, the proto file name will be * used for determining the namespace. */ phpMetadataNamespace?: | string | undefined; /** * Use this option to change the package of ruby generated classes. Default * is empty. When this option is not set, the package name will be used for * determining the ruby package. */ rubyPackage?: | string | undefined; /** Any features defined in the specific edition. */ features?: | FeatureSet | undefined; /** * The parser stores options it doesn't recognize here. * See the documentation for the "Options" section above. */ uninterpretedOption: UninterpretedOption[]; } /** Generated classes can be optimized for speed or code size. */ export enum FileOptions_OptimizeMode { /** SPEED - Generate complete code for parsing, serialization, */ SPEED = 1, /** CODE_SIZE - etc. */ CODE_SIZE = 2, /** LITE_RUNTIME - Generate code using MessageLite and the lite runtime. */ LITE_RUNTIME = 3, } export function fileOptions_OptimizeModeFromJSON(object: any): FileOptions_OptimizeMode { switch (object) { case 1: case "SPEED": return FileOptions_OptimizeMode.SPEED; case 2: case "CODE_SIZE": return FileOptions_OptimizeMode.CODE_SIZE; case 3: case "LITE_RUNTIME": return FileOptions_OptimizeMode.LITE_RUNTIME; default: throw new globalThis.Error("Unrecognized enum value " + object + " for enum FileOptions_OptimizeMode"); } } export function fileOptions_OptimizeModeToJSON(object: FileOptions_OptimizeMode): string { switch (object) { case FileOptions_OptimizeMode.SPEED: return "SPEED"; case FileOptions_OptimizeMode.CODE_SIZE: return "CODE_SIZE"; case FileOptions_OptimizeMode.LITE_RUNTIME: return "LITE_RUNTIME"; default: throw new globalThis.Error("Unrecognized enum value " + object + " for enum FileOptions_OptimizeMode"); } } export interface MessageOptions { /** * Set true to use the old proto1 MessageSet wire format for extensions. * This is provided for backwards-compatibility with the MessageSet wire * format. You should not use this for any other reason: It's less * efficient, has fewer features, and is more complicated. 
* * The message must be defined exactly as follows: * message Foo { * option message_set_wire_format = true; * extensions 4 to max; * } * Note that the message cannot have any defined fields; MessageSets only * have extensions. * * All extensions of your type must be singular messages; e.g. they cannot * be int32s, enums, or repeated messages. * * Because this is an option, the above two restrictions are not enforced by * the protocol compiler. */ messageSetWireFormat?: | boolean | undefined; /** * Disables the generation of the standard "descriptor()" accessor, which can * conflict with a field of the same name. This is meant to make migration * from proto1 easier; new code should avoid fields named "descriptor". */ noStandardDescriptorAccessor?: | boolean | undefined; /** * Is this message deprecated? * Depending on the target platform, this can emit Deprecated annotations * for the message, or it will be completely ignored; in the very least, * this is a formalization for deprecating messages. */ deprecated?: | boolean | undefined; /** * Whether the message is an automatically generated map entry type for the * maps field. * * For maps fields: * map map_field = 1; * The parsed descriptor looks like: * message MapFieldEntry { * option map_entry = true; * optional KeyType key = 1; * optional ValueType value = 2; * } * repeated MapFieldEntry map_field = 1; * * Implementations may choose not to generate the map_entry=true message, but * use a native map in the target language to hold the keys and values. * The reflection APIs in such implementations still need to work as * if the field is a repeated message field. * * NOTE: Do not set the option in .proto files. Always use the maps syntax * instead. The option should only be implicitly set by the proto compiler * parser. */ mapEntry?: | boolean | undefined; /** * Enable the legacy handling of JSON field name conflicts. This lowercases * and strips underscored from the fields before comparison in proto3 only. * The new behavior takes `json_name` into account and applies to proto2 as * well. * * This should only be used as a temporary measure against broken builds due * to the change in behavior for JSON field name conflicts. * * TODO This is legacy behavior we plan to remove once downstream * teams have had time to migrate. * * @deprecated */ deprecatedLegacyJsonFieldConflicts?: | boolean | undefined; /** Any features defined in the specific edition. */ features?: | FeatureSet | undefined; /** The parser stores options it doesn't recognize here. See above. */ uninterpretedOption: UninterpretedOption[]; } export interface FieldOptions { /** * NOTE: ctype is deprecated. Use `features.(pb.cpp).string_type` instead. * The ctype option instructs the C++ code generator to use a different * representation of the field than it normally would. See the specific * options below. This option is only implemented to support use of * [ctype=CORD] and [ctype=STRING] (the default) on non-repeated fields of * type "bytes" in the open source release. * TODO: make ctype actually deprecated. */ ctype?: | FieldOptions_CType | undefined; /** * The packed option can be enabled for repeated primitive fields to enable * a more efficient representation on the wire. Rather than repeatedly * writing the tag and type for each element, the entire array is encoded as * a single length-delimited blob. In proto3, only explicit setting it to * false will avoid using packed encoding. 
This option is prohibited in * Editions, but the `repeated_field_encoding` feature can be used to control * the behavior. */ packed?: | boolean | undefined; /** * The jstype option determines the JavaScript type used for values of the * field. The option is permitted only for 64 bit integral and fixed types * (int64, uint64, sint64, fixed64, sfixed64). A field with jstype JS_STRING * is represented as JavaScript string, which avoids loss of precision that * can happen when a large value is converted to a floating point JavaScript. * Specifying JS_NUMBER for the jstype causes the generated JavaScript code to * use the JavaScript "number" type. The behavior of the default option * JS_NORMAL is implementation dependent. * * This option is an enum to permit additional types to be added, e.g. * goog.math.Integer. */ jstype?: | FieldOptions_JSType | undefined; /** * Should this field be parsed lazily? Lazy applies only to message-type * fields. It means that when the outer message is initially parsed, the * inner message's contents will not be parsed but instead stored in encoded * form. The inner message will actually be parsed when it is first accessed. * * This is only a hint. Implementations are free to choose whether to use * eager or lazy parsing regardless of the value of this option. However, * setting this option true suggests that the protocol author believes that * using lazy parsing on this field is worth the additional bookkeeping * overhead typically needed to implement it. * * This option does not affect the public interface of any generated code; * all method signatures remain the same. Furthermore, thread-safety of the * interface is not affected by this option; const methods remain safe to * call from multiple threads concurrently, while non-const methods continue * to require exclusive access. * * Note that lazy message fields are still eagerly verified to check * ill-formed wireformat or missing required fields. Calling IsInitialized() * on the outer message would fail if the inner message has missing required * fields. Failed verification would result in parsing failure (except when * uninitialized messages are acceptable). */ lazy?: | boolean | undefined; /** * unverified_lazy does no correctness checks on the byte stream. This should * only be used where lazy with verification is prohibitive for performance * reasons. */ unverifiedLazy?: | boolean | undefined; /** * Is this field deprecated? * Depending on the target platform, this can emit Deprecated annotations * for accessors, or it will be completely ignored; in the very least, this * is a formalization for deprecating fields. */ deprecated?: | boolean | undefined; /** For Google-internal migration only. Do not use. */ weak?: | boolean | undefined; /** * Indicate that the field value should not be printed out when using debug * formats, e.g. when the field contains sensitive credentials. */ debugRedact?: boolean | undefined; retention?: FieldOptions_OptionRetention | undefined; targets: FieldOptions_OptionTargetType[]; editionDefaults: FieldOptions_EditionDefault[]; /** Any features defined in the specific edition. */ features?: FeatureSet | undefined; featureSupport?: | FieldOptions_FeatureSupport | undefined; /** The parser stores options it doesn't recognize here. See above. */ uninterpretedOption: UninterpretedOption[]; } export enum FieldOptions_CType { /** STRING - Default mode. */ STRING = 0, /** * CORD - The option [ctype=CORD] may be applied to a non-repeated field of type * "bytes". 
It indicates that in C++, the data should be stored in a Cord * instead of a string. For very large strings, this may reduce memory * fragmentation. It may also allow better performance when parsing from a * Cord, or when parsing with aliasing enabled, as the parsed Cord may then * alias the original buffer. */ CORD = 1, STRING_PIECE = 2, } export function fieldOptions_CTypeFromJSON(object: any): FieldOptions_CType { switch (object) { case 0: case "STRING": return FieldOptions_CType.STRING; case 1: case "CORD": return FieldOptions_CType.CORD; case 2: case "STRING_PIECE": return FieldOptions_CType.STRING_PIECE; default: throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_CType"); } } export function fieldOptions_CTypeToJSON(object: FieldOptions_CType): string { switch (object) { case FieldOptions_CType.STRING: return "STRING"; case FieldOptions_CType.CORD: return "CORD"; case FieldOptions_CType.STRING_PIECE: return "STRING_PIECE"; default: throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_CType"); } } export enum FieldOptions_JSType { /** JS_NORMAL - Use the default type. */ JS_NORMAL = 0, /** JS_STRING - Use JavaScript strings. */ JS_STRING = 1, /** JS_NUMBER - Use JavaScript numbers. */ JS_NUMBER = 2, } export function fieldOptions_JSTypeFromJSON(object: any): FieldOptions_JSType { switch (object) { case 0: case "JS_NORMAL": return FieldOptions_JSType.JS_NORMAL; case 1: case "JS_STRING": return FieldOptions_JSType.JS_STRING; case 2: case "JS_NUMBER": return FieldOptions_JSType.JS_NUMBER; default: throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_JSType"); } } export function fieldOptions_JSTypeToJSON(object: FieldOptions_JSType): string { switch (object) { case FieldOptions_JSType.JS_NORMAL: return "JS_NORMAL"; case FieldOptions_JSType.JS_STRING: return "JS_STRING"; case FieldOptions_JSType.JS_NUMBER: return "JS_NUMBER"; default: throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_JSType"); } } /** If set to RETENTION_SOURCE, the option will be omitted from the binary. */ export enum FieldOptions_OptionRetention { RETENTION_UNKNOWN = 0, RETENTION_RUNTIME = 1, RETENTION_SOURCE = 2, } export function fieldOptions_OptionRetentionFromJSON(object: any): FieldOptions_OptionRetention { switch (object) { case 0: case "RETENTION_UNKNOWN": return FieldOptions_OptionRetention.RETENTION_UNKNOWN; case 1: case "RETENTION_RUNTIME": return FieldOptions_OptionRetention.RETENTION_RUNTIME; case 2: case "RETENTION_SOURCE": return FieldOptions_OptionRetention.RETENTION_SOURCE; default: throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_OptionRetention"); } } export function fieldOptions_OptionRetentionToJSON(object: FieldOptions_OptionRetention): string { switch (object) { case FieldOptions_OptionRetention.RETENTION_UNKNOWN: return "RETENTION_UNKNOWN"; case FieldOptions_OptionRetention.RETENTION_RUNTIME: return "RETENTION_RUNTIME"; case FieldOptions_OptionRetention.RETENTION_SOURCE: return "RETENTION_SOURCE"; default: throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_OptionRetention"); } } /** * This indicates the types of entities that the field may apply to when used * as an option. If it is unset, then the field may be freely used as an * option on any kind of entity. 
*/ export enum FieldOptions_OptionTargetType { TARGET_TYPE_UNKNOWN = 0, TARGET_TYPE_FILE = 1, TARGET_TYPE_EXTENSION_RANGE = 2, TARGET_TYPE_MESSAGE = 3, TARGET_TYPE_FIELD = 4, TARGET_TYPE_ONEOF = 5, TARGET_TYPE_ENUM = 6, TARGET_TYPE_ENUM_ENTRY = 7, TARGET_TYPE_SERVICE = 8, TARGET_TYPE_METHOD = 9, } export function fieldOptions_OptionTargetTypeFromJSON(object: any): FieldOptions_OptionTargetType { switch (object) { case 0: case "TARGET_TYPE_UNKNOWN": return FieldOptions_OptionTargetType.TARGET_TYPE_UNKNOWN; case 1: case "TARGET_TYPE_FILE": return FieldOptions_OptionTargetType.TARGET_TYPE_FILE; case 2: case "TARGET_TYPE_EXTENSION_RANGE": return FieldOptions_OptionTargetType.TARGET_TYPE_EXTENSION_RANGE; case 3: case "TARGET_TYPE_MESSAGE": return FieldOptions_OptionTargetType.TARGET_TYPE_MESSAGE; case 4: case "TARGET_TYPE_FIELD": return FieldOptions_OptionTargetType.TARGET_TYPE_FIELD; case 5: case "TARGET_TYPE_ONEOF": return FieldOptions_OptionTargetType.TARGET_TYPE_ONEOF; case 6: case "TARGET_TYPE_ENUM": return FieldOptions_OptionTargetType.TARGET_TYPE_ENUM; case 7: case "TARGET_TYPE_ENUM_ENTRY": return FieldOptions_OptionTargetType.TARGET_TYPE_ENUM_ENTRY; case 8: case "TARGET_TYPE_SERVICE": return FieldOptions_OptionTargetType.TARGET_TYPE_SERVICE; case 9: case "TARGET_TYPE_METHOD": return FieldOptions_OptionTargetType.TARGET_TYPE_METHOD; default: throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_OptionTargetType"); } } export function fieldOptions_OptionTargetTypeToJSON(object: FieldOptions_OptionTargetType): string { switch (object) { case FieldOptions_OptionTargetType.TARGET_TYPE_UNKNOWN: return "TARGET_TYPE_UNKNOWN"; case FieldOptions_OptionTargetType.TARGET_TYPE_FILE: return "TARGET_TYPE_FILE"; case FieldOptions_OptionTargetType.TARGET_TYPE_EXTENSION_RANGE: return "TARGET_TYPE_EXTENSION_RANGE"; case FieldOptions_OptionTargetType.TARGET_TYPE_MESSAGE: return "TARGET_TYPE_MESSAGE"; case FieldOptions_OptionTargetType.TARGET_TYPE_FIELD: return "TARGET_TYPE_FIELD"; case FieldOptions_OptionTargetType.TARGET_TYPE_ONEOF: return "TARGET_TYPE_ONEOF"; case FieldOptions_OptionTargetType.TARGET_TYPE_ENUM: return "TARGET_TYPE_ENUM"; case FieldOptions_OptionTargetType.TARGET_TYPE_ENUM_ENTRY: return "TARGET_TYPE_ENUM_ENTRY"; case FieldOptions_OptionTargetType.TARGET_TYPE_SERVICE: return "TARGET_TYPE_SERVICE"; case FieldOptions_OptionTargetType.TARGET_TYPE_METHOD: return "TARGET_TYPE_METHOD"; default: throw new globalThis.Error("Unrecognized enum value " + object + " for enum FieldOptions_OptionTargetType"); } } export interface FieldOptions_EditionDefault { edition?: | Edition | undefined; /** Textproto value. */ value?: string | undefined; } /** Information about the support window of a feature. */ export interface FieldOptions_FeatureSupport { /** * The edition that this feature was first available in. In editions * earlier than this one, the default assigned to EDITION_LEGACY will be * used, and proto files will not be able to override it. */ editionIntroduced?: | Edition | undefined; /** * The edition this feature becomes deprecated in. Using this after this * edition may trigger warnings. */ editionDeprecated?: | Edition | undefined; /** * The deprecation warning text if this feature is used after the edition it * was marked deprecated in. */ deprecationWarning?: | string | undefined; /** * The edition this feature is no longer available in. In editions after * this one, the last default assigned will be used, and proto files will * not be able to override it. 
*/ editionRemoved?: Edition | undefined; } export interface OneofOptions { /** Any features defined in the specific edition. */ features?: | FeatureSet | undefined; /** The parser stores options it doesn't recognize here. See above. */ uninterpretedOption: UninterpretedOption[]; } export interface EnumOptions { /** * Set this option to true to allow mapping different tag names to the same * value. */ allowAlias?: | boolean | undefined; /** * Is this enum deprecated? * Depending on the target platform, this can emit Deprecated annotations * for the enum, or it will be completely ignored; in the very least, this * is a formalization for deprecating enums. */ deprecated?: | boolean | undefined; /** * Enable the legacy handling of JSON field name conflicts. This lowercases * and strips underscored from the fields before comparison in proto3 only. * The new behavior takes `json_name` into account and applies to proto2 as * well. * TODO Remove this legacy behavior once downstream teams have * had time to migrate. * * @deprecated */ deprecatedLegacyJsonFieldConflicts?: | boolean | undefined; /** Any features defined in the specific edition. */ features?: | FeatureSet | undefined; /** The parser stores options it doesn't recognize here. See above. */ uninterpretedOption: UninterpretedOption[]; } export interface EnumValueOptions { /** * Is this enum value deprecated? * Depending on the target platform, this can emit Deprecated annotations * for the enum value, or it will be completely ignored; in the very least, * this is a formalization for deprecating enum values. */ deprecated?: | boolean | undefined; /** Any features defined in the specific edition. */ features?: | FeatureSet | undefined; /** * Indicate that fields annotated with this enum value should not be printed * out when using debug formats, e.g. when the field contains sensitive * credentials. */ debugRedact?: | boolean | undefined; /** Information about the support window of a feature value. */ featureSupport?: | FieldOptions_FeatureSupport | undefined; /** The parser stores options it doesn't recognize here. See above. */ uninterpretedOption: UninterpretedOption[]; } export interface ServiceOptions { /** Any features defined in the specific edition. */ features?: | FeatureSet | undefined; /** * Is this service deprecated? * Depending on the target platform, this can emit Deprecated annotations * for the service, or it will be completely ignored; in the very least, * this is a formalization for deprecating services. */ deprecated?: | boolean | undefined; /** The parser stores options it doesn't recognize here. See above. */ uninterpretedOption: UninterpretedOption[]; } export interface MethodOptions { /** * Is this method deprecated? * Depending on the target platform, this can emit Deprecated annotations * for the method, or it will be completely ignored; in the very least, * this is a formalization for deprecating methods. */ deprecated?: boolean | undefined; idempotencyLevel?: | MethodOptions_IdempotencyLevel | undefined; /** Any features defined in the specific edition. */ features?: | FeatureSet | undefined; /** The parser stores options it doesn't recognize here. See above. */ uninterpretedOption: UninterpretedOption[]; } /** * Is this method side-effect-free (or safe in HTTP parlance), or idempotent, * or neither? HTTP based RPC implementation may choose GET verb for safe * methods, and PUT verb for idempotent methods instead of the default POST. 
*/ export enum MethodOptions_IdempotencyLevel { IDEMPOTENCY_UNKNOWN = 0, /** NO_SIDE_EFFECTS - implies idempotent */ NO_SIDE_EFFECTS = 1, /** IDEMPOTENT - idempotent, but may have side effects */ IDEMPOTENT = 2, } export function methodOptions_IdempotencyLevelFromJSON(object: any): MethodOptions_IdempotencyLevel { switch (object) { case 0: case "IDEMPOTENCY_UNKNOWN": return MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN; case 1: case "NO_SIDE_EFFECTS": return MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS; case 2: case "IDEMPOTENT": return MethodOptions_IdempotencyLevel.IDEMPOTENT; default: throw new globalThis.Error("Unrecognized enum value " + object + " for enum MethodOptions_IdempotencyLevel"); } } export function methodOptions_IdempotencyLevelToJSON(object: MethodOptions_IdempotencyLevel): string { switch (object) { case MethodOptions_IdempotencyLevel.IDEMPOTENCY_UNKNOWN: return "IDEMPOTENCY_UNKNOWN"; case MethodOptions_IdempotencyLevel.NO_SIDE_EFFECTS: return "NO_SIDE_EFFECTS"; case MethodOptions_IdempotencyLevel.IDEMPOTENT: return "IDEMPOTENT"; default: throw new globalThis.Error("Unrecognized enum value " + object + " for enum MethodOptions_IdempotencyLevel"); } } /** * A message representing a option the parser does not recognize. This only * appears in options protos created by the compiler::Parser class. * DescriptorPool resolves these when building Descriptor objects. Therefore, * options protos in descriptor objects (e.g. returned by Descriptor::options(), * or produced by Descriptor::CopyTo()) will never have UninterpretedOptions * in them. */ export interface UninterpretedOption { name: UninterpretedOption_NamePart[]; /** * The value of the uninterpreted option, in whatever type the tokenizer * identified it as during parsing. Exactly one of these should be set. */ identifierValue?: string | undefined; positiveIntValue?: string | undefined; negativeIntValue?: string | undefined; doubleValue?: number | undefined; stringValue?: Buffer | undefined; aggregateValue?: string | undefined; } /** * The name of the uninterpreted option. Each string represents a segment in * a dot-separated name. is_extension is true iff a segment represents an * extension (denoted with parentheses in options specs in .proto files). * E.g.,{ ["foo", false], ["bar.baz", true], ["moo", false] } represents * "foo.(bar.baz).moo". */ export interface UninterpretedOption_NamePart { namePart: string; isExtension: boolean; } /** * TODO Enums in C++ gencode (and potentially other languages) are * not well scoped. This means that each of the feature enums below can clash * with each other. The short names we've chosen maximize call-site * readability, but leave us very open to this scenario. A future feature will * be designed and implemented to handle this, hopefully before we ever hit a * conflict here. 
*/ export interface FeatureSet { fieldPresence?: FeatureSet_FieldPresence | undefined; enumType?: FeatureSet_EnumType | undefined; repeatedFieldEncoding?: FeatureSet_RepeatedFieldEncoding | undefined; utf8Validation?: FeatureSet_Utf8Validation | undefined; messageEncoding?: FeatureSet_MessageEncoding | undefined; jsonFormat?: FeatureSet_JsonFormat | undefined; } export enum FeatureSet_FieldPresence { FIELD_PRESENCE_UNKNOWN = 0, EXPLICIT = 1, IMPLICIT = 2, LEGACY_REQUIRED = 3, } export function featureSet_FieldPresenceFromJSON(object: any): FeatureSet_FieldPresence { switch (object) { case 0: case "FIELD_PRESENCE_UNKNOWN": return FeatureSet_FieldPresence.FIELD_PRESENCE_UNKNOWN; case 1: case "EXPLICIT": return FeatureSet_FieldPresence.EXPLICIT; case 2: case "IMPLICIT": return FeatureSet_FieldPresence.IMPLICIT; case 3: case "LEGACY_REQUIRED": return FeatureSet_FieldPresence.LEGACY_REQUIRED; default: throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_FieldPresence"); } } export function featureSet_FieldPresenceToJSON(object: FeatureSet_FieldPresence): string { switch (object) { case FeatureSet_FieldPresence.FIELD_PRESENCE_UNKNOWN: return "FIELD_PRESENCE_UNKNOWN"; case FeatureSet_FieldPresence.EXPLICIT: return "EXPLICIT"; case FeatureSet_FieldPresence.IMPLICIT: return "IMPLICIT"; case FeatureSet_FieldPresence.LEGACY_REQUIRED: return "LEGACY_REQUIRED"; default: throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_FieldPresence"); } } export enum FeatureSet_EnumType { ENUM_TYPE_UNKNOWN = 0, OPEN = 1, CLOSED = 2, } export function featureSet_EnumTypeFromJSON(object: any): FeatureSet_EnumType { switch (object) { case 0: case "ENUM_TYPE_UNKNOWN": return FeatureSet_EnumType.ENUM_TYPE_UNKNOWN; case 1: case "OPEN": return FeatureSet_EnumType.OPEN; case 2: case "CLOSED": return FeatureSet_EnumType.CLOSED; default: throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_EnumType"); } } export function featureSet_EnumTypeToJSON(object: FeatureSet_EnumType): string { switch (object) { case FeatureSet_EnumType.ENUM_TYPE_UNKNOWN: return "ENUM_TYPE_UNKNOWN"; case FeatureSet_EnumType.OPEN: return "OPEN"; case FeatureSet_EnumType.CLOSED: return "CLOSED"; default: throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_EnumType"); } } export enum FeatureSet_RepeatedFieldEncoding { REPEATED_FIELD_ENCODING_UNKNOWN = 0, PACKED = 1, EXPANDED = 2, } export function featureSet_RepeatedFieldEncodingFromJSON(object: any): FeatureSet_RepeatedFieldEncoding { switch (object) { case 0: case "REPEATED_FIELD_ENCODING_UNKNOWN": return FeatureSet_RepeatedFieldEncoding.REPEATED_FIELD_ENCODING_UNKNOWN; case 1: case "PACKED": return FeatureSet_RepeatedFieldEncoding.PACKED; case 2: case "EXPANDED": return FeatureSet_RepeatedFieldEncoding.EXPANDED; default: throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_RepeatedFieldEncoding"); } } export function featureSet_RepeatedFieldEncodingToJSON(object: FeatureSet_RepeatedFieldEncoding): string { switch (object) { case FeatureSet_RepeatedFieldEncoding.REPEATED_FIELD_ENCODING_UNKNOWN: return "REPEATED_FIELD_ENCODING_UNKNOWN"; case FeatureSet_RepeatedFieldEncoding.PACKED: return "PACKED"; case FeatureSet_RepeatedFieldEncoding.EXPANDED: return "EXPANDED"; default: throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_RepeatedFieldEncoding"); } } export enum FeatureSet_Utf8Validation { 
UTF8_VALIDATION_UNKNOWN = 0, VERIFY = 2, NONE = 3, } export function featureSet_Utf8ValidationFromJSON(object: any): FeatureSet_Utf8Validation { switch (object) { case 0: case "UTF8_VALIDATION_UNKNOWN": return FeatureSet_Utf8Validation.UTF8_VALIDATION_UNKNOWN; case 2: case "VERIFY": return FeatureSet_Utf8Validation.VERIFY; case 3: case "NONE": return FeatureSet_Utf8Validation.NONE; default: throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_Utf8Validation"); } } export function featureSet_Utf8ValidationToJSON(object: FeatureSet_Utf8Validation): string { switch (object) { case FeatureSet_Utf8Validation.UTF8_VALIDATION_UNKNOWN: return "UTF8_VALIDATION_UNKNOWN"; case FeatureSet_Utf8Validation.VERIFY: return "VERIFY"; case FeatureSet_Utf8Validation.NONE: return "NONE"; default: throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_Utf8Validation"); } } export enum FeatureSet_MessageEncoding { MESSAGE_ENCODING_UNKNOWN = 0, LENGTH_PREFIXED = 1, DELIMITED = 2, } export function featureSet_MessageEncodingFromJSON(object: any): FeatureSet_MessageEncoding { switch (object) { case 0: case "MESSAGE_ENCODING_UNKNOWN": return FeatureSet_MessageEncoding.MESSAGE_ENCODING_UNKNOWN; case 1: case "LENGTH_PREFIXED": return FeatureSet_MessageEncoding.LENGTH_PREFIXED; case 2: case "DELIMITED": return FeatureSet_MessageEncoding.DELIMITED; default: throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_MessageEncoding"); } } export function featureSet_MessageEncodingToJSON(object: FeatureSet_MessageEncoding): string { switch (object) { case FeatureSet_MessageEncoding.MESSAGE_ENCODING_UNKNOWN: return "MESSAGE_ENCODING_UNKNOWN"; case FeatureSet_MessageEncoding.LENGTH_PREFIXED: return "LENGTH_PREFIXED"; case FeatureSet_MessageEncoding.DELIMITED: return "DELIMITED"; default: throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_MessageEncoding"); } } export enum FeatureSet_JsonFormat { JSON_FORMAT_UNKNOWN = 0, ALLOW = 1, LEGACY_BEST_EFFORT = 2, } export function featureSet_JsonFormatFromJSON(object: any): FeatureSet_JsonFormat { switch (object) { case 0: case "JSON_FORMAT_UNKNOWN": return FeatureSet_JsonFormat.JSON_FORMAT_UNKNOWN; case 1: case "ALLOW": return FeatureSet_JsonFormat.ALLOW; case 2: case "LEGACY_BEST_EFFORT": return FeatureSet_JsonFormat.LEGACY_BEST_EFFORT; default: throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_JsonFormat"); } } export function featureSet_JsonFormatToJSON(object: FeatureSet_JsonFormat): string { switch (object) { case FeatureSet_JsonFormat.JSON_FORMAT_UNKNOWN: return "JSON_FORMAT_UNKNOWN"; case FeatureSet_JsonFormat.ALLOW: return "ALLOW"; case FeatureSet_JsonFormat.LEGACY_BEST_EFFORT: return "LEGACY_BEST_EFFORT"; default: throw new globalThis.Error("Unrecognized enum value " + object + " for enum FeatureSet_JsonFormat"); } } /** * A compiled specification for the defaults of a set of features. These * messages are generated from FeatureSet extensions and can be used to seed * feature resolution. The resolution with this object becomes a simple search * for the closest matching edition, followed by proto merges. */ export interface FeatureSetDefaults { defaults: FeatureSetDefaults_FeatureSetEditionDefault[]; /** * The minimum supported edition (inclusive) when this was constructed. * Editions before this will not have defaults. 
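 *
 * (Editorial sketch, not upstream documentation: the "closest matching
 * edition" lookup described for this message can be written, for a
 * hypothetical `defaults` array and numeric `targetEdition`, as
 *
 *     const match = defaults.filter((d) => (d.edition ?? 0) <= targetEdition).pop();
 *
 * assuming `defaults` is sorted in ascending order by edition as required;
 * the result is undefined for editions before this minimum edition.)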
*/ minimumEdition?: | Edition | undefined; /** * The maximum known edition (inclusive) when this was constructed. Editions * after this will not have reliable defaults. */ maximumEdition?: Edition | undefined; } /** * A map from every known edition with a unique set of defaults to its * defaults. Not all editions may be contained here. For a given edition, * the defaults at the closest matching edition ordered at or before it should * be used. This field must be in strict ascending order by edition. */ export interface FeatureSetDefaults_FeatureSetEditionDefault { edition?: | Edition | undefined; /** Defaults of features that can be overridden in this edition. */ overridableFeatures?: | FeatureSet | undefined; /** Defaults of features that can't be overridden in this edition. */ fixedFeatures?: FeatureSet | undefined; } /** * Encapsulates information about the original source file from which a * FileDescriptorProto was generated. */ export interface SourceCodeInfo { /** * A Location identifies a piece of source code in a .proto file which * corresponds to a particular definition. This information is intended * to be useful to IDEs, code indexers, documentation generators, and similar * tools. * * For example, say we have a file like: * message Foo { * optional string foo = 1; * } * Let's look at just the field definition: * optional string foo = 1; * ^ ^^ ^^ ^ ^^^ * a bc de f ghi * We have the following locations: * span path represents * [a,i) [ 4, 0, 2, 0 ] The whole field definition. * [a,b) [ 4, 0, 2, 0, 4 ] The label (optional). * [c,d) [ 4, 0, 2, 0, 5 ] The type (string). * [e,f) [ 4, 0, 2, 0, 1 ] The name (foo). * [g,h) [ 4, 0, 2, 0, 3 ] The number (1). * * Notes: * - A location may refer to a repeated field itself (i.e. not to any * particular index within it). This is used whenever a set of elements are * logically enclosed in a single code segment. For example, an entire * extend block (possibly containing multiple extension definitions) will * have an outer location whose path refers to the "extensions" repeated * field without an index. * - Multiple locations may have the same path. This happens when a single * logical declaration is spread out across multiple places. The most * obvious example is the "extend" block again -- there may be multiple * extend blocks in the same scope, each of which will have the same path. * - A location's span is not always a subset of its parent's span. For * example, the "extendee" of an extension declaration appears at the * beginning of the "extend" block and is shared by all extensions within * the block. * - Just because a location's span is a subset of some other location's span * does not mean that it is a descendant. For example, a "group" defines * both a type and a field in a single declaration. Thus, the locations * corresponding to the type and field and their components will overlap. * - Code which tries to interpret locations should probably be designed to * ignore those that it doesn't understand, as more types of locations could * be recorded in the future. */ location: SourceCodeInfo_Location[]; } export interface SourceCodeInfo_Location { /** * Identifies which part of the FileDescriptorProto was defined at this * location. * * Each element is a field number or an index. They form a path from * the root FileDescriptorProto to the place where the definition appears. 
* For example, this path: * [ 4, 3, 2, 7, 1 ] * refers to: * file.message_type(3) // 4, 3 * .field(7) // 2, 7 * .name() // 1 * This is because FileDescriptorProto.message_type has field number 4: * repeated DescriptorProto message_type = 4; * and DescriptorProto.field has field number 2: * repeated FieldDescriptorProto field = 2; * and FieldDescriptorProto.name has field number 1: * optional string name = 1; * * Thus, the above path gives the location of a field name. If we removed * the last element: * [ 4, 3, 2, 7 ] * this path refers to the whole field declaration (from the beginning * of the label to the terminating semicolon). */ path: number[]; /** * Always has exactly three or four elements: start line, start column, * end line (optional, otherwise assumed same as start line), end column. * These are packed into a single field for efficiency. Note that line * and column numbers are zero-based -- typically you will want to add * 1 to each before displaying to a user. */ span: number[]; /** * If this SourceCodeInfo represents a complete declaration, these are any * comments appearing before and after the declaration which appear to be * attached to the declaration. * * A series of line comments appearing on consecutive lines, with no other * tokens appearing on those lines, will be treated as a single comment. * * leading_detached_comments will keep paragraphs of comments that appear * before (but not connected to) the current element. Each paragraph, * separated by empty lines, will be one comment element in the repeated * field. * * Only the comment content is provided; comment markers (e.g. //) are * stripped out. For block comments, leading whitespace and an asterisk * will be stripped from the beginning of each line other than the first. * Newlines are included in the output. * * Examples: * * optional int32 foo = 1; // Comment attached to foo. * // Comment attached to bar. * optional int32 bar = 2; * * optional string baz = 3; * // Comment attached to baz. * // Another line attached to baz. * * // Comment attached to moo. * // * // Another line attached to moo. * optional double moo = 4; * * // Detached comment for corge. This is not leading or trailing comments * // to moo or corge because there are blank lines separating it from * // both. * * // Detached comment for corge paragraph 2. * * optional string corge = 5; * /* Block comment attached * * to corge. Leading asterisks * * will be removed. * / * /* Block comment attached to * * grault. * / * optional int32 grault = 6; * * // ignored detached comments. */ leadingComments?: string | undefined; trailingComments?: string | undefined; leadingDetachedComments: string[]; } /** * Describes the relationship between generated code and its original source * file. A GeneratedCodeInfo message is associated with only one generated * source file, but may contain references to different source .proto files. */ export interface GeneratedCodeInfo { /** * An Annotation connects some span of text in generated code to an element * of its generating .proto file. */ annotation: GeneratedCodeInfo_Annotation[]; } export interface GeneratedCodeInfo_Annotation { /** * Identifies the element in the original source .proto file. This field * is formatted the same as SourceCodeInfo.Location.path. */ path: number[]; /** Identifies the filesystem path to the original source .proto. */ sourceFile?: | string | undefined; /** * Identifies the starting offset in bytes in the generated code * that relates to the identified object. 
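 *
 * (Editorial sketch, not upstream documentation: together with `end` below,
 * this delimits a byte range of the generated file. For a file loaded into a
 * hypothetical Node.js Buffer `src`, the annotated span is
 *
 *     src.subarray(annotation.begin ?? 0, annotation.end ?? 0)
 *
 * Both offsets are byte offsets, so plain string indices only coincide with
 * them for ASCII-only output.)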
*/ begin?: | number | undefined; /** * Identifies the ending offset in bytes in the generated code that * relates to the identified object. The end offset should be one past * the last relevant byte (so the length of the text = end - begin). */ end?: number | undefined; semantic?: GeneratedCodeInfo_Annotation_Semantic | undefined; } /** * Represents the identified object's effect on the element in the original * .proto file. */ export enum GeneratedCodeInfo_Annotation_Semantic { /** NONE - There is no effect or the effect is indescribable. */ NONE = 0, /** SET - The element is set or otherwise mutated. */ SET = 1, /** ALIAS - An alias to the element is returned. */ ALIAS = 2, } export function generatedCodeInfo_Annotation_SemanticFromJSON(object: any): GeneratedCodeInfo_Annotation_Semantic { switch (object) { case 0: case "NONE": return GeneratedCodeInfo_Annotation_Semantic.NONE; case 1: case "SET": return GeneratedCodeInfo_Annotation_Semantic.SET; case 2: case "ALIAS": return GeneratedCodeInfo_Annotation_Semantic.ALIAS; default: throw new globalThis.Error( "Unrecognized enum value " + object + " for enum GeneratedCodeInfo_Annotation_Semantic", ); } } export function generatedCodeInfo_Annotation_SemanticToJSON(object: GeneratedCodeInfo_Annotation_Semantic): string { switch (object) { case GeneratedCodeInfo_Annotation_Semantic.NONE: return "NONE"; case GeneratedCodeInfo_Annotation_Semantic.SET: return "SET"; case GeneratedCodeInfo_Annotation_Semantic.ALIAS: return "ALIAS"; default: throw new globalThis.Error( "Unrecognized enum value " + object + " for enum GeneratedCodeInfo_Annotation_Semantic", ); } } export const FileDescriptorSet: MessageFns = { fromJSON(object: any): FileDescriptorSet { return { file: globalThis.Array.isArray(object?.file) ? object.file.map((e: any) => FileDescriptorProto.fromJSON(e)) : [], }; }, toJSON(message: FileDescriptorSet): unknown { const obj: any = {}; if (message.file?.length) { obj.file = message.file.map((e) => FileDescriptorProto.toJSON(e)); } return obj; }, }; export const FileDescriptorProto: MessageFns = { fromJSON(object: any): FileDescriptorProto { return { name: isSet(object.name) ? globalThis.String(object.name) : "", package: isSet(object.package) ? globalThis.String(object.package) : "", dependency: globalThis.Array.isArray(object?.dependency) ? object.dependency.map((e: any) => globalThis.String(e)) : [], publicDependency: globalThis.Array.isArray(object?.publicDependency) ? object.publicDependency.map((e: any) => globalThis.Number(e)) : [], weakDependency: globalThis.Array.isArray(object?.weakDependency) ? object.weakDependency.map((e: any) => globalThis.Number(e)) : [], messageType: globalThis.Array.isArray(object?.messageType) ? object.messageType.map((e: any) => DescriptorProto.fromJSON(e)) : [], enumType: globalThis.Array.isArray(object?.enumType) ? object.enumType.map((e: any) => EnumDescriptorProto.fromJSON(e)) : [], service: globalThis.Array.isArray(object?.service) ? object.service.map((e: any) => ServiceDescriptorProto.fromJSON(e)) : [], extension: globalThis.Array.isArray(object?.extension) ? object.extension.map((e: any) => FieldDescriptorProto.fromJSON(e)) : [], options: isSet(object.options) ? FileOptions.fromJSON(object.options) : undefined, sourceCodeInfo: isSet(object.sourceCodeInfo) ? SourceCodeInfo.fromJSON(object.sourceCodeInfo) : undefined, syntax: isSet(object.syntax) ? globalThis.String(object.syntax) : "", edition: isSet(object.edition) ? 
editionFromJSON(object.edition) : 0, }; }, toJSON(message: FileDescriptorProto): unknown { const obj: any = {}; if (message.name !== undefined && message.name !== "") { obj.name = message.name; } if (message.package !== undefined && message.package !== "") { obj.package = message.package; } if (message.dependency?.length) { obj.dependency = message.dependency; } if (message.publicDependency?.length) { obj.publicDependency = message.publicDependency.map((e) => Math.round(e)); } if (message.weakDependency?.length) { obj.weakDependency = message.weakDependency.map((e) => Math.round(e)); } if (message.messageType?.length) { obj.messageType = message.messageType.map((e) => DescriptorProto.toJSON(e)); } if (message.enumType?.length) { obj.enumType = message.enumType.map((e) => EnumDescriptorProto.toJSON(e)); } if (message.service?.length) { obj.service = message.service.map((e) => ServiceDescriptorProto.toJSON(e)); } if (message.extension?.length) { obj.extension = message.extension.map((e) => FieldDescriptorProto.toJSON(e)); } if (message.options !== undefined) { obj.options = FileOptions.toJSON(message.options); } if (message.sourceCodeInfo !== undefined) { obj.sourceCodeInfo = SourceCodeInfo.toJSON(message.sourceCodeInfo); } if (message.syntax !== undefined && message.syntax !== "") { obj.syntax = message.syntax; } if (message.edition !== undefined && message.edition !== 0) { obj.edition = editionToJSON(message.edition); } return obj; }, }; export const DescriptorProto: MessageFns = { fromJSON(object: any): DescriptorProto { return { name: isSet(object.name) ? globalThis.String(object.name) : "", field: globalThis.Array.isArray(object?.field) ? object.field.map((e: any) => FieldDescriptorProto.fromJSON(e)) : [], extension: globalThis.Array.isArray(object?.extension) ? object.extension.map((e: any) => FieldDescriptorProto.fromJSON(e)) : [], nestedType: globalThis.Array.isArray(object?.nestedType) ? object.nestedType.map((e: any) => DescriptorProto.fromJSON(e)) : [], enumType: globalThis.Array.isArray(object?.enumType) ? object.enumType.map((e: any) => EnumDescriptorProto.fromJSON(e)) : [], extensionRange: globalThis.Array.isArray(object?.extensionRange) ? object.extensionRange.map((e: any) => DescriptorProto_ExtensionRange.fromJSON(e)) : [], oneofDecl: globalThis.Array.isArray(object?.oneofDecl) ? object.oneofDecl.map((e: any) => OneofDescriptorProto.fromJSON(e)) : [], options: isSet(object.options) ? MessageOptions.fromJSON(object.options) : undefined, reservedRange: globalThis.Array.isArray(object?.reservedRange) ? object.reservedRange.map((e: any) => DescriptorProto_ReservedRange.fromJSON(e)) : [], reservedName: globalThis.Array.isArray(object?.reservedName) ? 
object.reservedName.map((e: any) => globalThis.String(e)) : [], }; }, toJSON(message: DescriptorProto): unknown { const obj: any = {}; if (message.name !== undefined && message.name !== "") { obj.name = message.name; } if (message.field?.length) { obj.field = message.field.map((e) => FieldDescriptorProto.toJSON(e)); } if (message.extension?.length) { obj.extension = message.extension.map((e) => FieldDescriptorProto.toJSON(e)); } if (message.nestedType?.length) { obj.nestedType = message.nestedType.map((e) => DescriptorProto.toJSON(e)); } if (message.enumType?.length) { obj.enumType = message.enumType.map((e) => EnumDescriptorProto.toJSON(e)); } if (message.extensionRange?.length) { obj.extensionRange = message.extensionRange.map((e) => DescriptorProto_ExtensionRange.toJSON(e)); } if (message.oneofDecl?.length) { obj.oneofDecl = message.oneofDecl.map((e) => OneofDescriptorProto.toJSON(e)); } if (message.options !== undefined) { obj.options = MessageOptions.toJSON(message.options); } if (message.reservedRange?.length) { obj.reservedRange = message.reservedRange.map((e) => DescriptorProto_ReservedRange.toJSON(e)); } if (message.reservedName?.length) { obj.reservedName = message.reservedName; } return obj; }, }; export const DescriptorProto_ExtensionRange: MessageFns = { fromJSON(object: any): DescriptorProto_ExtensionRange { return { start: isSet(object.start) ? globalThis.Number(object.start) : 0, end: isSet(object.end) ? globalThis.Number(object.end) : 0, options: isSet(object.options) ? ExtensionRangeOptions.fromJSON(object.options) : undefined, }; }, toJSON(message: DescriptorProto_ExtensionRange): unknown { const obj: any = {}; if (message.start !== undefined && message.start !== 0) { obj.start = Math.round(message.start); } if (message.end !== undefined && message.end !== 0) { obj.end = Math.round(message.end); } if (message.options !== undefined) { obj.options = ExtensionRangeOptions.toJSON(message.options); } return obj; }, }; export const DescriptorProto_ReservedRange: MessageFns = { fromJSON(object: any): DescriptorProto_ReservedRange { return { start: isSet(object.start) ? globalThis.Number(object.start) : 0, end: isSet(object.end) ? globalThis.Number(object.end) : 0, }; }, toJSON(message: DescriptorProto_ReservedRange): unknown { const obj: any = {}; if (message.start !== undefined && message.start !== 0) { obj.start = Math.round(message.start); } if (message.end !== undefined && message.end !== 0) { obj.end = Math.round(message.end); } return obj; }, }; export const ExtensionRangeOptions: MessageFns = { fromJSON(object: any): ExtensionRangeOptions { return { uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption) ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) : [], declaration: globalThis.Array.isArray(object?.declaration) ? object.declaration.map((e: any) => ExtensionRangeOptions_Declaration.fromJSON(e)) : [], features: isSet(object.features) ? FeatureSet.fromJSON(object.features) : undefined, verification: isSet(object.verification) ? 
extensionRangeOptions_VerificationStateFromJSON(object.verification) : 1, }; }, toJSON(message: ExtensionRangeOptions): unknown { const obj: any = {}; if (message.uninterpretedOption?.length) { obj.uninterpretedOption = message.uninterpretedOption.map((e) => UninterpretedOption.toJSON(e)); } if (message.declaration?.length) { obj.declaration = message.declaration.map((e) => ExtensionRangeOptions_Declaration.toJSON(e)); } if (message.features !== undefined) { obj.features = FeatureSet.toJSON(message.features); } if (message.verification !== undefined && message.verification !== 1) { obj.verification = extensionRangeOptions_VerificationStateToJSON(message.verification); } return obj; }, }; export const ExtensionRangeOptions_Declaration: MessageFns = { fromJSON(object: any): ExtensionRangeOptions_Declaration { return { number: isSet(object.number) ? globalThis.Number(object.number) : 0, fullName: isSet(object.fullName) ? globalThis.String(object.fullName) : "", type: isSet(object.type) ? globalThis.String(object.type) : "", reserved: isSet(object.reserved) ? globalThis.Boolean(object.reserved) : false, repeated: isSet(object.repeated) ? globalThis.Boolean(object.repeated) : false, }; }, toJSON(message: ExtensionRangeOptions_Declaration): unknown { const obj: any = {}; if (message.number !== undefined && message.number !== 0) { obj.number = Math.round(message.number); } if (message.fullName !== undefined && message.fullName !== "") { obj.fullName = message.fullName; } if (message.type !== undefined && message.type !== "") { obj.type = message.type; } if (message.reserved !== undefined && message.reserved !== false) { obj.reserved = message.reserved; } if (message.repeated !== undefined && message.repeated !== false) { obj.repeated = message.repeated; } return obj; }, }; export const FieldDescriptorProto: MessageFns = { fromJSON(object: any): FieldDescriptorProto { return { name: isSet(object.name) ? globalThis.String(object.name) : "", number: isSet(object.number) ? globalThis.Number(object.number) : 0, label: isSet(object.label) ? fieldDescriptorProto_LabelFromJSON(object.label) : 1, type: isSet(object.type) ? fieldDescriptorProto_TypeFromJSON(object.type) : 1, typeName: isSet(object.typeName) ? globalThis.String(object.typeName) : "", extendee: isSet(object.extendee) ? globalThis.String(object.extendee) : "", defaultValue: isSet(object.defaultValue) ? globalThis.String(object.defaultValue) : "", oneofIndex: isSet(object.oneofIndex) ? globalThis.Number(object.oneofIndex) : 0, jsonName: isSet(object.jsonName) ? globalThis.String(object.jsonName) : "", options: isSet(object.options) ? FieldOptions.fromJSON(object.options) : undefined, proto3Optional: isSet(object.proto3Optional) ? 
globalThis.Boolean(object.proto3Optional) : false, }; }, toJSON(message: FieldDescriptorProto): unknown { const obj: any = {}; if (message.name !== undefined && message.name !== "") { obj.name = message.name; } if (message.number !== undefined && message.number !== 0) { obj.number = Math.round(message.number); } if (message.label !== undefined && message.label !== 1) { obj.label = fieldDescriptorProto_LabelToJSON(message.label); } if (message.type !== undefined && message.type !== 1) { obj.type = fieldDescriptorProto_TypeToJSON(message.type); } if (message.typeName !== undefined && message.typeName !== "") { obj.typeName = message.typeName; } if (message.extendee !== undefined && message.extendee !== "") { obj.extendee = message.extendee; } if (message.defaultValue !== undefined && message.defaultValue !== "") { obj.defaultValue = message.defaultValue; } if (message.oneofIndex !== undefined && message.oneofIndex !== 0) { obj.oneofIndex = Math.round(message.oneofIndex); } if (message.jsonName !== undefined && message.jsonName !== "") { obj.jsonName = message.jsonName; } if (message.options !== undefined) { obj.options = FieldOptions.toJSON(message.options); } if (message.proto3Optional !== undefined && message.proto3Optional !== false) { obj.proto3Optional = message.proto3Optional; } return obj; }, }; export const OneofDescriptorProto: MessageFns = { fromJSON(object: any): OneofDescriptorProto { return { name: isSet(object.name) ? globalThis.String(object.name) : "", options: isSet(object.options) ? OneofOptions.fromJSON(object.options) : undefined, }; }, toJSON(message: OneofDescriptorProto): unknown { const obj: any = {}; if (message.name !== undefined && message.name !== "") { obj.name = message.name; } if (message.options !== undefined) { obj.options = OneofOptions.toJSON(message.options); } return obj; }, }; export const EnumDescriptorProto: MessageFns = { fromJSON(object: any): EnumDescriptorProto { return { name: isSet(object.name) ? globalThis.String(object.name) : "", value: globalThis.Array.isArray(object?.value) ? object.value.map((e: any) => EnumValueDescriptorProto.fromJSON(e)) : [], options: isSet(object.options) ? EnumOptions.fromJSON(object.options) : undefined, reservedRange: globalThis.Array.isArray(object?.reservedRange) ? object.reservedRange.map((e: any) => EnumDescriptorProto_EnumReservedRange.fromJSON(e)) : [], reservedName: globalThis.Array.isArray(object?.reservedName) ? object.reservedName.map((e: any) => globalThis.String(e)) : [], }; }, toJSON(message: EnumDescriptorProto): unknown { const obj: any = {}; if (message.name !== undefined && message.name !== "") { obj.name = message.name; } if (message.value?.length) { obj.value = message.value.map((e) => EnumValueDescriptorProto.toJSON(e)); } if (message.options !== undefined) { obj.options = EnumOptions.toJSON(message.options); } if (message.reservedRange?.length) { obj.reservedRange = message.reservedRange.map((e) => EnumDescriptorProto_EnumReservedRange.toJSON(e)); } if (message.reservedName?.length) { obj.reservedName = message.reservedName; } return obj; }, }; export const EnumDescriptorProto_EnumReservedRange: MessageFns = { fromJSON(object: any): EnumDescriptorProto_EnumReservedRange { return { start: isSet(object.start) ? globalThis.Number(object.start) : 0, end: isSet(object.end) ? 
globalThis.Number(object.end) : 0, }; }, toJSON(message: EnumDescriptorProto_EnumReservedRange): unknown { const obj: any = {}; if (message.start !== undefined && message.start !== 0) { obj.start = Math.round(message.start); } if (message.end !== undefined && message.end !== 0) { obj.end = Math.round(message.end); } return obj; }, }; export const EnumValueDescriptorProto: MessageFns = { fromJSON(object: any): EnumValueDescriptorProto { return { name: isSet(object.name) ? globalThis.String(object.name) : "", number: isSet(object.number) ? globalThis.Number(object.number) : 0, options: isSet(object.options) ? EnumValueOptions.fromJSON(object.options) : undefined, }; }, toJSON(message: EnumValueDescriptorProto): unknown { const obj: any = {}; if (message.name !== undefined && message.name !== "") { obj.name = message.name; } if (message.number !== undefined && message.number !== 0) { obj.number = Math.round(message.number); } if (message.options !== undefined) { obj.options = EnumValueOptions.toJSON(message.options); } return obj; }, }; export const ServiceDescriptorProto: MessageFns = { fromJSON(object: any): ServiceDescriptorProto { return { name: isSet(object.name) ? globalThis.String(object.name) : "", method: globalThis.Array.isArray(object?.method) ? object.method.map((e: any) => MethodDescriptorProto.fromJSON(e)) : [], options: isSet(object.options) ? ServiceOptions.fromJSON(object.options) : undefined, }; }, toJSON(message: ServiceDescriptorProto): unknown { const obj: any = {}; if (message.name !== undefined && message.name !== "") { obj.name = message.name; } if (message.method?.length) { obj.method = message.method.map((e) => MethodDescriptorProto.toJSON(e)); } if (message.options !== undefined) { obj.options = ServiceOptions.toJSON(message.options); } return obj; }, }; export const MethodDescriptorProto: MessageFns = { fromJSON(object: any): MethodDescriptorProto { return { name: isSet(object.name) ? globalThis.String(object.name) : "", inputType: isSet(object.inputType) ? globalThis.String(object.inputType) : "", outputType: isSet(object.outputType) ? globalThis.String(object.outputType) : "", options: isSet(object.options) ? MethodOptions.fromJSON(object.options) : undefined, clientStreaming: isSet(object.clientStreaming) ? globalThis.Boolean(object.clientStreaming) : false, serverStreaming: isSet(object.serverStreaming) ? globalThis.Boolean(object.serverStreaming) : false, }; }, toJSON(message: MethodDescriptorProto): unknown { const obj: any = {}; if (message.name !== undefined && message.name !== "") { obj.name = message.name; } if (message.inputType !== undefined && message.inputType !== "") { obj.inputType = message.inputType; } if (message.outputType !== undefined && message.outputType !== "") { obj.outputType = message.outputType; } if (message.options !== undefined) { obj.options = MethodOptions.toJSON(message.options); } if (message.clientStreaming !== undefined && message.clientStreaming !== false) { obj.clientStreaming = message.clientStreaming; } if (message.serverStreaming !== undefined && message.serverStreaming !== false) { obj.serverStreaming = message.serverStreaming; } return obj; }, }; export const FileOptions: MessageFns = { fromJSON(object: any): FileOptions { return { javaPackage: isSet(object.javaPackage) ? globalThis.String(object.javaPackage) : "", javaOuterClassname: isSet(object.javaOuterClassname) ? globalThis.String(object.javaOuterClassname) : "", javaMultipleFiles: isSet(object.javaMultipleFiles) ? 
globalThis.Boolean(object.javaMultipleFiles) : false, javaGenerateEqualsAndHash: isSet(object.javaGenerateEqualsAndHash) ? globalThis.Boolean(object.javaGenerateEqualsAndHash) : false, javaStringCheckUtf8: isSet(object.javaStringCheckUtf8) ? globalThis.Boolean(object.javaStringCheckUtf8) : false, optimizeFor: isSet(object.optimizeFor) ? fileOptions_OptimizeModeFromJSON(object.optimizeFor) : 1, goPackage: isSet(object.goPackage) ? globalThis.String(object.goPackage) : "", ccGenericServices: isSet(object.ccGenericServices) ? globalThis.Boolean(object.ccGenericServices) : false, javaGenericServices: isSet(object.javaGenericServices) ? globalThis.Boolean(object.javaGenericServices) : false, pyGenericServices: isSet(object.pyGenericServices) ? globalThis.Boolean(object.pyGenericServices) : false, deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false, ccEnableArenas: isSet(object.ccEnableArenas) ? globalThis.Boolean(object.ccEnableArenas) : true, objcClassPrefix: isSet(object.objcClassPrefix) ? globalThis.String(object.objcClassPrefix) : "", csharpNamespace: isSet(object.csharpNamespace) ? globalThis.String(object.csharpNamespace) : "", swiftPrefix: isSet(object.swiftPrefix) ? globalThis.String(object.swiftPrefix) : "", phpClassPrefix: isSet(object.phpClassPrefix) ? globalThis.String(object.phpClassPrefix) : "", phpNamespace: isSet(object.phpNamespace) ? globalThis.String(object.phpNamespace) : "", phpMetadataNamespace: isSet(object.phpMetadataNamespace) ? globalThis.String(object.phpMetadataNamespace) : "", rubyPackage: isSet(object.rubyPackage) ? globalThis.String(object.rubyPackage) : "", features: isSet(object.features) ? FeatureSet.fromJSON(object.features) : undefined, uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption) ? 
object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) : [], }; }, toJSON(message: FileOptions): unknown { const obj: any = {}; if (message.javaPackage !== undefined && message.javaPackage !== "") { obj.javaPackage = message.javaPackage; } if (message.javaOuterClassname !== undefined && message.javaOuterClassname !== "") { obj.javaOuterClassname = message.javaOuterClassname; } if (message.javaMultipleFiles !== undefined && message.javaMultipleFiles !== false) { obj.javaMultipleFiles = message.javaMultipleFiles; } if (message.javaGenerateEqualsAndHash !== undefined && message.javaGenerateEqualsAndHash !== false) { obj.javaGenerateEqualsAndHash = message.javaGenerateEqualsAndHash; } if (message.javaStringCheckUtf8 !== undefined && message.javaStringCheckUtf8 !== false) { obj.javaStringCheckUtf8 = message.javaStringCheckUtf8; } if (message.optimizeFor !== undefined && message.optimizeFor !== 1) { obj.optimizeFor = fileOptions_OptimizeModeToJSON(message.optimizeFor); } if (message.goPackage !== undefined && message.goPackage !== "") { obj.goPackage = message.goPackage; } if (message.ccGenericServices !== undefined && message.ccGenericServices !== false) { obj.ccGenericServices = message.ccGenericServices; } if (message.javaGenericServices !== undefined && message.javaGenericServices !== false) { obj.javaGenericServices = message.javaGenericServices; } if (message.pyGenericServices !== undefined && message.pyGenericServices !== false) { obj.pyGenericServices = message.pyGenericServices; } if (message.deprecated !== undefined && message.deprecated !== false) { obj.deprecated = message.deprecated; } if (message.ccEnableArenas !== undefined && message.ccEnableArenas !== true) { obj.ccEnableArenas = message.ccEnableArenas; } if (message.objcClassPrefix !== undefined && message.objcClassPrefix !== "") { obj.objcClassPrefix = message.objcClassPrefix; } if (message.csharpNamespace !== undefined && message.csharpNamespace !== "") { obj.csharpNamespace = message.csharpNamespace; } if (message.swiftPrefix !== undefined && message.swiftPrefix !== "") { obj.swiftPrefix = message.swiftPrefix; } if (message.phpClassPrefix !== undefined && message.phpClassPrefix !== "") { obj.phpClassPrefix = message.phpClassPrefix; } if (message.phpNamespace !== undefined && message.phpNamespace !== "") { obj.phpNamespace = message.phpNamespace; } if (message.phpMetadataNamespace !== undefined && message.phpMetadataNamespace !== "") { obj.phpMetadataNamespace = message.phpMetadataNamespace; } if (message.rubyPackage !== undefined && message.rubyPackage !== "") { obj.rubyPackage = message.rubyPackage; } if (message.features !== undefined) { obj.features = FeatureSet.toJSON(message.features); } if (message.uninterpretedOption?.length) { obj.uninterpretedOption = message.uninterpretedOption.map((e) => UninterpretedOption.toJSON(e)); } return obj; }, }; export const MessageOptions: MessageFns = { fromJSON(object: any): MessageOptions { return { messageSetWireFormat: isSet(object.messageSetWireFormat) ? globalThis.Boolean(object.messageSetWireFormat) : false, noStandardDescriptorAccessor: isSet(object.noStandardDescriptorAccessor) ? globalThis.Boolean(object.noStandardDescriptorAccessor) : false, deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false, mapEntry: isSet(object.mapEntry) ? globalThis.Boolean(object.mapEntry) : false, deprecatedLegacyJsonFieldConflicts: isSet(object.deprecatedLegacyJsonFieldConflicts) ? 
globalThis.Boolean(object.deprecatedLegacyJsonFieldConflicts) : false, features: isSet(object.features) ? FeatureSet.fromJSON(object.features) : undefined, uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption) ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) : [], }; }, toJSON(message: MessageOptions): unknown { const obj: any = {}; if (message.messageSetWireFormat !== undefined && message.messageSetWireFormat !== false) { obj.messageSetWireFormat = message.messageSetWireFormat; } if (message.noStandardDescriptorAccessor !== undefined && message.noStandardDescriptorAccessor !== false) { obj.noStandardDescriptorAccessor = message.noStandardDescriptorAccessor; } if (message.deprecated !== undefined && message.deprecated !== false) { obj.deprecated = message.deprecated; } if (message.mapEntry !== undefined && message.mapEntry !== false) { obj.mapEntry = message.mapEntry; } if ( message.deprecatedLegacyJsonFieldConflicts !== undefined && message.deprecatedLegacyJsonFieldConflicts !== false ) { obj.deprecatedLegacyJsonFieldConflicts = message.deprecatedLegacyJsonFieldConflicts; } if (message.features !== undefined) { obj.features = FeatureSet.toJSON(message.features); } if (message.uninterpretedOption?.length) { obj.uninterpretedOption = message.uninterpretedOption.map((e) => UninterpretedOption.toJSON(e)); } return obj; }, }; export const FieldOptions: MessageFns = { fromJSON(object: any): FieldOptions { return { ctype: isSet(object.ctype) ? fieldOptions_CTypeFromJSON(object.ctype) : 0, packed: isSet(object.packed) ? globalThis.Boolean(object.packed) : false, jstype: isSet(object.jstype) ? fieldOptions_JSTypeFromJSON(object.jstype) : 0, lazy: isSet(object.lazy) ? globalThis.Boolean(object.lazy) : false, unverifiedLazy: isSet(object.unverifiedLazy) ? globalThis.Boolean(object.unverifiedLazy) : false, deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false, weak: isSet(object.weak) ? globalThis.Boolean(object.weak) : false, debugRedact: isSet(object.debugRedact) ? globalThis.Boolean(object.debugRedact) : false, retention: isSet(object.retention) ? fieldOptions_OptionRetentionFromJSON(object.retention) : 0, targets: globalThis.Array.isArray(object?.targets) ? object.targets.map((e: any) => fieldOptions_OptionTargetTypeFromJSON(e)) : [], editionDefaults: globalThis.Array.isArray(object?.editionDefaults) ? object.editionDefaults.map((e: any) => FieldOptions_EditionDefault.fromJSON(e)) : [], features: isSet(object.features) ? FeatureSet.fromJSON(object.features) : undefined, featureSupport: isSet(object.featureSupport) ? FieldOptions_FeatureSupport.fromJSON(object.featureSupport) : undefined, uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption) ? 
object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) : [], }; }, toJSON(message: FieldOptions): unknown { const obj: any = {}; if (message.ctype !== undefined && message.ctype !== 0) { obj.ctype = fieldOptions_CTypeToJSON(message.ctype); } if (message.packed !== undefined && message.packed !== false) { obj.packed = message.packed; } if (message.jstype !== undefined && message.jstype !== 0) { obj.jstype = fieldOptions_JSTypeToJSON(message.jstype); } if (message.lazy !== undefined && message.lazy !== false) { obj.lazy = message.lazy; } if (message.unverifiedLazy !== undefined && message.unverifiedLazy !== false) { obj.unverifiedLazy = message.unverifiedLazy; } if (message.deprecated !== undefined && message.deprecated !== false) { obj.deprecated = message.deprecated; } if (message.weak !== undefined && message.weak !== false) { obj.weak = message.weak; } if (message.debugRedact !== undefined && message.debugRedact !== false) { obj.debugRedact = message.debugRedact; } if (message.retention !== undefined && message.retention !== 0) { obj.retention = fieldOptions_OptionRetentionToJSON(message.retention); } if (message.targets?.length) { obj.targets = message.targets.map((e) => fieldOptions_OptionTargetTypeToJSON(e)); } if (message.editionDefaults?.length) { obj.editionDefaults = message.editionDefaults.map((e) => FieldOptions_EditionDefault.toJSON(e)); } if (message.features !== undefined) { obj.features = FeatureSet.toJSON(message.features); } if (message.featureSupport !== undefined) { obj.featureSupport = FieldOptions_FeatureSupport.toJSON(message.featureSupport); } if (message.uninterpretedOption?.length) { obj.uninterpretedOption = message.uninterpretedOption.map((e) => UninterpretedOption.toJSON(e)); } return obj; }, }; export const FieldOptions_EditionDefault: MessageFns = { fromJSON(object: any): FieldOptions_EditionDefault { return { edition: isSet(object.edition) ? editionFromJSON(object.edition) : 0, value: isSet(object.value) ? globalThis.String(object.value) : "", }; }, toJSON(message: FieldOptions_EditionDefault): unknown { const obj: any = {}; if (message.edition !== undefined && message.edition !== 0) { obj.edition = editionToJSON(message.edition); } if (message.value !== undefined && message.value !== "") { obj.value = message.value; } return obj; }, }; export const FieldOptions_FeatureSupport: MessageFns = { fromJSON(object: any): FieldOptions_FeatureSupport { return { editionIntroduced: isSet(object.editionIntroduced) ? editionFromJSON(object.editionIntroduced) : 0, editionDeprecated: isSet(object.editionDeprecated) ? editionFromJSON(object.editionDeprecated) : 0, deprecationWarning: isSet(object.deprecationWarning) ? globalThis.String(object.deprecationWarning) : "", editionRemoved: isSet(object.editionRemoved) ? 
editionFromJSON(object.editionRemoved) : 0, }; }, toJSON(message: FieldOptions_FeatureSupport): unknown { const obj: any = {}; if (message.editionIntroduced !== undefined && message.editionIntroduced !== 0) { obj.editionIntroduced = editionToJSON(message.editionIntroduced); } if (message.editionDeprecated !== undefined && message.editionDeprecated !== 0) { obj.editionDeprecated = editionToJSON(message.editionDeprecated); } if (message.deprecationWarning !== undefined && message.deprecationWarning !== "") { obj.deprecationWarning = message.deprecationWarning; } if (message.editionRemoved !== undefined && message.editionRemoved !== 0) { obj.editionRemoved = editionToJSON(message.editionRemoved); } return obj; }, }; export const OneofOptions: MessageFns = { fromJSON(object: any): OneofOptions { return { features: isSet(object.features) ? FeatureSet.fromJSON(object.features) : undefined, uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption) ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) : [], }; }, toJSON(message: OneofOptions): unknown { const obj: any = {}; if (message.features !== undefined) { obj.features = FeatureSet.toJSON(message.features); } if (message.uninterpretedOption?.length) { obj.uninterpretedOption = message.uninterpretedOption.map((e) => UninterpretedOption.toJSON(e)); } return obj; }, }; export const EnumOptions: MessageFns = { fromJSON(object: any): EnumOptions { return { allowAlias: isSet(object.allowAlias) ? globalThis.Boolean(object.allowAlias) : false, deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false, deprecatedLegacyJsonFieldConflicts: isSet(object.deprecatedLegacyJsonFieldConflicts) ? globalThis.Boolean(object.deprecatedLegacyJsonFieldConflicts) : false, features: isSet(object.features) ? FeatureSet.fromJSON(object.features) : undefined, uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption) ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) : [], }; }, toJSON(message: EnumOptions): unknown { const obj: any = {}; if (message.allowAlias !== undefined && message.allowAlias !== false) { obj.allowAlias = message.allowAlias; } if (message.deprecated !== undefined && message.deprecated !== false) { obj.deprecated = message.deprecated; } if ( message.deprecatedLegacyJsonFieldConflicts !== undefined && message.deprecatedLegacyJsonFieldConflicts !== false ) { obj.deprecatedLegacyJsonFieldConflicts = message.deprecatedLegacyJsonFieldConflicts; } if (message.features !== undefined) { obj.features = FeatureSet.toJSON(message.features); } if (message.uninterpretedOption?.length) { obj.uninterpretedOption = message.uninterpretedOption.map((e) => UninterpretedOption.toJSON(e)); } return obj; }, }; export const EnumValueOptions: MessageFns = { fromJSON(object: any): EnumValueOptions { return { deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false, features: isSet(object.features) ? FeatureSet.fromJSON(object.features) : undefined, debugRedact: isSet(object.debugRedact) ? globalThis.Boolean(object.debugRedact) : false, featureSupport: isSet(object.featureSupport) ? FieldOptions_FeatureSupport.fromJSON(object.featureSupport) : undefined, uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption) ? 
object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) : [], }; }, toJSON(message: EnumValueOptions): unknown { const obj: any = {}; if (message.deprecated !== undefined && message.deprecated !== false) { obj.deprecated = message.deprecated; } if (message.features !== undefined) { obj.features = FeatureSet.toJSON(message.features); } if (message.debugRedact !== undefined && message.debugRedact !== false) { obj.debugRedact = message.debugRedact; } if (message.featureSupport !== undefined) { obj.featureSupport = FieldOptions_FeatureSupport.toJSON(message.featureSupport); } if (message.uninterpretedOption?.length) { obj.uninterpretedOption = message.uninterpretedOption.map((e) => UninterpretedOption.toJSON(e)); } return obj; }, }; export const ServiceOptions: MessageFns = { fromJSON(object: any): ServiceOptions { return { features: isSet(object.features) ? FeatureSet.fromJSON(object.features) : undefined, deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false, uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption) ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) : [], }; }, toJSON(message: ServiceOptions): unknown { const obj: any = {}; if (message.features !== undefined) { obj.features = FeatureSet.toJSON(message.features); } if (message.deprecated !== undefined && message.deprecated !== false) { obj.deprecated = message.deprecated; } if (message.uninterpretedOption?.length) { obj.uninterpretedOption = message.uninterpretedOption.map((e) => UninterpretedOption.toJSON(e)); } return obj; }, }; export const MethodOptions: MessageFns = { fromJSON(object: any): MethodOptions { return { deprecated: isSet(object.deprecated) ? globalThis.Boolean(object.deprecated) : false, idempotencyLevel: isSet(object.idempotencyLevel) ? methodOptions_IdempotencyLevelFromJSON(object.idempotencyLevel) : 0, features: isSet(object.features) ? FeatureSet.fromJSON(object.features) : undefined, uninterpretedOption: globalThis.Array.isArray(object?.uninterpretedOption) ? object.uninterpretedOption.map((e: any) => UninterpretedOption.fromJSON(e)) : [], }; }, toJSON(message: MethodOptions): unknown { const obj: any = {}; if (message.deprecated !== undefined && message.deprecated !== false) { obj.deprecated = message.deprecated; } if (message.idempotencyLevel !== undefined && message.idempotencyLevel !== 0) { obj.idempotencyLevel = methodOptions_IdempotencyLevelToJSON(message.idempotencyLevel); } if (message.features !== undefined) { obj.features = FeatureSet.toJSON(message.features); } if (message.uninterpretedOption?.length) { obj.uninterpretedOption = message.uninterpretedOption.map((e) => UninterpretedOption.toJSON(e)); } return obj; }, }; export const UninterpretedOption: MessageFns = { fromJSON(object: any): UninterpretedOption { return { name: globalThis.Array.isArray(object?.name) ? object.name.map((e: any) => UninterpretedOption_NamePart.fromJSON(e)) : [], identifierValue: isSet(object.identifierValue) ? globalThis.String(object.identifierValue) : "", positiveIntValue: isSet(object.positiveIntValue) ? globalThis.String(object.positiveIntValue) : "0", negativeIntValue: isSet(object.negativeIntValue) ? globalThis.String(object.negativeIntValue) : "0", doubleValue: isSet(object.doubleValue) ? globalThis.Number(object.doubleValue) : 0, stringValue: isSet(object.stringValue) ? Buffer.from(bytesFromBase64(object.stringValue)) : Buffer.alloc(0), aggregateValue: isSet(object.aggregateValue) ? 
globalThis.String(object.aggregateValue) : "", }; }, toJSON(message: UninterpretedOption): unknown { const obj: any = {}; if (message.name?.length) { obj.name = message.name.map((e) => UninterpretedOption_NamePart.toJSON(e)); } if (message.identifierValue !== undefined && message.identifierValue !== "") { obj.identifierValue = message.identifierValue; } if (message.positiveIntValue !== undefined && message.positiveIntValue !== "0") { obj.positiveIntValue = message.positiveIntValue; } if (message.negativeIntValue !== undefined && message.negativeIntValue !== "0") { obj.negativeIntValue = message.negativeIntValue; } if (message.doubleValue !== undefined && message.doubleValue !== 0) { obj.doubleValue = message.doubleValue; } if (message.stringValue !== undefined && message.stringValue.length !== 0) { obj.stringValue = base64FromBytes(message.stringValue); } if (message.aggregateValue !== undefined && message.aggregateValue !== "") { obj.aggregateValue = message.aggregateValue; } return obj; }, }; export const UninterpretedOption_NamePart: MessageFns = { fromJSON(object: any): UninterpretedOption_NamePart { return { namePart: isSet(object.namePart) ? globalThis.String(object.namePart) : "", isExtension: isSet(object.isExtension) ? globalThis.Boolean(object.isExtension) : false, }; }, toJSON(message: UninterpretedOption_NamePart): unknown { const obj: any = {}; if (message.namePart !== "") { obj.namePart = message.namePart; } if (message.isExtension !== false) { obj.isExtension = message.isExtension; } return obj; }, }; export const FeatureSet: MessageFns = { fromJSON(object: any): FeatureSet { return { fieldPresence: isSet(object.fieldPresence) ? featureSet_FieldPresenceFromJSON(object.fieldPresence) : 0, enumType: isSet(object.enumType) ? featureSet_EnumTypeFromJSON(object.enumType) : 0, repeatedFieldEncoding: isSet(object.repeatedFieldEncoding) ? featureSet_RepeatedFieldEncodingFromJSON(object.repeatedFieldEncoding) : 0, utf8Validation: isSet(object.utf8Validation) ? featureSet_Utf8ValidationFromJSON(object.utf8Validation) : 0, messageEncoding: isSet(object.messageEncoding) ? featureSet_MessageEncodingFromJSON(object.messageEncoding) : 0, jsonFormat: isSet(object.jsonFormat) ? featureSet_JsonFormatFromJSON(object.jsonFormat) : 0, }; }, toJSON(message: FeatureSet): unknown { const obj: any = {}; if (message.fieldPresence !== undefined && message.fieldPresence !== 0) { obj.fieldPresence = featureSet_FieldPresenceToJSON(message.fieldPresence); } if (message.enumType !== undefined && message.enumType !== 0) { obj.enumType = featureSet_EnumTypeToJSON(message.enumType); } if (message.repeatedFieldEncoding !== undefined && message.repeatedFieldEncoding !== 0) { obj.repeatedFieldEncoding = featureSet_RepeatedFieldEncodingToJSON(message.repeatedFieldEncoding); } if (message.utf8Validation !== undefined && message.utf8Validation !== 0) { obj.utf8Validation = featureSet_Utf8ValidationToJSON(message.utf8Validation); } if (message.messageEncoding !== undefined && message.messageEncoding !== 0) { obj.messageEncoding = featureSet_MessageEncodingToJSON(message.messageEncoding); } if (message.jsonFormat !== undefined && message.jsonFormat !== 0) { obj.jsonFormat = featureSet_JsonFormatToJSON(message.jsonFormat); } return obj; }, }; export const FeatureSetDefaults: MessageFns = { fromJSON(object: any): FeatureSetDefaults { return { defaults: globalThis.Array.isArray(object?.defaults) ? 
object.defaults.map((e: any) => FeatureSetDefaults_FeatureSetEditionDefault.fromJSON(e)) : [], minimumEdition: isSet(object.minimumEdition) ? editionFromJSON(object.minimumEdition) : 0, maximumEdition: isSet(object.maximumEdition) ? editionFromJSON(object.maximumEdition) : 0, }; }, toJSON(message: FeatureSetDefaults): unknown { const obj: any = {}; if (message.defaults?.length) { obj.defaults = message.defaults.map((e) => FeatureSetDefaults_FeatureSetEditionDefault.toJSON(e)); } if (message.minimumEdition !== undefined && message.minimumEdition !== 0) { obj.minimumEdition = editionToJSON(message.minimumEdition); } if (message.maximumEdition !== undefined && message.maximumEdition !== 0) { obj.maximumEdition = editionToJSON(message.maximumEdition); } return obj; }, }; export const FeatureSetDefaults_FeatureSetEditionDefault: MessageFns = { fromJSON(object: any): FeatureSetDefaults_FeatureSetEditionDefault { return { edition: isSet(object.edition) ? editionFromJSON(object.edition) : 0, overridableFeatures: isSet(object.overridableFeatures) ? FeatureSet.fromJSON(object.overridableFeatures) : undefined, fixedFeatures: isSet(object.fixedFeatures) ? FeatureSet.fromJSON(object.fixedFeatures) : undefined, }; }, toJSON(message: FeatureSetDefaults_FeatureSetEditionDefault): unknown { const obj: any = {}; if (message.edition !== undefined && message.edition !== 0) { obj.edition = editionToJSON(message.edition); } if (message.overridableFeatures !== undefined) { obj.overridableFeatures = FeatureSet.toJSON(message.overridableFeatures); } if (message.fixedFeatures !== undefined) { obj.fixedFeatures = FeatureSet.toJSON(message.fixedFeatures); } return obj; }, }; export const SourceCodeInfo: MessageFns = { fromJSON(object: any): SourceCodeInfo { return { location: globalThis.Array.isArray(object?.location) ? object.location.map((e: any) => SourceCodeInfo_Location.fromJSON(e)) : [], }; }, toJSON(message: SourceCodeInfo): unknown { const obj: any = {}; if (message.location?.length) { obj.location = message.location.map((e) => SourceCodeInfo_Location.toJSON(e)); } return obj; }, }; export const SourceCodeInfo_Location: MessageFns = { fromJSON(object: any): SourceCodeInfo_Location { return { path: globalThis.Array.isArray(object?.path) ? object.path.map((e: any) => globalThis.Number(e)) : [], span: globalThis.Array.isArray(object?.span) ? object.span.map((e: any) => globalThis.Number(e)) : [], leadingComments: isSet(object.leadingComments) ? globalThis.String(object.leadingComments) : "", trailingComments: isSet(object.trailingComments) ? globalThis.String(object.trailingComments) : "", leadingDetachedComments: globalThis.Array.isArray(object?.leadingDetachedComments) ? 
object.leadingDetachedComments.map((e: any) => globalThis.String(e)) : [], }; }, toJSON(message: SourceCodeInfo_Location): unknown { const obj: any = {}; if (message.path?.length) { obj.path = message.path.map((e) => Math.round(e)); } if (message.span?.length) { obj.span = message.span.map((e) => Math.round(e)); } if (message.leadingComments !== undefined && message.leadingComments !== "") { obj.leadingComments = message.leadingComments; } if (message.trailingComments !== undefined && message.trailingComments !== "") { obj.trailingComments = message.trailingComments; } if (message.leadingDetachedComments?.length) { obj.leadingDetachedComments = message.leadingDetachedComments; } return obj; }, }; export const GeneratedCodeInfo: MessageFns = { fromJSON(object: any): GeneratedCodeInfo { return { annotation: globalThis.Array.isArray(object?.annotation) ? object.annotation.map((e: any) => GeneratedCodeInfo_Annotation.fromJSON(e)) : [], }; }, toJSON(message: GeneratedCodeInfo): unknown { const obj: any = {}; if (message.annotation?.length) { obj.annotation = message.annotation.map((e) => GeneratedCodeInfo_Annotation.toJSON(e)); } return obj; }, }; export const GeneratedCodeInfo_Annotation: MessageFns = { fromJSON(object: any): GeneratedCodeInfo_Annotation { return { path: globalThis.Array.isArray(object?.path) ? object.path.map((e: any) => globalThis.Number(e)) : [], sourceFile: isSet(object.sourceFile) ? globalThis.String(object.sourceFile) : "", begin: isSet(object.begin) ? globalThis.Number(object.begin) : 0, end: isSet(object.end) ? globalThis.Number(object.end) : 0, semantic: isSet(object.semantic) ? generatedCodeInfo_Annotation_SemanticFromJSON(object.semantic) : 0, }; }, toJSON(message: GeneratedCodeInfo_Annotation): unknown { const obj: any = {}; if (message.path?.length) { obj.path = message.path.map((e) => Math.round(e)); } if (message.sourceFile !== undefined && message.sourceFile !== "") { obj.sourceFile = message.sourceFile; } if (message.begin !== undefined && message.begin !== 0) { obj.begin = Math.round(message.begin); } if (message.end !== undefined && message.end !== 0) { obj.end = Math.round(message.end); } if (message.semantic !== undefined && message.semantic !== 0) { obj.semantic = generatedCodeInfo_Annotation_SemanticToJSON(message.semantic); } return obj; }, }; function bytesFromBase64(b64: string): Uint8Array { return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); } function base64FromBytes(arr: Uint8Array): string { return globalThis.Buffer.from(arr).toString("base64"); } function isSet(value: any): boolean { return value !== null && value !== undefined; } interface MessageFns { fromJSON(object: any): T; toJSON(message: T): unknown; } protobuf-specs-0.4.1/gen/pb-typescript/src/__generated__/google/protobuf/timestamp.ts000066400000000000000000000123341477352757300311040ustar00rootroot00000000000000// Code generated by protoc-gen-ts_proto. DO NOT EDIT. // versions: // protoc-gen-ts_proto v2.6.1 // protoc v5.29.4 // source: google/protobuf/timestamp.proto /* eslint-disable */ /** * A Timestamp represents a point in time independent of any time zone or local * calendar, encoded as a count of seconds and fractions of seconds at * nanosecond resolution. The count is relative to an epoch at UTC midnight on * January 1, 1970, in the proleptic Gregorian calendar which extends the * Gregorian calendar backwards to year one. * * All minutes are 60 seconds long. 
Leap seconds are "smeared" so that no leap * second table is needed for interpretation, using a [24-hour linear * smear](https://developers.google.com/time/smear). * * The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By * restricting to that range, we ensure that we can convert to and from [RFC * 3339](https://www.ietf.org/rfc/rfc3339.txt) date strings. * * # Examples * * Example 1: Compute Timestamp from POSIX `time()`. * * Timestamp timestamp; * timestamp.set_seconds(time(NULL)); * timestamp.set_nanos(0); * * Example 2: Compute Timestamp from POSIX `gettimeofday()`. * * struct timeval tv; * gettimeofday(&tv, NULL); * * Timestamp timestamp; * timestamp.set_seconds(tv.tv_sec); * timestamp.set_nanos(tv.tv_usec * 1000); * * Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`. * * FILETIME ft; * GetSystemTimeAsFileTime(&ft); * UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime; * * // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z * // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z. * Timestamp timestamp; * timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL)); * timestamp.set_nanos((INT32) ((ticks % 10000000) * 100)); * * Example 4: Compute Timestamp from Java `System.currentTimeMillis()`. * * long millis = System.currentTimeMillis(); * * Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000) * .setNanos((int) ((millis % 1000) * 1000000)).build(); * * Example 5: Compute Timestamp from Java `Instant.now()`. * * Instant now = Instant.now(); * * Timestamp timestamp = * Timestamp.newBuilder().setSeconds(now.getEpochSecond()) * .setNanos(now.getNano()).build(); * * Example 6: Compute Timestamp from current time in Python. * * timestamp = Timestamp() * timestamp.GetCurrentTime() * * # JSON Mapping * * In JSON format, the Timestamp type is encoded as a string in the * [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the * format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z" * where {year} is always expressed using four digits while {month}, {day}, * {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional * seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution), * are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone * is required. A proto3 JSON serializer should always use UTC (as indicated by * "Z") when printing the Timestamp type and a proto3 JSON parser should be * able to accept both UTC and other timezones (as indicated by an offset). * * For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past * 01:30 UTC on January 15, 2017. * * In JavaScript, one can convert a Date object to this format using the * standard * [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString) * method. In Python, a standard `datetime.datetime` object can be converted * to this format using * [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with * the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one can use * the Joda Time's [`ISODateTimeFormat.dateTime()`]( * http://joda-time.sourceforge.net/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime() * ) to obtain a formatter capable of generating timestamps in this format. */ export interface Timestamp { /** * Represents seconds of UTC time since Unix epoch * 1970-01-01T00:00:00Z. 
Must be from 0001-01-01T00:00:00Z to * 9999-12-31T23:59:59Z inclusive. */ seconds: string; /** * Non-negative fractions of a second at nanosecond resolution. Negative * second values with fractions must still have non-negative nanos values * that count forward in time. Must be from 0 to 999,999,999 * inclusive. */ nanos: number; } export const Timestamp: MessageFns = { fromJSON(object: any): Timestamp { return { seconds: isSet(object.seconds) ? globalThis.String(object.seconds) : "0", nanos: isSet(object.nanos) ? globalThis.Number(object.nanos) : 0, }; }, toJSON(message: Timestamp): unknown { const obj: any = {}; if (message.seconds !== "0") { obj.seconds = message.seconds; } if (message.nanos !== 0) { obj.nanos = Math.round(message.nanos); } return obj; }, }; function isSet(value: any): boolean { return value !== null && value !== undefined; } interface MessageFns { fromJSON(object: any): T; toJSON(message: T): unknown; } protobuf-specs-0.4.1/gen/pb-typescript/src/__generated__/sigstore_bundle.ts000066400000000000000000000235341477352757300271610ustar00rootroot00000000000000// Code generated by protoc-gen-ts_proto. DO NOT EDIT. // versions: // protoc-gen-ts_proto v2.6.1 // protoc v5.29.4 // source: sigstore_bundle.proto /* eslint-disable */ import { Envelope } from "./envelope"; import { MessageSignature, PublicKeyIdentifier, RFC3161SignedTimestamp, X509Certificate, X509CertificateChain, } from "./sigstore_common"; import { TransparencyLogEntry } from "./sigstore_rekor"; /** * Various timestamped counter signatures over the artifacts signature. * Currently only RFC3161 signatures are provided. More formats may be added * in the future. */ export interface TimestampVerificationData { /** * A list of RFC3161 signed timestamps provided by the user. * This can be used when the entry has not been stored on a * transparency log, or in conjunction for a stronger trust model. * Clients MUST verify the hashed message in the message imprint * against the signature in the bundle. */ rfc3161Timestamps: RFC3161SignedTimestamp[]; } /** * VerificationMaterial captures details on the materials used to verify * signatures. This message may be embedded in a DSSE envelope as a signature * extension. Specifically, the `ext` field of the extension will expect this * message when the signature extension is for Sigstore. This is identified by * the `kind` field in the extension, which must be set to * application/vnd.dev.sigstore.verificationmaterial;version=0.1 for Sigstore. * When used as a DSSE extension, if the `public_key` field is used to indicate * the key identifier, it MUST match the `keyid` field of the signature the * extension is attached to. */ export interface VerificationMaterial { /** * The key material for verification purposes. * * This allows key material to be conveyed in one of three forms: * * 1. An unspecified public key identifier, for retrieving a key * from an out-of-band mechanism (such as a keyring); * * 2. A sequence of one or more X.509 certificates, of which the first member * MUST be a leaf certificate conveying the signing key. Subsequent members * SHOULD be in issuing order, meaning that `n + 1` should be an issuer for `n`. * * Signers MUST NOT include root CA certificates in bundles, and SHOULD NOT * include intermediate CA certificates that appear in an independent root of trust * (such as the Public Good Instance's trusted root). * * Verifiers MUST validate the chain carefully to ensure that it chains up * to a CA certificate that they independently trust. 
Verifiers SHOULD * handle old or non-complying bundles that have superfluous intermediate and/or * root CA certificates by either ignoring them or explicitly considering them * untrusted for the purposes of chain building. * * 3. A single X.509 certificate, which MUST be a leaf certificate conveying * the signing key. * * When used with the Public Good Instance (PGI) of Sigstore for "keyless" signing * via Fulcio, form (1) MUST NOT be used, regardless of bundle version. Form (1) * MAY be used with the PGI for self-managed keys. * * When used in a `0.1` or `0.2` bundle with the PGI and "keyless" signing, * form (2) MUST be used. * * When used in a `0.3` bundle with the PGI and "keyless" signing, * form (3) MUST be used. */ content?: | { $case: "publicKey"; publicKey: PublicKeyIdentifier } | { $case: "x509CertificateChain"; x509CertificateChain: X509CertificateChain } | { $case: "certificate"; certificate: X509Certificate } | undefined; /** * An inclusion proof and an optional signed timestamp from the log. * Client verification libraries MAY provide an option to support v0.1 * bundles for backwards compatibility, which may contain an inclusion * promise and not an inclusion proof. In this case, the client MUST * validate the promise. * Verifiers SHOULD NOT allow v0.1 bundles if they're used in an * ecosystem which never produced them. */ tlogEntries: TransparencyLogEntry[]; /** * Timestamp may also come from * tlog_entries.inclusion_promise.signed_entry_timestamp. */ timestampVerificationData: TimestampVerificationData | undefined; } export interface Bundle { /** * MUST be application/vnd.dev.sigstore.bundle.v0.3+json when * when encoded as JSON. * Clients must to be able to accept media type using the previously * defined formats: * * application/vnd.dev.sigstore.bundle+json;version=0.1 * * application/vnd.dev.sigstore.bundle+json;version=0.2 * * application/vnd.dev.sigstore.bundle+json;version=0.3 */ mediaType: string; /** * When a signer is identified by a X.509 certificate, a verifier MUST * verify that the signature was computed at the time the certificate * was valid as described in the Sigstore client spec: "Verification * using a Bundle". * * If the verification material contains a public key identifier * (key hint) and the `content` is a DSSE envelope, the key hints * MUST be exactly the same in the verification material and in the * DSSE envelope. */ verificationMaterial: VerificationMaterial | undefined; content?: { $case: "messageSignature"; messageSignature: MessageSignature } | // /** * A DSSE envelope can contain arbitrary payloads. * Verifiers must verify that the payload type is a * supported and expected type. This is part of the DSSE * protocol which is defined here: * * DSSE envelopes in a bundle MUST have exactly one signature. * This is a limitation from the DSSE spec, as it can contain * multiple signatures. There are two primary reasons: * 1. It simplifies the verification logic and policy * 2. The bundle (currently) can only contain a single * instance of the required verification materials * During verification a client MUST reject an envelope if * the number of signatures is not equal to one. */ { $case: "dsseEnvelope"; dsseEnvelope: Envelope } | undefined; } export const TimestampVerificationData: MessageFns = { fromJSON(object: any): TimestampVerificationData { return { rfc3161Timestamps: globalThis.Array.isArray(object?.rfc3161Timestamps) ? 
object.rfc3161Timestamps.map((e: any) => RFC3161SignedTimestamp.fromJSON(e)) : [], }; }, toJSON(message: TimestampVerificationData): unknown { const obj: any = {}; if (message.rfc3161Timestamps?.length) { obj.rfc3161Timestamps = message.rfc3161Timestamps.map((e) => RFC3161SignedTimestamp.toJSON(e)); } return obj; }, }; export const VerificationMaterial: MessageFns = { fromJSON(object: any): VerificationMaterial { return { content: isSet(object.publicKey) ? { $case: "publicKey", publicKey: PublicKeyIdentifier.fromJSON(object.publicKey) } : isSet(object.x509CertificateChain) ? { $case: "x509CertificateChain", x509CertificateChain: X509CertificateChain.fromJSON(object.x509CertificateChain), } : isSet(object.certificate) ? { $case: "certificate", certificate: X509Certificate.fromJSON(object.certificate) } : undefined, tlogEntries: globalThis.Array.isArray(object?.tlogEntries) ? object.tlogEntries.map((e: any) => TransparencyLogEntry.fromJSON(e)) : [], timestampVerificationData: isSet(object.timestampVerificationData) ? TimestampVerificationData.fromJSON(object.timestampVerificationData) : undefined, }; }, toJSON(message: VerificationMaterial): unknown { const obj: any = {}; if (message.content?.$case === "publicKey") { obj.publicKey = PublicKeyIdentifier.toJSON(message.content.publicKey); } else if (message.content?.$case === "x509CertificateChain") { obj.x509CertificateChain = X509CertificateChain.toJSON(message.content.x509CertificateChain); } else if (message.content?.$case === "certificate") { obj.certificate = X509Certificate.toJSON(message.content.certificate); } if (message.tlogEntries?.length) { obj.tlogEntries = message.tlogEntries.map((e) => TransparencyLogEntry.toJSON(e)); } if (message.timestampVerificationData !== undefined) { obj.timestampVerificationData = TimestampVerificationData.toJSON(message.timestampVerificationData); } return obj; }, }; export const Bundle: MessageFns = { fromJSON(object: any): Bundle { return { mediaType: isSet(object.mediaType) ? globalThis.String(object.mediaType) : "", verificationMaterial: isSet(object.verificationMaterial) ? VerificationMaterial.fromJSON(object.verificationMaterial) : undefined, content: isSet(object.messageSignature) ? { $case: "messageSignature", messageSignature: MessageSignature.fromJSON(object.messageSignature) } : isSet(object.dsseEnvelope) ? { $case: "dsseEnvelope", dsseEnvelope: Envelope.fromJSON(object.dsseEnvelope) } : undefined, }; }, toJSON(message: Bundle): unknown { const obj: any = {}; if (message.mediaType !== "") { obj.mediaType = message.mediaType; } if (message.verificationMaterial !== undefined) { obj.verificationMaterial = VerificationMaterial.toJSON(message.verificationMaterial); } if (message.content?.$case === "messageSignature") { obj.messageSignature = MessageSignature.toJSON(message.content.messageSignature); } else if (message.content?.$case === "dsseEnvelope") { obj.dsseEnvelope = Envelope.toJSON(message.content.dsseEnvelope); } return obj; }, }; function isSet(value: any): boolean { return value !== null && value !== undefined; } interface MessageFns { fromJSON(object: any): T; toJSON(message: T): unknown; } protobuf-specs-0.4.1/gen/pb-typescript/src/__generated__/sigstore_common.ts000066400000000000000000000551631477352757300272030ustar00rootroot00000000000000// Code generated by protoc-gen-ts_proto. DO NOT EDIT. 
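// A minimal sketch (assumed helper names, not part of the generated protobuf
// output) of the media-type acceptance rule described for `Bundle.mediaType`
// in sigstore_bundle.ts above: clients emit the v0.3 media type when encoding
// as JSON, but accept the previously defined formats as well.
const SUPPORTED_BUNDLE_MEDIA_TYPES = [
  "application/vnd.dev.sigstore.bundle.v0.3+json",
  "application/vnd.dev.sigstore.bundle+json;version=0.1",
  "application/vnd.dev.sigstore.bundle+json;version=0.2",
  "application/vnd.dev.sigstore.bundle+json;version=0.3",
];
function isSupportedBundleMediaType(mediaType: string): boolean {
  return SUPPORTED_BUNDLE_MEDIA_TYPES.includes(mediaType);
}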
// versions: // protoc-gen-ts_proto v2.6.1 // protoc v5.29.4 // source: sigstore_common.proto /* eslint-disable */ import { Timestamp } from "./google/protobuf/timestamp"; /** * Only a subset of the secure hash standard algorithms are supported. * See for more * details. * UNSPECIFIED SHOULD not be used, primary reason for inclusion is to force * any proto JSON serialization to emit the used hash algorithm, as default * option is to *omit* the default value of an enum (which is the first * value, represented by '0'. */ export enum HashAlgorithm { HASH_ALGORITHM_UNSPECIFIED = 0, SHA2_256 = 1, SHA2_384 = 2, SHA2_512 = 3, SHA3_256 = 4, SHA3_384 = 5, } export function hashAlgorithmFromJSON(object: any): HashAlgorithm { switch (object) { case 0: case "HASH_ALGORITHM_UNSPECIFIED": return HashAlgorithm.HASH_ALGORITHM_UNSPECIFIED; case 1: case "SHA2_256": return HashAlgorithm.SHA2_256; case 2: case "SHA2_384": return HashAlgorithm.SHA2_384; case 3: case "SHA2_512": return HashAlgorithm.SHA2_512; case 4: case "SHA3_256": return HashAlgorithm.SHA3_256; case 5: case "SHA3_384": return HashAlgorithm.SHA3_384; default: throw new globalThis.Error("Unrecognized enum value " + object + " for enum HashAlgorithm"); } } export function hashAlgorithmToJSON(object: HashAlgorithm): string { switch (object) { case HashAlgorithm.HASH_ALGORITHM_UNSPECIFIED: return "HASH_ALGORITHM_UNSPECIFIED"; case HashAlgorithm.SHA2_256: return "SHA2_256"; case HashAlgorithm.SHA2_384: return "SHA2_384"; case HashAlgorithm.SHA2_512: return "SHA2_512"; case HashAlgorithm.SHA3_256: return "SHA3_256"; case HashAlgorithm.SHA3_384: return "SHA3_384"; default: throw new globalThis.Error("Unrecognized enum value " + object + " for enum HashAlgorithm"); } } /** * Details of a specific public key, capturing the the key encoding method, * and signature algorithm. * * PublicKeyDetails captures the public key/hash algorithm combinations * recommended in the Sigstore ecosystem. * * This is modelled as a linear set as we want to provide a small number of * opinionated options instead of allowing every possible permutation. * * Any changes to this enum MUST be reflected in the algorithm registry. * See: docs/algorithm-registry.md * * To avoid the possibility of contradicting formats such as PKCS1 with * ED25519 the valid permutations are listed as a linear set instead of a * cartesian set (i.e one combined variable instead of two, one for encoding * and one for the signature algorithm). 
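 *
 * For example, an ECDSA key on the NIST P-256 curve used with SHA-256 is
 * represented by the single value PKIX_ECDSA_P256_SHA_256 below, rather than
 * by separate "key encoding" and "signature algorithm" fields.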
*/ export enum PublicKeyDetails { PUBLIC_KEY_DETAILS_UNSPECIFIED = 0, /** * PKCS1_RSA_PKCS1V5 - RSA * * @deprecated */ PKCS1_RSA_PKCS1V5 = 1, /** * PKCS1_RSA_PSS - See RFC8017 * * @deprecated */ PKCS1_RSA_PSS = 2, /** @deprecated */ PKIX_RSA_PKCS1V5 = 3, /** @deprecated */ PKIX_RSA_PSS = 4, /** PKIX_RSA_PKCS1V15_2048_SHA256 - RSA public key in PKIX format, PKCS#1v1.5 signature */ PKIX_RSA_PKCS1V15_2048_SHA256 = 9, PKIX_RSA_PKCS1V15_3072_SHA256 = 10, PKIX_RSA_PKCS1V15_4096_SHA256 = 11, /** PKIX_RSA_PSS_2048_SHA256 - RSA public key in PKIX format, RSASSA-PSS signature */ PKIX_RSA_PSS_2048_SHA256 = 16, PKIX_RSA_PSS_3072_SHA256 = 17, PKIX_RSA_PSS_4096_SHA256 = 18, /** * PKIX_ECDSA_P256_HMAC_SHA_256 - ECDSA * * @deprecated */ PKIX_ECDSA_P256_HMAC_SHA_256 = 6, /** PKIX_ECDSA_P256_SHA_256 - See NIST FIPS 186-4 */ PKIX_ECDSA_P256_SHA_256 = 5, PKIX_ECDSA_P384_SHA_384 = 12, PKIX_ECDSA_P521_SHA_512 = 13, /** PKIX_ED25519 - Ed 25519 */ PKIX_ED25519 = 7, PKIX_ED25519_PH = 8, /** * PKIX_ECDSA_P384_SHA_256 - These algorithms are deprecated and should not be used, but they * were/are being used by most Sigstore clients implementations. * * @deprecated */ PKIX_ECDSA_P384_SHA_256 = 19, /** @deprecated */ PKIX_ECDSA_P521_SHA_256 = 20, /** * LMS_SHA256 - LMS and LM-OTS * * These keys and signatures may be used by private Sigstore * deployments, but are not currently supported by the public * good instance. * * USER WARNING: LMS and LM-OTS are both stateful signature schemes. * Using them correctly requires discretion and careful consideration * to ensure that individual secret keys are not used more than once. * In addition, LM-OTS is a single-use scheme, meaning that it * MUST NOT be used for more than one signature per LM-OTS key. * If you cannot maintain these invariants, you MUST NOT use these * schemes. 
*/ LMS_SHA256 = 14, LMOTS_SHA256 = 15, } export function publicKeyDetailsFromJSON(object: any): PublicKeyDetails { switch (object) { case 0: case "PUBLIC_KEY_DETAILS_UNSPECIFIED": return PublicKeyDetails.PUBLIC_KEY_DETAILS_UNSPECIFIED; case 1: case "PKCS1_RSA_PKCS1V5": return PublicKeyDetails.PKCS1_RSA_PKCS1V5; case 2: case "PKCS1_RSA_PSS": return PublicKeyDetails.PKCS1_RSA_PSS; case 3: case "PKIX_RSA_PKCS1V5": return PublicKeyDetails.PKIX_RSA_PKCS1V5; case 4: case "PKIX_RSA_PSS": return PublicKeyDetails.PKIX_RSA_PSS; case 9: case "PKIX_RSA_PKCS1V15_2048_SHA256": return PublicKeyDetails.PKIX_RSA_PKCS1V15_2048_SHA256; case 10: case "PKIX_RSA_PKCS1V15_3072_SHA256": return PublicKeyDetails.PKIX_RSA_PKCS1V15_3072_SHA256; case 11: case "PKIX_RSA_PKCS1V15_4096_SHA256": return PublicKeyDetails.PKIX_RSA_PKCS1V15_4096_SHA256; case 16: case "PKIX_RSA_PSS_2048_SHA256": return PublicKeyDetails.PKIX_RSA_PSS_2048_SHA256; case 17: case "PKIX_RSA_PSS_3072_SHA256": return PublicKeyDetails.PKIX_RSA_PSS_3072_SHA256; case 18: case "PKIX_RSA_PSS_4096_SHA256": return PublicKeyDetails.PKIX_RSA_PSS_4096_SHA256; case 6: case "PKIX_ECDSA_P256_HMAC_SHA_256": return PublicKeyDetails.PKIX_ECDSA_P256_HMAC_SHA_256; case 5: case "PKIX_ECDSA_P256_SHA_256": return PublicKeyDetails.PKIX_ECDSA_P256_SHA_256; case 12: case "PKIX_ECDSA_P384_SHA_384": return PublicKeyDetails.PKIX_ECDSA_P384_SHA_384; case 13: case "PKIX_ECDSA_P521_SHA_512": return PublicKeyDetails.PKIX_ECDSA_P521_SHA_512; case 7: case "PKIX_ED25519": return PublicKeyDetails.PKIX_ED25519; case 8: case "PKIX_ED25519_PH": return PublicKeyDetails.PKIX_ED25519_PH; case 19: case "PKIX_ECDSA_P384_SHA_256": return PublicKeyDetails.PKIX_ECDSA_P384_SHA_256; case 20: case "PKIX_ECDSA_P521_SHA_256": return PublicKeyDetails.PKIX_ECDSA_P521_SHA_256; case 14: case "LMS_SHA256": return PublicKeyDetails.LMS_SHA256; case 15: case "LMOTS_SHA256": return PublicKeyDetails.LMOTS_SHA256; default: throw new globalThis.Error("Unrecognized enum value " + object + " for enum PublicKeyDetails"); } } export function publicKeyDetailsToJSON(object: PublicKeyDetails): string { switch (object) { case PublicKeyDetails.PUBLIC_KEY_DETAILS_UNSPECIFIED: return "PUBLIC_KEY_DETAILS_UNSPECIFIED"; case PublicKeyDetails.PKCS1_RSA_PKCS1V5: return "PKCS1_RSA_PKCS1V5"; case PublicKeyDetails.PKCS1_RSA_PSS: return "PKCS1_RSA_PSS"; case PublicKeyDetails.PKIX_RSA_PKCS1V5: return "PKIX_RSA_PKCS1V5"; case PublicKeyDetails.PKIX_RSA_PSS: return "PKIX_RSA_PSS"; case PublicKeyDetails.PKIX_RSA_PKCS1V15_2048_SHA256: return "PKIX_RSA_PKCS1V15_2048_SHA256"; case PublicKeyDetails.PKIX_RSA_PKCS1V15_3072_SHA256: return "PKIX_RSA_PKCS1V15_3072_SHA256"; case PublicKeyDetails.PKIX_RSA_PKCS1V15_4096_SHA256: return "PKIX_RSA_PKCS1V15_4096_SHA256"; case PublicKeyDetails.PKIX_RSA_PSS_2048_SHA256: return "PKIX_RSA_PSS_2048_SHA256"; case PublicKeyDetails.PKIX_RSA_PSS_3072_SHA256: return "PKIX_RSA_PSS_3072_SHA256"; case PublicKeyDetails.PKIX_RSA_PSS_4096_SHA256: return "PKIX_RSA_PSS_4096_SHA256"; case PublicKeyDetails.PKIX_ECDSA_P256_HMAC_SHA_256: return "PKIX_ECDSA_P256_HMAC_SHA_256"; case PublicKeyDetails.PKIX_ECDSA_P256_SHA_256: return "PKIX_ECDSA_P256_SHA_256"; case PublicKeyDetails.PKIX_ECDSA_P384_SHA_384: return "PKIX_ECDSA_P384_SHA_384"; case PublicKeyDetails.PKIX_ECDSA_P521_SHA_512: return "PKIX_ECDSA_P521_SHA_512"; case PublicKeyDetails.PKIX_ED25519: return "PKIX_ED25519"; case PublicKeyDetails.PKIX_ED25519_PH: return "PKIX_ED25519_PH"; case PublicKeyDetails.PKIX_ECDSA_P384_SHA_256: return "PKIX_ECDSA_P384_SHA_256"; case 
PublicKeyDetails.PKIX_ECDSA_P521_SHA_256: return "PKIX_ECDSA_P521_SHA_256"; case PublicKeyDetails.LMS_SHA256: return "LMS_SHA256"; case PublicKeyDetails.LMOTS_SHA256: return "LMOTS_SHA256"; default: throw new globalThis.Error("Unrecognized enum value " + object + " for enum PublicKeyDetails"); } } export enum SubjectAlternativeNameType { SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED = 0, EMAIL = 1, URI = 2, /** * OTHER_NAME - OID 1.3.6.1.4.1.57264.1.7 * See https://github.com/sigstore/fulcio/blob/main/docs/oid-info.md#1361415726417--othername-san * for more details. */ OTHER_NAME = 3, } export function subjectAlternativeNameTypeFromJSON(object: any): SubjectAlternativeNameType { switch (object) { case 0: case "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED": return SubjectAlternativeNameType.SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED; case 1: case "EMAIL": return SubjectAlternativeNameType.EMAIL; case 2: case "URI": return SubjectAlternativeNameType.URI; case 3: case "OTHER_NAME": return SubjectAlternativeNameType.OTHER_NAME; default: throw new globalThis.Error("Unrecognized enum value " + object + " for enum SubjectAlternativeNameType"); } } export function subjectAlternativeNameTypeToJSON(object: SubjectAlternativeNameType): string { switch (object) { case SubjectAlternativeNameType.SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED: return "SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED"; case SubjectAlternativeNameType.EMAIL: return "EMAIL"; case SubjectAlternativeNameType.URI: return "URI"; case SubjectAlternativeNameType.OTHER_NAME: return "OTHER_NAME"; default: throw new globalThis.Error("Unrecognized enum value " + object + " for enum SubjectAlternativeNameType"); } } /** * HashOutput captures a digest of a 'message' (generic octet sequence) * and the corresponding hash algorithm used. */ export interface HashOutput { algorithm: HashAlgorithm; /** * This is the raw octets of the message digest as computed by * the hash algorithm. */ digest: Buffer; } /** MessageSignature stores the computed signature over a message. */ export interface MessageSignature { /** * Message digest can be used to identify the artifact. * Clients MUST NOT attempt to use this digest to verify the associated * signature; it is intended solely for identification. */ messageDigest: | HashOutput | undefined; /** * The raw bytes as returned from the signature algorithm. * The signature algorithm (and so the format of the signature bytes) * are determined by the contents of the 'verification_material', * either a key-pair or a certificate. If using a certificate, the * certificate contains the required information on the signature * algorithm. * When using a key pair, the algorithm MUST be part of the public * key, which MUST be communicated out-of-band. */ signature: Buffer; } /** LogId captures the identity of a transparency log. */ export interface LogId { /** The unique identity of the log, represented by its public key. */ keyId: Buffer; } /** This message holds a RFC 3161 timestamp. */ export interface RFC3161SignedTimestamp { /** * Signed timestamp is the DER encoded TimeStampResponse. * See https://www.rfc-editor.org/rfc/rfc3161.html#section-2.4.2 */ signedTimestamp: Buffer; } export interface PublicKey { /** * DER-encoded public key, encoding method is specified by the * key_details attribute. */ rawBytes?: | Buffer | undefined; /** Key encoding and signature algorithm to use for this key. */ keyDetails: PublicKeyDetails; /** Optional validity period for this key, *inclusive* of the endpoints. 
*/ validFor?: TimeRange | undefined; } /** * PublicKeyIdentifier can be used to identify an (out of band) delivered * key, to verify a signature. */ export interface PublicKeyIdentifier { /** * Optional unauthenticated hint on which key to use. * The format of the hint must be agreed upon out of band by the * signer and the verifiers, and so is not subject to this * specification. * Example use-case is to specify the public key to use, from a * trusted key-ring. * Implementors are RECOMMENDED to derive the value from the public * key as described in RFC 6962. * See: */ hint: string; } /** An ASN.1 OBJECT IDENTIFIER */ export interface ObjectIdentifier { id: number[]; } /** An OID and the corresponding (byte) value. */ export interface ObjectIdentifierValuePair { oid: ObjectIdentifier | undefined; value: Buffer; } export interface DistinguishedName { organization: string; commonName: string; } export interface X509Certificate { /** DER-encoded X.509 certificate. */ rawBytes: Buffer; } export interface SubjectAlternativeName { type: SubjectAlternativeNameType; identity?: | // /** * A regular expression describing the expected value for * the SAN. */ { $case: "regexp"; regexp: string } | // /** The exact value to match against. */ { $case: "value"; value: string } | undefined; } /** * A collection of X.509 certificates. * * This "chain" can be used in multiple contexts, such as providing a root CA * certificate within a TUF root of trust or multiple untrusted certificates for * the purpose of chain building. */ export interface X509CertificateChain { /** * One or more DER-encoded certificates. * * In some contexts (such as `VerificationMaterial.x509_certificate_chain`), this sequence * has an imposed order. Unless explicitly specified, there is otherwise no * guaranteed order. */ certificates: X509Certificate[]; } /** * The time range is closed and includes both the start and end times, * (i.e., [start, end]). * End is optional to be able to capture a period that has started but * has no known end. */ export interface TimeRange { start: Date | undefined; end?: Date | undefined; } export const HashOutput: MessageFns = { fromJSON(object: any): HashOutput { return { algorithm: isSet(object.algorithm) ? hashAlgorithmFromJSON(object.algorithm) : 0, digest: isSet(object.digest) ? Buffer.from(bytesFromBase64(object.digest)) : Buffer.alloc(0), }; }, toJSON(message: HashOutput): unknown { const obj: any = {}; if (message.algorithm !== 0) { obj.algorithm = hashAlgorithmToJSON(message.algorithm); } if (message.digest.length !== 0) { obj.digest = base64FromBytes(message.digest); } return obj; }, }; export const MessageSignature: MessageFns = { fromJSON(object: any): MessageSignature { return { messageDigest: isSet(object.messageDigest) ? HashOutput.fromJSON(object.messageDigest) : undefined, signature: isSet(object.signature) ? Buffer.from(bytesFromBase64(object.signature)) : Buffer.alloc(0), }; }, toJSON(message: MessageSignature): unknown { const obj: any = {}; if (message.messageDigest !== undefined) { obj.messageDigest = HashOutput.toJSON(message.messageDigest); } if (message.signature.length !== 0) { obj.signature = base64FromBytes(message.signature); } return obj; }, }; export const LogId: MessageFns = { fromJSON(object: any): LogId { return { keyId: isSet(object.keyId) ? 
Buffer.from(bytesFromBase64(object.keyId)) : Buffer.alloc(0) }; }, toJSON(message: LogId): unknown { const obj: any = {}; if (message.keyId.length !== 0) { obj.keyId = base64FromBytes(message.keyId); } return obj; }, }; export const RFC3161SignedTimestamp: MessageFns = { fromJSON(object: any): RFC3161SignedTimestamp { return { signedTimestamp: isSet(object.signedTimestamp) ? Buffer.from(bytesFromBase64(object.signedTimestamp)) : Buffer.alloc(0), }; }, toJSON(message: RFC3161SignedTimestamp): unknown { const obj: any = {}; if (message.signedTimestamp.length !== 0) { obj.signedTimestamp = base64FromBytes(message.signedTimestamp); } return obj; }, }; export const PublicKey: MessageFns = { fromJSON(object: any): PublicKey { return { rawBytes: isSet(object.rawBytes) ? Buffer.from(bytesFromBase64(object.rawBytes)) : undefined, keyDetails: isSet(object.keyDetails) ? publicKeyDetailsFromJSON(object.keyDetails) : 0, validFor: isSet(object.validFor) ? TimeRange.fromJSON(object.validFor) : undefined, }; }, toJSON(message: PublicKey): unknown { const obj: any = {}; if (message.rawBytes !== undefined) { obj.rawBytes = base64FromBytes(message.rawBytes); } if (message.keyDetails !== 0) { obj.keyDetails = publicKeyDetailsToJSON(message.keyDetails); } if (message.validFor !== undefined) { obj.validFor = TimeRange.toJSON(message.validFor); } return obj; }, }; export const PublicKeyIdentifier: MessageFns = { fromJSON(object: any): PublicKeyIdentifier { return { hint: isSet(object.hint) ? globalThis.String(object.hint) : "" }; }, toJSON(message: PublicKeyIdentifier): unknown { const obj: any = {}; if (message.hint !== "") { obj.hint = message.hint; } return obj; }, }; export const ObjectIdentifier: MessageFns = { fromJSON(object: any): ObjectIdentifier { return { id: globalThis.Array.isArray(object?.id) ? object.id.map((e: any) => globalThis.Number(e)) : [] }; }, toJSON(message: ObjectIdentifier): unknown { const obj: any = {}; if (message.id?.length) { obj.id = message.id.map((e) => Math.round(e)); } return obj; }, }; export const ObjectIdentifierValuePair: MessageFns = { fromJSON(object: any): ObjectIdentifierValuePair { return { oid: isSet(object.oid) ? ObjectIdentifier.fromJSON(object.oid) : undefined, value: isSet(object.value) ? Buffer.from(bytesFromBase64(object.value)) : Buffer.alloc(0), }; }, toJSON(message: ObjectIdentifierValuePair): unknown { const obj: any = {}; if (message.oid !== undefined) { obj.oid = ObjectIdentifier.toJSON(message.oid); } if (message.value.length !== 0) { obj.value = base64FromBytes(message.value); } return obj; }, }; export const DistinguishedName: MessageFns = { fromJSON(object: any): DistinguishedName { return { organization: isSet(object.organization) ? globalThis.String(object.organization) : "", commonName: isSet(object.commonName) ? globalThis.String(object.commonName) : "", }; }, toJSON(message: DistinguishedName): unknown { const obj: any = {}; if (message.organization !== "") { obj.organization = message.organization; } if (message.commonName !== "") { obj.commonName = message.commonName; } return obj; }, }; export const X509Certificate: MessageFns = { fromJSON(object: any): X509Certificate { return { rawBytes: isSet(object.rawBytes) ? 
Buffer.from(bytesFromBase64(object.rawBytes)) : Buffer.alloc(0) }; }, toJSON(message: X509Certificate): unknown { const obj: any = {}; if (message.rawBytes.length !== 0) { obj.rawBytes = base64FromBytes(message.rawBytes); } return obj; }, }; export const SubjectAlternativeName: MessageFns = { fromJSON(object: any): SubjectAlternativeName { return { type: isSet(object.type) ? subjectAlternativeNameTypeFromJSON(object.type) : 0, identity: isSet(object.regexp) ? { $case: "regexp", regexp: globalThis.String(object.regexp) } : isSet(object.value) ? { $case: "value", value: globalThis.String(object.value) } : undefined, }; }, toJSON(message: SubjectAlternativeName): unknown { const obj: any = {}; if (message.type !== 0) { obj.type = subjectAlternativeNameTypeToJSON(message.type); } if (message.identity?.$case === "regexp") { obj.regexp = message.identity.regexp; } else if (message.identity?.$case === "value") { obj.value = message.identity.value; } return obj; }, }; export const X509CertificateChain: MessageFns = { fromJSON(object: any): X509CertificateChain { return { certificates: globalThis.Array.isArray(object?.certificates) ? object.certificates.map((e: any) => X509Certificate.fromJSON(e)) : [], }; }, toJSON(message: X509CertificateChain): unknown { const obj: any = {}; if (message.certificates?.length) { obj.certificates = message.certificates.map((e) => X509Certificate.toJSON(e)); } return obj; }, }; export const TimeRange: MessageFns = { fromJSON(object: any): TimeRange { return { start: isSet(object.start) ? fromJsonTimestamp(object.start) : undefined, end: isSet(object.end) ? fromJsonTimestamp(object.end) : undefined, }; }, toJSON(message: TimeRange): unknown { const obj: any = {}; if (message.start !== undefined) { obj.start = message.start.toISOString(); } if (message.end !== undefined) { obj.end = message.end.toISOString(); } return obj; }, }; function bytesFromBase64(b64: string): Uint8Array { return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); } function base64FromBytes(arr: Uint8Array): string { return globalThis.Buffer.from(arr).toString("base64"); } function fromTimestamp(t: Timestamp): Date { let millis = (globalThis.Number(t.seconds) || 0) * 1_000; millis += (t.nanos || 0) / 1_000_000; return new globalThis.Date(millis); } function fromJsonTimestamp(o: any): Date { if (o instanceof globalThis.Date) { return o; } else if (typeof o === "string") { return new globalThis.Date(o); } else { return fromTimestamp(Timestamp.fromJSON(o)); } } function isSet(value: any): boolean { return value !== null && value !== undefined; } interface MessageFns { fromJSON(object: any): T; toJSON(message: T): unknown; } protobuf-specs-0.4.1/gen/pb-typescript/src/__generated__/sigstore_rekor.ts000066400000000000000000000252531477352757300270320ustar00rootroot00000000000000// Code generated by protoc-gen-ts_proto. DO NOT EDIT. // versions: // protoc-gen-ts_proto v2.6.1 // protoc v5.29.4 // source: sigstore_rekor.proto /* eslint-disable */ import { LogId } from "./sigstore_common"; /** KindVersion contains the entry's kind and api version. */ export interface KindVersion { /** * Kind is the type of entry being stored in the log. * See here for a list: https://github.com/sigstore/rekor/tree/main/pkg/types */ kind: string; /** The specific api version of the type. */ version: string; } /** * The checkpoint MUST contain an origin string as a unique log identifier, * the tree size, and the root hash. It MAY also be followed by optional data, * and clients MUST NOT assume optional data. 
The checkpoint MUST also contain * a signature over the root hash (tree head). The checkpoint MAY contain additional * signatures, but the first SHOULD be the signature from the log. Checkpoint contents * are concatenated with newlines into a single string. * The checkpoint format is described in * https://github.com/transparency-dev/formats/blob/main/log/README.md * and https://github.com/C2SP/C2SP/blob/main/tlog-checkpoint.md. * An example implementation can be found in https://github.com/sigstore/rekor/blob/main/pkg/util/signed_note.go */ export interface Checkpoint { envelope: string; } /** * InclusionProof is the proof returned from the transparency log. Can * be used for offline or online verification against the log. */ export interface InclusionProof { /** The index of the entry in the tree it was written to. */ logIndex: string; /** * The hash digest stored at the root of the merkle tree at the time * the proof was generated. */ rootHash: Buffer; /** The size of the merkle tree at the time the proof was generated. */ treeSize: string; /** * A list of hashes required to compute the inclusion proof, sorted * in order from leaf to root. * Note that leaf and root hashes are not included. * The root hash is available separately in this message, and the * leaf hash should be calculated by the client. */ hashes: Buffer[]; /** * Signature of the tree head, as of the time of this proof was * generated. See above info on 'Checkpoint' for more details. */ checkpoint: Checkpoint | undefined; } /** * The inclusion promise is calculated by Rekor. It's calculated as a * signature over a canonical JSON serialization of the persisted entry, the * log ID, log index and the integration timestamp. * See https://github.com/sigstore/rekor/blob/a6e58f72b6b18cc06cefe61808efd562b9726330/pkg/api/entries.go#L54 * The format of the signature depends on the transparency log's public key. * If the signature algorithm requires a hash function and/or a signature * scheme (e.g. RSA) those has to be retrieved out-of-band from the log's * operators, together with the public key. * This is used to verify the integration timestamp's value and that the log * has promised to include the entry. */ export interface InclusionPromise { signedEntryTimestamp: Buffer; } /** * TransparencyLogEntry captures all the details required from Rekor to * reconstruct an entry, given that the payload is provided via other means. * This type can easily be created from the existing response from Rekor. * Future iterations could rely on Rekor returning the minimal set of * attributes (excluding the payload) that are required for verifying the * inclusion promise. The inclusion promise (called SignedEntryTimestamp in * the response from Rekor) is similar to a Signed Certificate Timestamp * as described here https://www.rfc-editor.org/rfc/rfc6962.html#section-3.2. */ export interface TransparencyLogEntry { /** The global index of the entry, used when querying the log by index. */ logIndex: string; /** The unique identifier of the log. */ logId: | LogId | undefined; /** * The kind (type) and version of the object associated with this * entry. These values are required to construct the entry during * verification. */ kindVersion: | KindVersion | undefined; /** * The UNIX timestamp from the log when the entry was persisted. * The integration time MUST NOT be trusted if inclusion_promise * is omitted. */ integratedTime: string; /** * The inclusion promise/signed entry timestamp from the log. * Required for v0.1 bundles, and MUST be verified. 
* Optional for >= v0.2 bundles if another suitable source of * time is present (such as another source of signed time, * or the current system time for long-lived certificates). * MUST be verified if no other suitable source of time is present, * and SHOULD be verified otherwise. */ inclusionPromise: | InclusionPromise | undefined; /** * The inclusion proof can be used for offline or online verification * that the entry was appended to the log, and that the log has not been * altered. */ inclusionProof: | InclusionProof | undefined; /** * Optional. The canonicalized transparency log entry, used to * reconstruct the Signed Entry Timestamp (SET) during verification. * The contents of this field are the same as the `body` field in * a Rekor response, meaning that it does **not** include the "full" * canonicalized form (of log index, ID, etc.) which are * exposed as separate fields. The verifier is responsible for * combining the `canonicalized_body`, `log_index`, `log_id`, * and `integrated_time` into the payload that the SET's signature * is generated over. * This field is intended to be used in cases where the SET cannot be * produced determinisitically (e.g. inconsistent JSON field ordering, * differing whitespace, etc). * * If set, clients MUST verify that the signature referenced in the * `canonicalized_body` matches the signature provided in the * `Bundle.content`. * If not set, clients are responsible for constructing an equivalent * payload from other sources to verify the signature. */ canonicalizedBody: Buffer; } export const KindVersion: MessageFns = { fromJSON(object: any): KindVersion { return { kind: isSet(object.kind) ? globalThis.String(object.kind) : "", version: isSet(object.version) ? globalThis.String(object.version) : "", }; }, toJSON(message: KindVersion): unknown { const obj: any = {}; if (message.kind !== "") { obj.kind = message.kind; } if (message.version !== "") { obj.version = message.version; } return obj; }, }; export const Checkpoint: MessageFns = { fromJSON(object: any): Checkpoint { return { envelope: isSet(object.envelope) ? globalThis.String(object.envelope) : "" }; }, toJSON(message: Checkpoint): unknown { const obj: any = {}; if (message.envelope !== "") { obj.envelope = message.envelope; } return obj; }, }; export const InclusionProof: MessageFns = { fromJSON(object: any): InclusionProof { return { logIndex: isSet(object.logIndex) ? globalThis.String(object.logIndex) : "0", rootHash: isSet(object.rootHash) ? Buffer.from(bytesFromBase64(object.rootHash)) : Buffer.alloc(0), treeSize: isSet(object.treeSize) ? globalThis.String(object.treeSize) : "0", hashes: globalThis.Array.isArray(object?.hashes) ? object.hashes.map((e: any) => Buffer.from(bytesFromBase64(e))) : [], checkpoint: isSet(object.checkpoint) ? Checkpoint.fromJSON(object.checkpoint) : undefined, }; }, toJSON(message: InclusionProof): unknown { const obj: any = {}; if (message.logIndex !== "0") { obj.logIndex = message.logIndex; } if (message.rootHash.length !== 0) { obj.rootHash = base64FromBytes(message.rootHash); } if (message.treeSize !== "0") { obj.treeSize = message.treeSize; } if (message.hashes?.length) { obj.hashes = message.hashes.map((e) => base64FromBytes(e)); } if (message.checkpoint !== undefined) { obj.checkpoint = Checkpoint.toJSON(message.checkpoint); } return obj; }, }; export const InclusionPromise: MessageFns = { fromJSON(object: any): InclusionPromise { return { signedEntryTimestamp: isSet(object.signedEntryTimestamp) ? 
Buffer.from(bytesFromBase64(object.signedEntryTimestamp)) : Buffer.alloc(0), }; }, toJSON(message: InclusionPromise): unknown { const obj: any = {}; if (message.signedEntryTimestamp.length !== 0) { obj.signedEntryTimestamp = base64FromBytes(message.signedEntryTimestamp); } return obj; }, }; export const TransparencyLogEntry: MessageFns = { fromJSON(object: any): TransparencyLogEntry { return { logIndex: isSet(object.logIndex) ? globalThis.String(object.logIndex) : "0", logId: isSet(object.logId) ? LogId.fromJSON(object.logId) : undefined, kindVersion: isSet(object.kindVersion) ? KindVersion.fromJSON(object.kindVersion) : undefined, integratedTime: isSet(object.integratedTime) ? globalThis.String(object.integratedTime) : "0", inclusionPromise: isSet(object.inclusionPromise) ? InclusionPromise.fromJSON(object.inclusionPromise) : undefined, inclusionProof: isSet(object.inclusionProof) ? InclusionProof.fromJSON(object.inclusionProof) : undefined, canonicalizedBody: isSet(object.canonicalizedBody) ? Buffer.from(bytesFromBase64(object.canonicalizedBody)) : Buffer.alloc(0), }; }, toJSON(message: TransparencyLogEntry): unknown { const obj: any = {}; if (message.logIndex !== "0") { obj.logIndex = message.logIndex; } if (message.logId !== undefined) { obj.logId = LogId.toJSON(message.logId); } if (message.kindVersion !== undefined) { obj.kindVersion = KindVersion.toJSON(message.kindVersion); } if (message.integratedTime !== "0") { obj.integratedTime = message.integratedTime; } if (message.inclusionPromise !== undefined) { obj.inclusionPromise = InclusionPromise.toJSON(message.inclusionPromise); } if (message.inclusionProof !== undefined) { obj.inclusionProof = InclusionProof.toJSON(message.inclusionProof); } if (message.canonicalizedBody.length !== 0) { obj.canonicalizedBody = base64FromBytes(message.canonicalizedBody); } return obj; }, }; function bytesFromBase64(b64: string): Uint8Array { return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); } function base64FromBytes(arr: Uint8Array): string { return globalThis.Buffer.from(arr).toString("base64"); } function isSet(value: any): boolean { return value !== null && value !== undefined; } interface MessageFns { fromJSON(object: any): T; toJSON(message: T): unknown; } protobuf-specs-0.4.1/gen/pb-typescript/src/__generated__/sigstore_trustroot.ts000066400000000000000000000530551477352757300277760ustar00rootroot00000000000000// Code generated by protoc-gen-ts_proto. DO NOT EDIT. // versions: // protoc-gen-ts_proto v2.6.1 // protoc v5.29.4 // source: sigstore_trustroot.proto /* eslint-disable */ import { DistinguishedName, HashAlgorithm, hashAlgorithmFromJSON, hashAlgorithmToJSON, LogId, PublicKey, TimeRange, X509CertificateChain, } from "./sigstore_common"; /** * ServiceSelector specifies how a client SHOULD select a set of * Services to connect to. A client SHOULD throw an error if * the value is SERVICE_SELECTOR_UNDEFINED. */ export enum ServiceSelector { SERVICE_SELECTOR_UNDEFINED = 0, /** * ALL - Clients SHOULD select all Services based on supported API version * and validity window. */ ALL = 1, /** * ANY - Clients SHOULD select one Service based on supported API version * and validity window. It is up to the client implementation to * decide how to select the Service, e.g. random or round-robin. */ ANY = 2, /** * EXACT - Clients SHOULD select a specific number of Services based on * supported API version and validity window, using the provided * `count`. It is up to the client implementation to decide how to * select the Service, e.g. 
random or round-robin. */ EXACT = 3, } export function serviceSelectorFromJSON(object: any): ServiceSelector { switch (object) { case 0: case "SERVICE_SELECTOR_UNDEFINED": return ServiceSelector.SERVICE_SELECTOR_UNDEFINED; case 1: case "ALL": return ServiceSelector.ALL; case 2: case "ANY": return ServiceSelector.ANY; case 3: case "EXACT": return ServiceSelector.EXACT; default: throw new globalThis.Error("Unrecognized enum value " + object + " for enum ServiceSelector"); } } export function serviceSelectorToJSON(object: ServiceSelector): string { switch (object) { case ServiceSelector.SERVICE_SELECTOR_UNDEFINED: return "SERVICE_SELECTOR_UNDEFINED"; case ServiceSelector.ALL: return "ALL"; case ServiceSelector.ANY: return "ANY"; case ServiceSelector.EXACT: return "EXACT"; default: throw new globalThis.Error("Unrecognized enum value " + object + " for enum ServiceSelector"); } } /** * TransparencyLogInstance describes the immutable parameters from a * transparency log. * See https://www.rfc-editor.org/rfc/rfc9162.html#name-log-parameters * for more details. * The included parameters are the minimal set required to identify a log, * and verify an inclusion proof/promise. */ export interface TransparencyLogInstance { /** The base URL at which can be used to URLs for the client. */ baseUrl: string; /** The hash algorithm used for the Merkle Tree. */ hashAlgorithm: HashAlgorithm; /** * The public key used to verify signatures generated by the log. * This attribute contains the signature algorithm used by the log. */ publicKey: | PublicKey | undefined; /** * The unique identifier for this transparency log. * Represented as the SHA-256 hash of the log's public key, * calculated over the DER encoding of the key represented as * SubjectPublicKeyInfo. * See https://www.rfc-editor.org/rfc/rfc6962#section-3.2 */ logId: | LogId | undefined; /** * The checkpoint key identifier for the log used in a checkpoint. * Optional, not provided for logs that do not generate checkpoints. * For logs that do generate checkpoints, if not set, assume * log_id equals checkpoint_key_id. * Follows the specification described here * for ECDSA and Ed25519 signatures: * https://github.com/C2SP/C2SP/blob/main/signed-note.md#signatures * For RSA signatures, the key ID will match the ECDSA format, the * hashed DER-encoded SPKI public key. Publicly witnessed logs MUST NOT * use RSA-signed checkpoints, since witnesses do not support * RSA signatures. * This is provided for convenience. Clients can also calculate the * checkpoint key ID given the log's public key. * SHOULD be set for logs generating Ed25519 signatures. * SHOULD be 4 bytes long, as a truncated hash. */ checkpointKeyId: LogId | undefined; } /** * CertificateAuthority enlists the information required to identify which * CA to use and perform signature verification. */ export interface CertificateAuthority { /** * The root certificate MUST be self-signed, and so the subject and * issuer are the same. */ subject: | DistinguishedName | undefined; /** * The URI identifies the certificate authority. * * It is RECOMMENDED that the URI is the base URL for the certificate * authority, that can be provided to any SDK/client provided * by the certificate authority to interact with the certificate * authority. */ uri: string; /** * The certificate chain for this CA. The last certificate in the chain * MUST be the trust anchor. The trust anchor MAY be a self-signed root * CA certificate or MAY be an intermediate CA certificate. 
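 *
 * For example (illustrative ordering only), a chain serialized as
 * [intermediate CA certificate, self-signed root CA certificate] places the
 * root last, making it the trust anchor.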
*/ certChain: | X509CertificateChain | undefined; /** * The time the *entire* chain was valid. This is at max the * longest interval when *all* certificates in the chain were valid, * but it MAY be shorter. Clients MUST check timestamps against *both* * the `valid_for` time range *and* the entire certificate chain. * * The TimeRange should be considered valid *inclusive* of the * endpoints. */ validFor: TimeRange | undefined; } /** * TrustedRoot describes the client's complete set of trusted entities. * How the TrustedRoot is populated is not specified, but can be a * combination of many sources such as TUF repositories, files on disk etc. * * The TrustedRoot is not meant to be used for any artifact verification, only * to capture the complete/global set of trusted verification materials. * When verifying an artifact, based on the artifact and policies, a selection * of keys/authorities are expected to be extracted and provided to the * verification function. This way the set of keys/authorities can be kept to * a minimal set by the policy to gain better control over what signatures * that are allowed. * * The embedded transparency logs, CT logs, CAs and TSAs MUST include any * previously used instance -- otherwise signatures made in the past cannot * be verified. * * All the listed instances SHOULD be sorted by the 'valid_for' in ascending * order, that is, the oldest instance first. Only the last instance is * allowed to have their 'end' timestamp unset. All previous instances MUST * have a closed interval of validity. The last instance MAY have a closed * interval. Clients MUST accept instances that overlaps in time, if not * clients may experience problems during rotations of verification * materials. * * To be able to manage planned rotations of either transparency logs or * certificate authorities, clienst MUST accept lists of instances where * the last instance have a 'valid_for' that belongs to the future. * This should not be a problem as clients SHOULD first seek the trust root * for a suitable instance before creating a per artifact trust root (that * is, a sub-set of the complete trust root) that is used for verification. */ export interface TrustedRoot { /** * MUST be application/vnd.dev.sigstore.trustedroot.v0.1+json * when encoded as JSON. * Clients MUST be able to process and parse content with the media * type defined in the old format: * application/vnd.dev.sigstore.trustedroot+json;version=0.1 */ mediaType: string; /** A set of trusted Rekor servers. */ tlogs: TransparencyLogInstance[]; /** * A set of trusted certificate authorities (e.g Fulcio), and any * intermediate certificates they provide. * If a CA is issuing multiple intermediate certificate, each * combination shall be represented as separate chain. I.e, a single * root cert may appear in multiple chains but with different * intermediate and/or leaf certificates. * The certificates are intended to be used for verifying artifact * signatures. */ certificateAuthorities: CertificateAuthority[]; /** A set of trusted certificate transparency logs. */ ctlogs: TransparencyLogInstance[]; /** A set of trusted timestamping authorities. */ timestampAuthorities: CertificateAuthority[]; } /** * SigningConfig represents the trusted entities/state needed by Sigstore * signing. In particular, it primarily contains service URLs that a Sigstore * signer may need to connect to for the online aspects of signing. 
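 *
 * Example (illustrative values only, remaining fields omitted), shown in the
 * proto JSON form produced by the codec below:
 *
 *     {
 *       "mediaType": "application/vnd.dev.sigstore.signingconfig.v0.2+json",
 *       "caUrls": [{
 *         "url": "https://fulcio.sigstore.dev",
 *         "majorApiVersion": 1,
 *         "validFor": { "start": "2024-01-01T00:00:00Z" }
 *       }]
 *     }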
*/ export interface SigningConfig { /** * MUST be application/vnd.dev.sigstore.signingconfig.v0.2+json * Clients MAY choose to also support * application/vnd.dev.sigstore.signingconfig.v0.1+json */ mediaType: string; /** * URLs to Fulcio-compatible CAs, capable of receiving * Certificate Signing Requests (CSRs) and responding with * issued certificates. * * These URLs MUST be the "base" URL for the CAs, which clients * should construct an appropriate CSR endpoint on top of. * For example, if a CA URL is `https://example.com/ca`, then * the client MAY construct the CSR endpoint as * `https://example.com/ca/api/v2/signingCert`. * * Clients MUST select only one Service with the highest API version * that the client is compatible with, that is within its * validity period, and has the newest validity start date. * Client SHOULD select the first Service that meets this requirement. * All listed Services SHOULD be sorted by the `valid_for` window in * descending order, with the newest instance first. */ caUrls: Service[]; /** * URLs to OpenID Connect identity providers. * * These URLs MUST be the "base" URLs for the OIDC IdPs, which clients * should perform well-known OpenID Connect discovery against. * * Clients MUST select only one Service with the highest API version * that the client is compatible with, that is within its * validity period, and has the newest validity start date. * Client SHOULD select the first Service that meets this requirement. * All listed Services SHOULD be sorted by the `valid_for` window in * descending order, with the newest instance first. */ oidcUrls: Service[]; /** * URLs to Rekor transparency logs. * * These URL MUST be the "base" URLs for the transparency logs, * which clients should construct appropriate API endpoints on top of. * * Clients MUST select Services with the highest API version * that the client is compatible with, that are within its * validity period, and have the newest validity start dates. * All listed Services SHOULD be sorted by the `valid_for` window in * descending order, with the newest instance first. * * Clients MUST select Services based on the selector value of * `rekor_tlog_config`. */ rekorTlogUrls: Service[]; /** * Specifies how a client should select the set of Rekor transparency * logs to write to. */ rekorTlogConfig: | ServiceConfiguration | undefined; /** * URLs to RFC 3161 Time Stamping Authorities (TSA). * * These URLs MUST be the *full* URL for the TSA, meaning that it * should be suitable for submitting Time Stamp Requests (TSRs) to * via HTTP, per RFC 3161. * * Clients MUST select Services with the highest API version * that the client is compatible with, that are within its * validity period, and have the newest validity start dates. * All listed Services SHOULD be sorted by the `valid_for` window in * descending order, with the newest instance first. * * Clients MUST select Services based on the selector value of * `tsa_config`. */ tsaUrls: Service[]; /** * Specifies how a client should select the set of TSAs to request * signed timestamps from. */ tsaConfig: ServiceConfiguration | undefined; } /** * Service represents an instance of a service that is a part of Sigstore infrastructure. * Clients MUST use the API version hint to determine the service with the * highest API version that the client is compatible with. Clients MUST also * only connect to services within the specified validity period and that has the * newest validity start date. */ export interface Service { /** URL of the service. 
MUST include scheme and authority. MAY include path. */ url: string; /** * Specifies the major API version. A value of 0 represents a service that * has not yet been released. */ majorApiVersion: number; /** * Validity period of a service. A service that has only a start date * SHOULD be considered the most recent instance of that service, but * the client MUST NOT assume there is only one valid instance. * The TimeRange MUST be considered valid *inclusive* of the * endpoints. */ validFor: TimeRange | undefined; } /** * ServiceConfiguration specifies how a client should select a set of * Services to connect to, along with a count when a specific number * of Services is requested. */ export interface ServiceConfiguration { /** How a client should select a set of Services to connect to. */ selector: ServiceSelector; /** * count specifies the number of Services the client should use. * Only used when selector is set to EXACT, and count MUST be greater * than 0. count MUST be less than or equal to the number of Services. */ count: number; } /** * ClientTrustConfig describes the complete state needed by a client * to perform both signing and verification operations against a particular * instance of Sigstore. */ export interface ClientTrustConfig { /** MUST be application/vnd.dev.sigstore.clienttrustconfig.v0.1+json */ mediaType: string; /** The root of trust, which MUST be present. */ trustedRoot: | TrustedRoot | undefined; /** Configuration for signing clients, which MUST be present. */ signingConfig: SigningConfig | undefined; } export const TransparencyLogInstance: MessageFns = { fromJSON(object: any): TransparencyLogInstance { return { baseUrl: isSet(object.baseUrl) ? globalThis.String(object.baseUrl) : "", hashAlgorithm: isSet(object.hashAlgorithm) ? hashAlgorithmFromJSON(object.hashAlgorithm) : 0, publicKey: isSet(object.publicKey) ? PublicKey.fromJSON(object.publicKey) : undefined, logId: isSet(object.logId) ? LogId.fromJSON(object.logId) : undefined, checkpointKeyId: isSet(object.checkpointKeyId) ? LogId.fromJSON(object.checkpointKeyId) : undefined, }; }, toJSON(message: TransparencyLogInstance): unknown { const obj: any = {}; if (message.baseUrl !== "") { obj.baseUrl = message.baseUrl; } if (message.hashAlgorithm !== 0) { obj.hashAlgorithm = hashAlgorithmToJSON(message.hashAlgorithm); } if (message.publicKey !== undefined) { obj.publicKey = PublicKey.toJSON(message.publicKey); } if (message.logId !== undefined) { obj.logId = LogId.toJSON(message.logId); } if (message.checkpointKeyId !== undefined) { obj.checkpointKeyId = LogId.toJSON(message.checkpointKeyId); } return obj; }, }; export const CertificateAuthority: MessageFns = { fromJSON(object: any): CertificateAuthority { return { subject: isSet(object.subject) ? DistinguishedName.fromJSON(object.subject) : undefined, uri: isSet(object.uri) ? globalThis.String(object.uri) : "", certChain: isSet(object.certChain) ? X509CertificateChain.fromJSON(object.certChain) : undefined, validFor: isSet(object.validFor) ? 
TimeRange.fromJSON(object.validFor) : undefined, }; }, toJSON(message: CertificateAuthority): unknown { const obj: any = {}; if (message.subject !== undefined) { obj.subject = DistinguishedName.toJSON(message.subject); } if (message.uri !== "") { obj.uri = message.uri; } if (message.certChain !== undefined) { obj.certChain = X509CertificateChain.toJSON(message.certChain); } if (message.validFor !== undefined) { obj.validFor = TimeRange.toJSON(message.validFor); } return obj; }, }; export const TrustedRoot: MessageFns = { fromJSON(object: any): TrustedRoot { return { mediaType: isSet(object.mediaType) ? globalThis.String(object.mediaType) : "", tlogs: globalThis.Array.isArray(object?.tlogs) ? object.tlogs.map((e: any) => TransparencyLogInstance.fromJSON(e)) : [], certificateAuthorities: globalThis.Array.isArray(object?.certificateAuthorities) ? object.certificateAuthorities.map((e: any) => CertificateAuthority.fromJSON(e)) : [], ctlogs: globalThis.Array.isArray(object?.ctlogs) ? object.ctlogs.map((e: any) => TransparencyLogInstance.fromJSON(e)) : [], timestampAuthorities: globalThis.Array.isArray(object?.timestampAuthorities) ? object.timestampAuthorities.map((e: any) => CertificateAuthority.fromJSON(e)) : [], }; }, toJSON(message: TrustedRoot): unknown { const obj: any = {}; if (message.mediaType !== "") { obj.mediaType = message.mediaType; } if (message.tlogs?.length) { obj.tlogs = message.tlogs.map((e) => TransparencyLogInstance.toJSON(e)); } if (message.certificateAuthorities?.length) { obj.certificateAuthorities = message.certificateAuthorities.map((e) => CertificateAuthority.toJSON(e)); } if (message.ctlogs?.length) { obj.ctlogs = message.ctlogs.map((e) => TransparencyLogInstance.toJSON(e)); } if (message.timestampAuthorities?.length) { obj.timestampAuthorities = message.timestampAuthorities.map((e) => CertificateAuthority.toJSON(e)); } return obj; }, }; export const SigningConfig: MessageFns = { fromJSON(object: any): SigningConfig { return { mediaType: isSet(object.mediaType) ? globalThis.String(object.mediaType) : "", caUrls: globalThis.Array.isArray(object?.caUrls) ? object.caUrls.map((e: any) => Service.fromJSON(e)) : [], oidcUrls: globalThis.Array.isArray(object?.oidcUrls) ? object.oidcUrls.map((e: any) => Service.fromJSON(e)) : [], rekorTlogUrls: globalThis.Array.isArray(object?.rekorTlogUrls) ? object.rekorTlogUrls.map((e: any) => Service.fromJSON(e)) : [], rekorTlogConfig: isSet(object.rekorTlogConfig) ? ServiceConfiguration.fromJSON(object.rekorTlogConfig) : undefined, tsaUrls: globalThis.Array.isArray(object?.tsaUrls) ? object.tsaUrls.map((e: any) => Service.fromJSON(e)) : [], tsaConfig: isSet(object.tsaConfig) ? 
ServiceConfiguration.fromJSON(object.tsaConfig) : undefined, }; }, toJSON(message: SigningConfig): unknown { const obj: any = {}; if (message.mediaType !== "") { obj.mediaType = message.mediaType; } if (message.caUrls?.length) { obj.caUrls = message.caUrls.map((e) => Service.toJSON(e)); } if (message.oidcUrls?.length) { obj.oidcUrls = message.oidcUrls.map((e) => Service.toJSON(e)); } if (message.rekorTlogUrls?.length) { obj.rekorTlogUrls = message.rekorTlogUrls.map((e) => Service.toJSON(e)); } if (message.rekorTlogConfig !== undefined) { obj.rekorTlogConfig = ServiceConfiguration.toJSON(message.rekorTlogConfig); } if (message.tsaUrls?.length) { obj.tsaUrls = message.tsaUrls.map((e) => Service.toJSON(e)); } if (message.tsaConfig !== undefined) { obj.tsaConfig = ServiceConfiguration.toJSON(message.tsaConfig); } return obj; }, }; export const Service: MessageFns = { fromJSON(object: any): Service { return { url: isSet(object.url) ? globalThis.String(object.url) : "", majorApiVersion: isSet(object.majorApiVersion) ? globalThis.Number(object.majorApiVersion) : 0, validFor: isSet(object.validFor) ? TimeRange.fromJSON(object.validFor) : undefined, }; }, toJSON(message: Service): unknown { const obj: any = {}; if (message.url !== "") { obj.url = message.url; } if (message.majorApiVersion !== 0) { obj.majorApiVersion = Math.round(message.majorApiVersion); } if (message.validFor !== undefined) { obj.validFor = TimeRange.toJSON(message.validFor); } return obj; }, }; export const ServiceConfiguration: MessageFns = { fromJSON(object: any): ServiceConfiguration { return { selector: isSet(object.selector) ? serviceSelectorFromJSON(object.selector) : 0, count: isSet(object.count) ? globalThis.Number(object.count) : 0, }; }, toJSON(message: ServiceConfiguration): unknown { const obj: any = {}; if (message.selector !== 0) { obj.selector = serviceSelectorToJSON(message.selector); } if (message.count !== 0) { obj.count = Math.round(message.count); } return obj; }, }; export const ClientTrustConfig: MessageFns = { fromJSON(object: any): ClientTrustConfig { return { mediaType: isSet(object.mediaType) ? globalThis.String(object.mediaType) : "", trustedRoot: isSet(object.trustedRoot) ? TrustedRoot.fromJSON(object.trustedRoot) : undefined, signingConfig: isSet(object.signingConfig) ? SigningConfig.fromJSON(object.signingConfig) : undefined, }; }, toJSON(message: ClientTrustConfig): unknown { const obj: any = {}; if (message.mediaType !== "") { obj.mediaType = message.mediaType; } if (message.trustedRoot !== undefined) { obj.trustedRoot = TrustedRoot.toJSON(message.trustedRoot); } if (message.signingConfig !== undefined) { obj.signingConfig = SigningConfig.toJSON(message.signingConfig); } return obj; }, }; function isSet(value: any): boolean { return value !== null && value !== undefined; } interface MessageFns { fromJSON(object: any): T; toJSON(message: T): unknown; } protobuf-specs-0.4.1/gen/pb-typescript/src/__generated__/sigstore_verification.ts000066400000000000000000000415451477352757300303740ustar00rootroot00000000000000// Code generated by protoc-gen-ts_proto. DO NOT EDIT. // versions: // protoc-gen-ts_proto v2.6.1 // protoc v5.29.4 // source: sigstore_verification.proto /* eslint-disable */ import { Bundle } from "./sigstore_bundle"; import { HashOutput, ObjectIdentifierValuePair, PublicKey, SubjectAlternativeName } from "./sigstore_common"; import { TrustedRoot } from "./sigstore_trustroot"; /** The identity of a X.509 Certificate signer. 
*/ export interface CertificateIdentity { /** The X.509v3 issuer extension (OID 1.3.6.1.4.1.57264.1.1) */ issuer: string; san: | SubjectAlternativeName | undefined; /** * An unordered list of OIDs that must be verified. * All OID/values provided in this list MUST exactly match against * the values in the certificate for verification to be successful. */ oids: ObjectIdentifierValuePair[]; } export interface CertificateIdentities { identities: CertificateIdentity[]; } export interface PublicKeyIdentities { publicKeys: PublicKey[]; } /** * A light-weight set of options/policies for identifying trusted signers, * used during verification of a single artifact. */ export interface ArtifactVerificationOptions { /** * At least one identity MUST be provided. Providing zero identities * is an error. If at least one provided identity is found as a * signer, the verification is considered successful. */ signers?: | { $case: "certificateIdentities"; certificateIdentities: CertificateIdentities } | // /** * To simplify verification implementation, the logic for * bundle verification should be implemented as a * higher-order function, where one of argument should be an * interface over the set of trusted public keys, like this: * `Verify(bytes artifact, bytes signature, string key_id)`. * This way the caller is in full control of mapping the * identified (or hinted) key in the bundle to one of the * trusted keys, as this process is inherently application * specific. */ { $case: "publicKeys"; publicKeys: PublicKeyIdentities } | undefined; /** * Optional options for artifact transparency log verification. * If none is provided, the default verification options are: * Threshold: 1 * Online verification: false * Disable: false */ tlogOptions?: | ArtifactVerificationOptions_TlogOptions | undefined; /** * Optional options for certificate transparency log verification. * If none is provided, the default verification options are: * Threshold: 1 * Disable: false */ ctlogOptions?: | ArtifactVerificationOptions_CtlogOptions | undefined; /** * Optional options for certificate signed timestamp verification. * If none is provided, the default verification options are: * Threshold: 0 * Disable: true */ tsaOptions?: | ArtifactVerificationOptions_TimestampAuthorityOptions | undefined; /** * Optional options for integrated timestamp verification. * If none is provided, the default verification options are: * Threshold: 0 * Disable: true */ integratedTsOptions?: | ArtifactVerificationOptions_TlogIntegratedTimestampOptions | undefined; /** * Optional options for observed timestamp verification. * If none is provided, the default verification options are: * Threshold 1 * Disable: false */ observerOptions?: ArtifactVerificationOptions_ObserverTimestampOptions | undefined; } export interface ArtifactVerificationOptions_TlogOptions { /** Number of transparency logs the entry must appear on. */ threshold: number; /** Perform an online inclusion proof. */ performOnlineVerification: boolean; /** Disable verification for transparency logs. */ disable: boolean; } export interface ArtifactVerificationOptions_CtlogOptions { /** * The number of ct transparency logs the certificate must * appear on. */ threshold: number; /** Disable ct transparency log verification */ disable: boolean; } export interface ArtifactVerificationOptions_TimestampAuthorityOptions { /** The number of signed timestamps that are expected. */ threshold: number; /** Disable signed timestamp verification. 
*/ disable: boolean; } export interface ArtifactVerificationOptions_TlogIntegratedTimestampOptions { /** The number of integrated timestamps that are expected. */ threshold: number; /** Disable integrated timestamp verification. */ disable: boolean; } export interface ArtifactVerificationOptions_ObserverTimestampOptions { /** * The number of external observers of the timestamp. * This is a union of RFC3161 signed timestamps, and * integrated timestamps from a transparency log, that * could include additional timestamp sources in the * future. */ threshold: number; /** Disable observer timestamp verification. */ disable: boolean; } export interface Artifact { data?: | // /** Location of the artifact */ { $case: "artifactUri"; artifactUri: string } | // /** The raw bytes of the artifact */ { $case: "artifact"; artifact: Buffer } | // /** * Digest of the artifact. SHOULD NOT be used when verifying an * in-toto attestation as the subject digest cannot be * reconstructed. This option will not work with Ed25519 * signatures, use Ed25519Ph or another algorithm instead. */ { $case: "artifactDigest"; artifactDigest: HashOutput } | undefined; } /** * Input captures all that is needed to call the bundle verification method, * to verify a single artifact referenced by the bundle. */ export interface Input { /** * The verification materials provided during a bundle verification. * The running process is usually preloaded with a "global" * dev.sisgtore.trustroot.TrustedRoot.v1 instance. Prior to * verifying an artifact (i.e a bundle), and/or based on current * policy, some selection is expected to happen, to filter out the * exact certificate authority to use, which transparency logs are * relevant etc. The result should b ecaptured in the * `artifact_trust_root`. */ artifactTrustRoot: TrustedRoot | undefined; artifactVerificationOptions: ArtifactVerificationOptions | undefined; bundle: | Bundle | undefined; /** * If the bundle contains a message signature, the artifact must be * provided. */ artifact?: Artifact | undefined; } export const CertificateIdentity: MessageFns = { fromJSON(object: any): CertificateIdentity { return { issuer: isSet(object.issuer) ? globalThis.String(object.issuer) : "", san: isSet(object.san) ? SubjectAlternativeName.fromJSON(object.san) : undefined, oids: globalThis.Array.isArray(object?.oids) ? object.oids.map((e: any) => ObjectIdentifierValuePair.fromJSON(e)) : [], }; }, toJSON(message: CertificateIdentity): unknown { const obj: any = {}; if (message.issuer !== "") { obj.issuer = message.issuer; } if (message.san !== undefined) { obj.san = SubjectAlternativeName.toJSON(message.san); } if (message.oids?.length) { obj.oids = message.oids.map((e) => ObjectIdentifierValuePair.toJSON(e)); } return obj; }, }; export const CertificateIdentities: MessageFns = { fromJSON(object: any): CertificateIdentities { return { identities: globalThis.Array.isArray(object?.identities) ? object.identities.map((e: any) => CertificateIdentity.fromJSON(e)) : [], }; }, toJSON(message: CertificateIdentities): unknown { const obj: any = {}; if (message.identities?.length) { obj.identities = message.identities.map((e) => CertificateIdentity.toJSON(e)); } return obj; }, }; export const PublicKeyIdentities: MessageFns = { fromJSON(object: any): PublicKeyIdentities { return { publicKeys: globalThis.Array.isArray(object?.publicKeys) ? 
object.publicKeys.map((e: any) => PublicKey.fromJSON(e)) : [], }; }, toJSON(message: PublicKeyIdentities): unknown { const obj: any = {}; if (message.publicKeys?.length) { obj.publicKeys = message.publicKeys.map((e) => PublicKey.toJSON(e)); } return obj; }, }; export const ArtifactVerificationOptions: MessageFns = { fromJSON(object: any): ArtifactVerificationOptions { return { signers: isSet(object.certificateIdentities) ? { $case: "certificateIdentities", certificateIdentities: CertificateIdentities.fromJSON(object.certificateIdentities), } : isSet(object.publicKeys) ? { $case: "publicKeys", publicKeys: PublicKeyIdentities.fromJSON(object.publicKeys) } : undefined, tlogOptions: isSet(object.tlogOptions) ? ArtifactVerificationOptions_TlogOptions.fromJSON(object.tlogOptions) : undefined, ctlogOptions: isSet(object.ctlogOptions) ? ArtifactVerificationOptions_CtlogOptions.fromJSON(object.ctlogOptions) : undefined, tsaOptions: isSet(object.tsaOptions) ? ArtifactVerificationOptions_TimestampAuthorityOptions.fromJSON(object.tsaOptions) : undefined, integratedTsOptions: isSet(object.integratedTsOptions) ? ArtifactVerificationOptions_TlogIntegratedTimestampOptions.fromJSON(object.integratedTsOptions) : undefined, observerOptions: isSet(object.observerOptions) ? ArtifactVerificationOptions_ObserverTimestampOptions.fromJSON(object.observerOptions) : undefined, }; }, toJSON(message: ArtifactVerificationOptions): unknown { const obj: any = {}; if (message.signers?.$case === "certificateIdentities") { obj.certificateIdentities = CertificateIdentities.toJSON(message.signers.certificateIdentities); } else if (message.signers?.$case === "publicKeys") { obj.publicKeys = PublicKeyIdentities.toJSON(message.signers.publicKeys); } if (message.tlogOptions !== undefined) { obj.tlogOptions = ArtifactVerificationOptions_TlogOptions.toJSON(message.tlogOptions); } if (message.ctlogOptions !== undefined) { obj.ctlogOptions = ArtifactVerificationOptions_CtlogOptions.toJSON(message.ctlogOptions); } if (message.tsaOptions !== undefined) { obj.tsaOptions = ArtifactVerificationOptions_TimestampAuthorityOptions.toJSON(message.tsaOptions); } if (message.integratedTsOptions !== undefined) { obj.integratedTsOptions = ArtifactVerificationOptions_TlogIntegratedTimestampOptions.toJSON( message.integratedTsOptions, ); } if (message.observerOptions !== undefined) { obj.observerOptions = ArtifactVerificationOptions_ObserverTimestampOptions.toJSON(message.observerOptions); } return obj; }, }; export const ArtifactVerificationOptions_TlogOptions: MessageFns = { fromJSON(object: any): ArtifactVerificationOptions_TlogOptions { return { threshold: isSet(object.threshold) ? globalThis.Number(object.threshold) : 0, performOnlineVerification: isSet(object.performOnlineVerification) ? globalThis.Boolean(object.performOnlineVerification) : false, disable: isSet(object.disable) ? globalThis.Boolean(object.disable) : false, }; }, toJSON(message: ArtifactVerificationOptions_TlogOptions): unknown { const obj: any = {}; if (message.threshold !== 0) { obj.threshold = Math.round(message.threshold); } if (message.performOnlineVerification !== false) { obj.performOnlineVerification = message.performOnlineVerification; } if (message.disable !== false) { obj.disable = message.disable; } return obj; }, }; export const ArtifactVerificationOptions_CtlogOptions: MessageFns = { fromJSON(object: any): ArtifactVerificationOptions_CtlogOptions { return { threshold: isSet(object.threshold) ? 
globalThis.Number(object.threshold) : 0, disable: isSet(object.disable) ? globalThis.Boolean(object.disable) : false, }; }, toJSON(message: ArtifactVerificationOptions_CtlogOptions): unknown { const obj: any = {}; if (message.threshold !== 0) { obj.threshold = Math.round(message.threshold); } if (message.disable !== false) { obj.disable = message.disable; } return obj; }, }; export const ArtifactVerificationOptions_TimestampAuthorityOptions: MessageFns< ArtifactVerificationOptions_TimestampAuthorityOptions > = { fromJSON(object: any): ArtifactVerificationOptions_TimestampAuthorityOptions { return { threshold: isSet(object.threshold) ? globalThis.Number(object.threshold) : 0, disable: isSet(object.disable) ? globalThis.Boolean(object.disable) : false, }; }, toJSON(message: ArtifactVerificationOptions_TimestampAuthorityOptions): unknown { const obj: any = {}; if (message.threshold !== 0) { obj.threshold = Math.round(message.threshold); } if (message.disable !== false) { obj.disable = message.disable; } return obj; }, }; export const ArtifactVerificationOptions_TlogIntegratedTimestampOptions: MessageFns< ArtifactVerificationOptions_TlogIntegratedTimestampOptions > = { fromJSON(object: any): ArtifactVerificationOptions_TlogIntegratedTimestampOptions { return { threshold: isSet(object.threshold) ? globalThis.Number(object.threshold) : 0, disable: isSet(object.disable) ? globalThis.Boolean(object.disable) : false, }; }, toJSON(message: ArtifactVerificationOptions_TlogIntegratedTimestampOptions): unknown { const obj: any = {}; if (message.threshold !== 0) { obj.threshold = Math.round(message.threshold); } if (message.disable !== false) { obj.disable = message.disable; } return obj; }, }; export const ArtifactVerificationOptions_ObserverTimestampOptions: MessageFns< ArtifactVerificationOptions_ObserverTimestampOptions > = { fromJSON(object: any): ArtifactVerificationOptions_ObserverTimestampOptions { return { threshold: isSet(object.threshold) ? globalThis.Number(object.threshold) : 0, disable: isSet(object.disable) ? globalThis.Boolean(object.disable) : false, }; }, toJSON(message: ArtifactVerificationOptions_ObserverTimestampOptions): unknown { const obj: any = {}; if (message.threshold !== 0) { obj.threshold = Math.round(message.threshold); } if (message.disable !== false) { obj.disable = message.disable; } return obj; }, }; export const Artifact: MessageFns = { fromJSON(object: any): Artifact { return { data: isSet(object.artifactUri) ? { $case: "artifactUri", artifactUri: globalThis.String(object.artifactUri) } : isSet(object.artifact) ? { $case: "artifact", artifact: Buffer.from(bytesFromBase64(object.artifact)) } : isSet(object.artifactDigest) ? { $case: "artifactDigest", artifactDigest: HashOutput.fromJSON(object.artifactDigest) } : undefined, }; }, toJSON(message: Artifact): unknown { const obj: any = {}; if (message.data?.$case === "artifactUri") { obj.artifactUri = message.data.artifactUri; } else if (message.data?.$case === "artifact") { obj.artifact = base64FromBytes(message.data.artifact); } else if (message.data?.$case === "artifactDigest") { obj.artifactDigest = HashOutput.toJSON(message.data.artifactDigest); } return obj; }, }; export const Input: MessageFns = { fromJSON(object: any): Input { return { artifactTrustRoot: isSet(object.artifactTrustRoot) ? TrustedRoot.fromJSON(object.artifactTrustRoot) : undefined, artifactVerificationOptions: isSet(object.artifactVerificationOptions) ? 
ArtifactVerificationOptions.fromJSON(object.artifactVerificationOptions) : undefined, bundle: isSet(object.bundle) ? Bundle.fromJSON(object.bundle) : undefined, artifact: isSet(object.artifact) ? Artifact.fromJSON(object.artifact) : undefined, }; }, toJSON(message: Input): unknown { const obj: any = {}; if (message.artifactTrustRoot !== undefined) { obj.artifactTrustRoot = TrustedRoot.toJSON(message.artifactTrustRoot); } if (message.artifactVerificationOptions !== undefined) { obj.artifactVerificationOptions = ArtifactVerificationOptions.toJSON(message.artifactVerificationOptions); } if (message.bundle !== undefined) { obj.bundle = Bundle.toJSON(message.bundle); } if (message.artifact !== undefined) { obj.artifact = Artifact.toJSON(message.artifact); } return obj; }, }; function bytesFromBase64(b64: string): Uint8Array { return Uint8Array.from(globalThis.Buffer.from(b64, "base64")); } function base64FromBytes(arr: Uint8Array): string { return globalThis.Buffer.from(arr).toString("base64"); } function isSet(value: any): boolean { return value !== null && value !== undefined; } interface MessageFns { fromJSON(object: any): T; toJSON(message: T): unknown; } protobuf-specs-0.4.1/gen/pb-typescript/src/index.ts000066400000000000000000000015371477352757300223450ustar00rootroot00000000000000/* Copyright 2023 The Sigstore Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
*/ export * from './__generated__/envelope'; export * from './__generated__/sigstore_bundle'; export * from './__generated__/sigstore_common'; export * from './__generated__/sigstore_rekor'; export * from './__generated__/sigstore_trustroot'; export * from './__generated__/sigstore_verification'; protobuf-specs-0.4.1/gen/pb-typescript/tsconfig.json000066400000000000000000000003231477352757300225760ustar00rootroot00000000000000{ "extends": "@tsconfig/node18/tsconfig.json", "compilerOptions": { "declaration": true, "noImplicitAny": true, "outDir": "./dist" }, "include": ["src/**/*"], "exclude": ["node_modules"] } protobuf-specs-0.4.1/go.mod000066400000000000000000000004711477352757300156230ustar00rootroot00000000000000module github.com/sigstore/protobuf-specs go 1.22.0 require ( google.golang.org/genproto/googleapis/api v0.0.0-20230726155614-23370e0ffb3e google.golang.org/protobuf v1.36.6 ) require ( github.com/google/go-cmp v0.5.8 // indirect google.golang.org/genproto v0.0.0-20230706204954-ccb25ca9f130 // indirect ) protobuf-specs-0.4.1/go.sum000066400000000000000000000014701477352757300156500ustar00rootroot00000000000000github.com/google/go-cmp v0.5.8 h1:e6P7q2lk1O+qJJb4BtCQXlK8vWEO8V1ZeuEdJNOqZyg= github.com/google/go-cmp v0.5.8/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= google.golang.org/genproto v0.0.0-20230706204954-ccb25ca9f130 h1:Au6te5hbKUV8pIYWHqOUZ1pva5qK/rwbIhoXEUB9Lu8= google.golang.org/genproto v0.0.0-20230706204954-ccb25ca9f130/go.mod h1:O9kGHb51iE/nOGvQaDUuadVYqovW56s5emA88lQnj6Y= google.golang.org/genproto/googleapis/api v0.0.0-20230726155614-23370e0ffb3e h1:z3vDksarJxsAKM5dmEGv0GHwE2hKJ096wZra71Vs4sw= google.golang.org/genproto/googleapis/api v0.0.0-20230726155614-23370e0ffb3e/go.mod h1:rsr7RhLuwsDKL7RmgDDCUc6yaGr1iqceVb5Wv6f6YvQ= google.golang.org/protobuf v1.36.6 h1:z1NpPI8ku2WgiWnf+t9wTPsn6eP1L7ksHUlkfLvd9xY= google.golang.org/protobuf v1.36.6/go.mod h1:jduwjTPXsFjZGTmRluh+L6NjiWu7pchiJ2/5YcXBHnY= protobuf-specs-0.4.1/java/000077500000000000000000000000001477352757300154345ustar00rootroot00000000000000protobuf-specs-0.4.1/java/.gitattributes000066400000000000000000000003261477352757300203300ustar00rootroot00000000000000# # https://help.github.com/articles/dealing-with-line-endings/ # # Linux start script should use lf /gradlew text eol=lf # These are Windows script files and should use crlf *.bat text eol=crlf protobuf-specs-0.4.1/java/.gitignore000066400000000000000000000000551477352757300174240ustar00rootroot00000000000000.gradle /build !gradle-wrapper.jar /.idea protobuf-specs-0.4.1/java/README.md000066400000000000000000000036721477352757300167230ustar00rootroot00000000000000# Java generator This directory contains the necessary build config for java code generation. The gradle build takes the protos defined in `../protos` and using the grade protobug plugin will generate a single jar. To generate a jar from the protobuf spec run ``` ./gradlew assemble ``` A jar file will be created at `./build/libs/protobuf-specs-SNAPSHOT.jar` ## Releasing ### Generate Release artifacts 1. On creation of a tag in the style `release/java/v1.2.3`, new artifacts will be built and uploaded to a github release `release/java/v1.2.3` 1. Once a release is created, check it and remove the draft label on the github release page. 1. On a machine with your pgp key, `gpg`, `bash` and `cosign`, go to `protobuf-specs/java/scripts` 1. Run `./sign_and_bundle_release.sh v1.2.3` to generate a release bundle for `release/java/v1.2.3` ### Publish on Maven Central 1. 
Log into https://s01.oss.sonatype.org with credentials that have permissions to upload to `dev.sigstore` 1. Take the release bundle, `release_java_v1.2.3/protobuf-specs-1.2.3-bundle.jar` and upload via the `Staging Upload -> (Upload Mode) Artifact Bundle` 1. Once the bundle is validated and checked, release it via `Staging Repositories`, if any issues occur, drop it and fix the issues before restarting the release process. ## How do I get permissions to upload to Maven Central - Create an account: https://central.sonatype.org/publish/publish-guide/ - Request permissions to publish to dev.sigstore on JIRA ([example](https://issues.sonatype.org/browse/OSSRH-83556)) and get [Bob](https://github.com/bobcallaway) (or [Appu](https://github.com/loosebazooka) to signoff on it. ## Why is the gradle wrapper jar checked in? The file `gradle-wrapper.jar` is usually checked into java projects that are built with gradle. This file is validated by the gradle/wrapper-validation-action in the gradle-wrapper-validation.yml workflow. More info at: https://github.com/gradle/wrapper-validation-action protobuf-specs-0.4.1/java/build.gradle.kts000066400000000000000000000064671477352757300205300ustar00rootroot00000000000000plugins { `java-library` `maven-publish` id("dev.sigstore.sign") version "1.3.0" id("com.diffplug.spotless") version "7.0.2" `signing` } description = "Sigstore protobuf spec protos bundled into a jar" repositories { mavenCentral() } sourceSets { main { resources { srcDirs("../protos") } } } // gradle reproducible jar builds tasks.withType().configureEach { isPreserveFileTimestamps = false isReproducibleFileOrder = true } java { withJavadocJar() withSourcesJar() } spotless { kotlinGradle { target("*.gradle.kts") // default target for kotlinGradle ktlint() } format("misc") { target("*.md", ".gitignore", "**/*.yaml") trimTrailingWhitespace() indentWithSpaces() endWithNewline() } // we have no non-generated java code } val repoUrl = "https://github.com/sigstore/protobuf-specs" publishing { publications { create("proto") { artifactId = project.name from(components["java"]) pom { name.set( (project.findProperty("artifact.name") as? String) ?: project.name, ) description.set( project.provider { project.description }, ) inceptionYear.set("2022") url.set(repoUrl) organization { name.set("Sigstore") url.set("https://sigstore.dev") } developers { developer { organization.set("Sigstore authors") organizationUrl.set("https://sigstore.dev") } } issueManagement { system.set("GitHub Issues") url.set("$repoUrl/issues") } licenses { license { name.set("Apache-2.0") url.set("https://www.apache.org/licenses/LICENSE-2.0.txt") } } scm { connection.set("scm:git:$repoUrl.git") developerConnection.set("scm:git:$repoUrl.git") url.set(repoUrl) tag.set("HEAD") } } } } repositories { maven { name = "sonatype" url = uri("https://s01.oss.sonatype.org/service/local/staging/deploy/maven2/") credentials(PasswordCredentials::class) } } } signing { val signingKey: String? by project val signingPassword: String? 
by project useInMemoryPgpKeys(signingKey, signingPassword) sign(publishing.publications["proto"]) } tasks.withType().configureEach { onlyIf("Is a release") { project.hasProperty("release") } onlyIf("PGP Signing is not skipped") { !project.hasProperty("skipPgpSigning") } } tasks.withType().configureEach { onlyIf("Is a release") { project.hasProperty("release") } onlyIf("Sigstore Signing is not skipped") { !project.hasProperty("skipSigstoreSigning") } } protobuf-specs-0.4.1/java/gradle.properties000066400000000000000000000000441477352757300210060ustar00rootroot00000000000000group=dev.sigstore version=SNAPSHOT protobuf-specs-0.4.1/java/gradle/000077500000000000000000000000001477352757300166725ustar00rootroot00000000000000protobuf-specs-0.4.1/java/gradle/wrapper/000077500000000000000000000000001477352757300203525ustar00rootroot00000000000000protobuf-specs-0.4.1/java/gradle/wrapper/gradle-wrapper.jar000066400000000000000000001246751477352757300240030ustar00rootroot00000000000000PK! META-INF/LICENSEUTÝZ[sÛ6~ϯÀhfgìFI»íî¶}Rc§U7•3’½™>B$(aC,@ZÖþú=Ü(ÉNöu=™Ö¢‰ƒƒsùÎwôJ|égÑËr¯Ä]ªÎ©W/¼ù/e6øvþ¶¿Én”ö(¾}ûö»g퇡ÿñÍ›Ãá0—´ÍÜØÝ›†·ro^áÂûÛõï±X݈ww«›åýònµïïÖâas[ˆõíÇõÝÍÃ;|\Ð[7ËÍýzùó>!ßÌŪu§PÎÍ_ymfþD3áö²iD«d'8é lë„ì*Qš®âU¢6VŒNªޚj,ñqáEổvƒÕÛŸ éD…[ªJlb£Jò È·fÜíÅÂÔðAÃ{¦[Õ §z{¦Xiú£Õ»ý Ì¡SV€J°PG!Çao¬þíçå\Z1ìå `Ó•°°ÛÑKÞ™j'qK¢Ï”;< i¯„,IJÐÌïz1^ð jåxk0è`MSiUøÐÒžŸŽ]ËJÓ¶¦ó’ü‹â ‡=Ëá çâ½±¤G?ÚÞ@Ä$«F‡ͼ”ʼn+}ÍKÍAÙÜgÁK¨„îø÷B F”œŽïy)ü'²€­ìäN¡óp_7–{¯X!{EÇïÓ¾’dç–9hŒ&r¥ArÛë%ÕºköÊ–(úêû·¹¦í ˜‡ ƒÀêèp“U.H‘[ÕJ ®œHÏôL.ÿÃŒ3qkñ7;»Î½ÿÐ&ºQ–y|xê ´Õ½[í<Å'¹å,Ô6°[ )éÕžFZoU­¬…åôך,þ·hM¥áh’²*8Xwe3’) EgÑèVãîàGgêá€áåhCpJÖ¹G‚¼~¡ù_ëÝhéïà–Feðq·ý7„¹ê²;ò3pÇØP~ÔÖ´ðÇr/;Ð:$DEçðMŠž4þc-¤`ó¸bz@/ãä˜6½Æ„2¤œ?æ"Î'ÎÑ NúÈèíPçn«*-Åpìóc2öó(à!iL8„‘–R@wá1ØtþX­¬H¥nä¶ ùŸáRhŠXJJ2âB@70¼á-/k2«¬-d¡ ­qPO²íagXÐaÎ ñÍEß+Øù ’©1‡ëd…eõ#XñQ 4ˆ›FîqÙþô^Û (¾•×Q*V¸F?DcnEîÂ\8ìu¹ÏÀœ5@ €Ì´êQ“+1ŠÁ4>O„ >ïæ<›¼0¬rÊA¤õ%lfJ X¦wºƒ]Î}~ŽÇ§êIúâÔ|ÞzÍÞw$ÞW «Z©c~ª^ZŠ´ £UV5Gȃî3n Ñ‚qÒÉV]§k"[Ë’ŠD‘ÕÈhÔ3¥Ð:ÊÔÉëïÊ}¿èñÓˆ)›í è.ÔÒ¨ ›ø„b¸òL$H2lZNù"KŠQßÀÖM€m7n;h#Âñ3Z¼LåîÅj‘DeÚã}«À˜5˜âyòòuÕ^Ìâ™f^×û˰H5€Öè…­l(Ž×uD>ÆÎ[_`äFWÉPh§Á¥d!û»âÅR±+ßþ%uƒ‹ ” -+Y‘ ¹£Tër‡š;*,!%ÕHÿ»+³•ȵr£ŒL¢ ³6Ú 8n9:ªò´cKxéiä'B¼TšÔS0Âô¬!á(®×åhFÉÛJû¡Ï&v(—rz×öC(¢È°#Áj¶{K‘çê|vžÂ'ü:;dà)On@ÄÇödS±e¶ â (£"$¥ó}R:õçñÓà¶¥{s¹F›¥Ñ·sñ Ò*Üö]<~`Vb3rqõ±z±™ÉÒ,GeURd! 3±8â@á”Àðz5€eBøô5ÕA#×èL÷š<ïàÄøñ5°»ÃÆÉe3_×VÁ' ÄîÑ”ägÕÜ÷¸aè¶`äXq|†t Îûq kÁЍ}#!ÐãЙK­£'žXä}[Nó#Y>ÛñB9'laý5sÐG‰ ûà+X¦ú ZŽ!P$PÐqCt-z>kæ= ë l/±¼ õѦ®‘çAP À/ÿÅØqÀeÏ fÂÉÐ죰«ìûÛMÓÓÉʈ]^µ²‘ìÍïf‡+’ܺ7;È^ç¤Õ”µô Ò¡öå‰审 6òàIdõ´ìtA8w¸¾Ú‚úLò¦Êù-èŠPëæbY£ÿc/ä©0¦£S½cäN⟠ä|ã~• VäÖÖ8÷š †Ç(͈ü‰?ƒç¥häÁzÀ£6jÇE,”Oœà_8ª ¬¸ó­v’S&çñ‚?Zbª †©Ø4e ͨϔÐh¤ó%/°*®˜¢è½+ÒÂVÁÃ|Ѻ ûÄŠ¡à»¹X«|24§­[yLÈvŠB€ƒ:p› ½ÀòÈ%Ha³@Žâ üßÄŠ 3 %¡ ½ßs†øunæÌßD¸•ŽC>è!RóŠ eªŽÏ-B¬ãd6ˆ¬*üÝb¿“Gd&%¨î-ô5™P°õ8"?õS8Þ¨*ÕUchë$b°pÿÜyŠidà0Ä3\L&šVAÏÄ<ÀާñdžyîÞ⢉RWA´•†õLN_™+Pˆ?G®2Žä4²Ö ˽ÀàÓhï•‹ÉîŠL}A›"¥MMÍâñ™V$ŸÎÅT"y¸u6ÍK œÝVMªpdÝ8K&*q4ËÄN夘8ä{jvüM÷ª‰º¹xè Š:ršz‚Jí/IÌ.Hâ|ãxÊ"³aV6Æzvt•˜>îx:Èaª·Í§ÏÿKkæi©™ ‹`êZ…ÛG^¿2.Š·7T_¶†›2LÛµwXFH57B9pªR|„i¹ÄoÄ삤`ÅØí §£À?ú ¡ŽL=©2ƒxÞh«vÒò½ÒiïáïþPˆCXÌxte9¦ÜÙÞ_¨1} ײŹYd48õRögúþ#èäc˜_A4‘’ÚT«þµ¿=‚îÀ'XÒÉ¥PøM‹×Ó¨ XxG ô®ˆMNjÏæ³!›‚ß|5¸PØRŸ‹í¨uÂKÛZ|þ v9Æ$ˆªnÜÀRç-V‚ò"5/i V$‡ùÜwIÕ+Ô‡§-jþ6Ž/'νƹ@þl±ËÍLü¼Ø,7Á¸Ÿ–÷¿Þ=Ü‹O‹õz±º_ÞnÄÝ:¿–¿{/«?Ä?—« ;šo€Ÿp:êÒI4áJ•ISÑœTœ:B“K¦¢†ÈžC,ó~yÿá¶«¯^/Wï×ËÕ/·¿ß®î ñûíúݯ åâçå‡åýBï—÷«Û }`áe|\¬ÁakññaýñnsËÕ–o ¼Yý{ØTÓ­ÝÌpW8 ðœ5½ÕHÏéÀ5D¾Bñ—7›—ò´Ñ9àDxÜ×Ú²;SêØ&3¨û{V𯿭çÍ,ÇÞ?æð9˜}Ðr«º<_bå@ºô`ð¨¡a'èv6j 7Y@C>2èÔ®ÑÀ¾Ju]ÄÛîb2Ê“Ÿ/Æûœé7zK„Ž”Ûá<"Þ[„-ü‚£ÛñËùÁè9)8” .k4mì'äZÙÊÝt†«ÃWÒ—\¯ðn=»}†„bËW H`x¦‹r^h@hœ¹Þ8®¶|gŽU<Öj¼5>mtÉšcĘ‘ŸèÎ;3ÃÕ|bpõâxÐ ÝØ1ÕA7ùìð3eÓ÷§„È FT¼–º-W#ÙÔc—È Á ßÁ[ Þܼ±r8‡HÐOq^F¦ËêQÓ%ií¿¾à¾ÜàÅsü0‹kZ! 
é©HJ— ŒèR&ÃõÈ!Gîåñ¬©é.T2,<& KERS‘îcÊPW_j¯| *êz1 =p¡†÷èÍ…E /æÍA–xáÆñµÌ 抯 Þâ8\XEAòQÙ0 ÛuŸ÷ãVÃO(š”(„çE ÁI/¹2fuµß õ”r&JÍ…;Õ;D½„"ì“3NÑbNL}4mM?ãàáòú§ð'’ƒ˜Â;Ó O%ª!í.nŽhúÐ~9͵¬iÑv/R:2Ce}»˜hÁÈŒöx1$”¨´*4Ê0¬ Ë%ENH&ŸvJ¼ÐÅù¯…!p}¢!´ +ŒΘçh“—Qð¤](AoV5)šÝ£qžqšÕó ¡6-«$üªfúE£ñ纛¿ÐŠýI]KûëÖuMnœ.éðNQ]x‘úWRÓÓ’Y~oî˜~+”?//ã^« ÁÿßaûumD öáôé×=8…7hãTŠÒ<˰´¸ç´«™¬IF¹”vá­BÉ·$ÇæÛœÃ;ÔUËÝ.¼Gd ÆhàE–m+àCÞÇGùÈJU\ø„¡*®hbË~&.›Oqžš\¢´ªn|ΰ¶\«(¾¾.¿dØÙ¥ù‡%%Ëý#²9èâÇí*ú ËI™'ü²Z¶ÞÄA¾Þ_ &v v¿¥«H-ÙÓn|Çà²=t'E3k/Ð%\Eýx.¬¶Ó]’WŠTs–†HGë*~öà'üB….¿õݸ&àå{Î%ü&Bø½$„VM£gm›Ô%ìr3³„A‡pXøƒJßFO+*U½¤º²é®ïÛë錺èWqñÓW¸ömIO’.‘X@ÿ“sa*iöïÆ`c0ŒÃ7‰ÚX¬k‹obéM,¿‰•<‹á`86óG¸Ð¶Pßi!DŸ,lñ5Ó`{¨ßÂcÚ|»iôdn´××I£žP……¨…ƒ¾§ix8·xÔ÷,â¹QÒÂ1 i ÏY0-ŒX8y«:'q*Vy®XWEcÔ÷Bx¯…ÆqæÎuJ-€V\Á›hC·-{pÄ–G1dK'lygly–þ…QÈ“‚QRArÙ$ÎÅ:CÁq¼²ð±…±ë$ÇîÞ\Ò^ØÄÒ'‡ìE5î#¹%xË},|uޠ﫤l¯‰ÈiaåÞ*~¬£Âw!ZŒú¾i¤Æqñ!þ¤Y©¡ïŶ¤k=@? 0META-INF/MANIFEST.MFUTPK!“`zX!p1 »org/gradle/cli/CommandLineArgumentException.classUTPK!×™n³& Dorg/gradle/cli/CommandLineOption.classUTPK!d¢½ Z¶3 org/gradle/cli/CommandLineParser$AfterOptions.classUTPK!‹ã1,]< Óorg/gradle/cli/CommandLineParser$BeforeFirstSubCommand.classUTPK!éÚ³ßb= rorg/gradle/cli/CommandLineParser$KnownOptionParserState.classUTPK!C'|¢L—< Å org/gradle/cli/CommandLineParser$MissingOptionArgState.classUTPK!´”[£×J= „#org/gradle/cli/CommandLineParser$OptionAwareParserState.classUTPK!u[z?¢}8 Ï&org/gradle/cli/CommandLineParser$OptionParserState.classUTPK!×é C3 à(org/gradle/cli/CommandLineParser$OptionString.classUTPK!Éɉ§Î2 W+org/gradle/cli/CommandLineParser$ParserState.classUTPK!KÏ– sÇ? g-org/gradle/cli/CommandLineParser$UnknownOptionParserState.classUTPK!Œ™é¯c& P0org/gradle/cli/CommandLineParser.classUTPK!ë2w:á& \5org/gradle/cli/ParsedCommandLine.classUTPK!ZÝvmT¬, ó9org/gradle/cli/ParsedCommandLineOption.classUTPK!ù$Oÿœ3 ª;org/gradle/internal/file/PathTraversalChecker.classUTPK!yµwʇA ?org/gradle/internal/file/locking/ExclusiveFileAccessManager.classUTPK!b§ŠÁ£> Aorg/gradle/util/internal/WrapperDistributionUrlConverter.classUTPK!œEÒšŽ/ HCorg/gradle/wrapper/BootstrapMainStarter$1.classUTPK!€Ó%)äA ©g¨Ú {org/gradle/wrapper/Install.classUTPK!]öµo; ~Šorg/gradle/wrapper/Logger.classUTPK!ê)“>$j& org/gradle/wrapper/PathAssembler.classUTPK!Öã%¬šN0 Žorg/gradle/wrapper/SystemPropertiesHandler.classUTPK!æÉîP- ‘’org/gradle/wrapper/WrapperConfiguration.classUTPK!YaèSÅ ( ã•org/gradle/wrapper/WrapperExecutor.classUTPK!! •œprotobuf-specs-0.4.1/java/gradle/wrapper/gradle-wrapper.properties000066400000000000000000000003721477352757300254060ustar00rootroot00000000000000distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists distributionUrl=https\://services.gradle.org/distributions/gradle-8.7-bin.zip networkTimeout=10000 validateDistributionUrl=true zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists protobuf-specs-0.4.1/java/gradlew000077500000000000000000000207641477352757300170200ustar00rootroot00000000000000#!/bin/sh # # Copyright © 2015-2021 the original authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # https://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
# ############################################################################## # # Gradle start up script for POSIX generated by Gradle. # # Important for running: # # (1) You need a POSIX-compliant shell to run this script. If your /bin/sh is # noncompliant, but you have some other compliant shell such as ksh or # bash, then to run this script, type that shell name before the whole # command line, like: # # ksh Gradle # # Busybox and similar reduced shells will NOT work, because this script # requires all of these POSIX shell features: # * functions; # * expansions «$var», «${var}», «${var:-default}», «${var+SET}», # «${var#prefix}», «${var%suffix}», and «$( cmd )»; # * compound commands having a testable exit status, especially «case»; # * various built-in commands including «command», «set», and «ulimit». # # Important for patching: # # (2) This script targets any POSIX shell, so it avoids extensions provided # by Bash, Ksh, etc; in particular arrays are avoided. # # The "traditional" practice of packing multiple parameters into a # space-separated string is a well documented source of bugs and security # problems, so this is (mostly) avoided, by progressively accumulating # options in "$@", and eventually passing that to Java. # # Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS, # and GRADLE_OPTS) rely on word-splitting, this is performed explicitly; # see the in-line comments for details. # # There are tweaks for specific operating systems such as AIX, CygWin, # Darwin, MinGW, and NonStop. # # (3) This script is generated from the Groovy template # https://github.com/gradle/gradle/blob/HEAD/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt # within the Gradle project. # # You can find Gradle at https://github.com/gradle/gradle/. # ############################################################################## # Attempt to set APP_HOME # Resolve links: $0 may be a link app_path=$0 # Need this for daisy-chained symlinks. while APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path [ -h "$app_path" ] do ls=$( ls -ld "$app_path" ) link=${ls#*' -> '} case $link in #( /*) app_path=$link ;; #( *) app_path=$APP_HOME$link ;; esac done # This is normally unused # shellcheck disable=SC2034 APP_BASE_NAME=${0##*/} # Discard cd standard output in case $CDPATH is set (https://github.com/gradle/gradle/issues/25036) APP_HOME=$( cd "${APP_HOME:-./}" > /dev/null && pwd -P ) || exit # Use the maximum available, or set MAX_FD != -1 to use that value. MAX_FD=maximum warn () { echo "$*" } >&2 die () { echo echo "$*" echo exit 1 } >&2 # OS specific support (must be 'true' or 'false'). cygwin=false msys=false darwin=false nonstop=false case "$( uname )" in #( CYGWIN* ) cygwin=true ;; #( Darwin* ) darwin=true ;; #( MSYS* | MINGW* ) msys=true ;; #( NONSTOP* ) nonstop=true ;; esac CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar # Determine the Java command to use to start the JVM. if [ -n "$JAVA_HOME" ] ; then if [ -x "$JAVA_HOME/jre/sh/java" ] ; then # IBM's JDK on AIX uses strange locations for the executables JAVACMD=$JAVA_HOME/jre/sh/java else JAVACMD=$JAVA_HOME/bin/java fi if [ ! -x "$JAVACMD" ] ; then die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME Please set the JAVA_HOME variable in your environment to match the location of your Java installation." fi else JAVACMD=java if ! 
command -v java >/dev/null 2>&1 then die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. Please set the JAVA_HOME variable in your environment to match the location of your Java installation." fi fi # Increase the maximum file descriptors if we can. if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then case $MAX_FD in #( max*) # In POSIX sh, ulimit -H is undefined. That's why the result is checked to see if it worked. # shellcheck disable=SC2039,SC3045 MAX_FD=$( ulimit -H -n ) || warn "Could not query maximum file descriptor limit" esac case $MAX_FD in #( '' | soft) :;; #( *) # In POSIX sh, ulimit -n is undefined. That's why the result is checked to see if it worked. # shellcheck disable=SC2039,SC3045 ulimit -n "$MAX_FD" || warn "Could not set maximum file descriptor limit to $MAX_FD" esac fi # Collect all arguments for the java command, stacking in reverse order: # * args from the command line # * the main class name # * -classpath # * -D...appname settings # * --module-path (only if needed) # * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables. # For Cygwin or MSYS, switch paths to Windows format before running java if "$cygwin" || "$msys" ; then APP_HOME=$( cygpath --path --mixed "$APP_HOME" ) CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" ) JAVACMD=$( cygpath --unix "$JAVACMD" ) # Now convert the arguments - kludge to limit ourselves to /bin/sh for arg do if case $arg in #( -*) false ;; # don't mess with options #( /?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath [ -e "$t" ] ;; #( *) false ;; esac then arg=$( cygpath --path --ignore --mixed "$arg" ) fi # Roll the args list around exactly as many times as the number of # args, so each arg winds up back in the position where it started, but # possibly modified. # # NB: a `for` loop captures its iteration list before it begins, so # changing the positional parameters here affects neither the number of # iterations, nor the values presented in `arg`. shift # remove old arg set -- "$@" "$arg" # push replacement arg done fi # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' # Collect all arguments for the java command: # * DEFAULT_JVM_OPTS, JAVA_OPTS, JAVA_OPTS, and optsEnvironmentVar are not allowed to contain shell fragments, # and any embedded shellness will be escaped. # * For example: A user cannot expect ${Hostname} to be expanded, as it is an environment variable and will be # treated as '${Hostname}' itself on the command line. set -- \ "-Dorg.gradle.appname=$APP_BASE_NAME" \ -classpath "$CLASSPATH" \ org.gradle.wrapper.GradleWrapperMain \ "$@" # Stop when "xargs" is not available. if ! command -v xargs >/dev/null 2>&1 then die "xargs is not available" fi # Use "xargs" to parse quoted args. # # With -n1 it outputs one arg per line, with the quotes and backslashes removed. # # In Bash we could simply go: # # readarray ARGS < <( xargs -n1 <<<"$var" ) && # set -- "${ARGS[@]}" "$@" # # but POSIX shell has neither arrays nor command substitution, so instead we # post-process each arg (as a line of input to sed) to backslash-escape any # character that might be a shell metacharacter, then use eval to reverse # that process (while maintaining the separation between arguments), and wrap # the whole thing up as a single "set" statement. # # This will of course break if any of these variables contains a newline or # an unmatched quote. 
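#
# As an illustration only (not part of the wrapper logic), assuming a
# hypothetical value such as GRADLE_OPTS='-Dorg.example.flag="a b" -Xmx512m',
# the pipeline used below would leave "$@" holding exactly two arguments,
# -Dorg.example.flag=a b and -Xmx512m, with the embedded space preserved:
#
#   eval "set -- $(
#       printf '%s\n' "$GRADLE_OPTS" |
#       xargs -n1 |
#       sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' |
#       tr '\n' ' '
#   )"
#
# xargs -n1 strips the quoting and emits one argument per line, sed
# backslash-escapes anything that could be a shell metacharacter, and the
# outer eval/set re-splits the result into positional parameters.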
# eval "set -- $( printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" | xargs -n1 | sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' | tr '\n' ' ' )" '"$@"' exec "$JAVACMD" "$@" protobuf-specs-0.4.1/java/gradlew.bat000066400000000000000000000055461477352757300175630ustar00rootroot00000000000000@rem @rem Copyright 2015 the original author or authors. @rem @rem Licensed under the Apache License, Version 2.0 (the "License"); @rem you may not use this file except in compliance with the License. @rem You may obtain a copy of the License at @rem @rem https://www.apache.org/licenses/LICENSE-2.0 @rem @rem Unless required by applicable law or agreed to in writing, software @rem distributed under the License is distributed on an "AS IS" BASIS, @rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. @rem See the License for the specific language governing permissions and @rem limitations under the License. @rem @if "%DEBUG%"=="" @echo off @rem ########################################################################## @rem @rem Gradle startup script for Windows @rem @rem ########################################################################## @rem Set local scope for the variables with windows NT shell if "%OS%"=="Windows_NT" setlocal set DIRNAME=%~dp0 if "%DIRNAME%"=="" set DIRNAME=. @rem This is normally unused set APP_BASE_NAME=%~n0 set APP_HOME=%DIRNAME% @rem Resolve any "." and ".." in APP_HOME to make it shorter. for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m" @rem Find java.exe if defined JAVA_HOME goto findJavaFromJavaHome set JAVA_EXE=java.exe %JAVA_EXE% -version >NUL 2>&1 if %ERRORLEVEL% equ 0 goto execute echo. 1>&2 echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 1>&2 echo. 1>&2 echo Please set the JAVA_HOME variable in your environment to match the 1>&2 echo location of your Java installation. 1>&2 goto fail :findJavaFromJavaHome set JAVA_HOME=%JAVA_HOME:"=% set JAVA_EXE=%JAVA_HOME%/bin/java.exe if exist "%JAVA_EXE%" goto execute echo. 1>&2 echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 1>&2 echo. 1>&2 echo Please set the JAVA_HOME variable in your environment to match the 1>&2 echo location of your Java installation. 1>&2 goto fail :execute @rem Setup the command line set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar @rem Execute Gradle "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %* :end @rem End local scope for the variables with windows NT shell if %ERRORLEVEL% equ 0 goto mainEnd :fail rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of rem the _cmd.exe /c_ return code! set EXIT_CODE=%ERRORLEVEL% if %EXIT_CODE% equ 0 set EXIT_CODE=1 if not ""=="%GRADLE_EXIT_CONSOLE%" exit %EXIT_CODE% exit /b %EXIT_CODE% :mainEnd if "%OS%"=="Windows_NT" endlocal :omega protobuf-specs-0.4.1/java/settings.gradle.kts000066400000000000000000000005531477352757300212570ustar00rootroot00000000000000/* * This file was generated by the Gradle 'init' task. * * The settings file is used to specify which projects to include in your build. 
* * Detailed information about configuring a multi-project build in Gradle can be found * in the user manual at https://docs.gradle.org/7.5.1/userguide/multi_project_builds.html */ rootProject.name = "protobuf-specs" protobuf-specs-0.4.1/protoc-builder/000077500000000000000000000000001477352757300174455ustar00rootroot00000000000000protobuf-specs-0.4.1/protoc-builder/Dockerfile.go000066400000000000000000000014551477352757300220500ustar00rootroot00000000000000FROM golang:1.24.0-alpine@sha256:2d40d4fc278dad38be0777d5e2a88a2c6dee51b0b29c97a764fc6c6a11ca893c AS go-builder ADD hack/go/go.* tools/ # the specific versions of these tools are in hack/go.mod so that Dependabot can bump them for updates RUN cd tools && GOBIN=/go/tools go install tool FROM gcr.io/distroless/static-debian12:nonroot@sha256:6ec5aa99dc335666e79dc64e4a6c8b89c33a543a1967f20d360922a80dd21f02 COPY --from=go-builder /go/tools/protoc-* /usr/local/bin/ COPY --from=protoc-base:go /protobuf/bin/protoc /usr/local/bin/ COPY --from=protoc-base:go /protobuf/include/google /opt/include/google COPY --from=protoc-base:go /googleapis /googleapis ENTRYPOINT ["/usr/local/bin/protoc", "--plugin=protoc-gen-go=/usr/local/bin/protoc-gen-go", "--plugin=protoc-gen-go-grpc=/usr/local/bin/protoc-gen-go-grpc"] protobuf-specs-0.4.1/protoc-builder/Dockerfile.jsonschema000066400000000000000000000016671477352757300236020ustar00rootroot00000000000000FROM golang:1.24.0-alpine@sha256:2d40d4fc278dad38be0777d5e2a88a2c6dee51b0b29c97a764fc6c6a11ca893c AS jsonschema-builder ADD hack/jsonschema/go.* hack/jsonschema/tools.go tools/ # the specific versions of these tools are in hack/go.mod so that Dependabot can bump them for updates RUN cd tools && go build --trimpath -o /usr/local/bin/protoc-gen-jsonschema github.com/chrusty/protoc-gen-jsonschema/cmd/protoc-gen-jsonschema FROM gcr.io/distroless/static-debian12:nonroot@sha256:6ec5aa99dc335666e79dc64e4a6c8b89c33a543a1967f20d360922a80dd21f02 COPY --from=jsonschema-builder /usr/local/bin/protoc-gen-jsonschema /usr/local/bin/ COPY --from=protoc-base:jsonschema /protobuf/bin/protoc /usr/local/bin/ COPY --from=protoc-base:jsonschema /protobuf/include/google /opt/include/google COPY --from=protoc-base:jsonschema /googleapis /googleapis ENTRYPOINT ["/usr/local/bin/protoc", "--plugin=protoc-gen-jsonschema=/usr/local/bin/protoc-gen-jsonschema" ] protobuf-specs-0.4.1/protoc-builder/Dockerfile.protoc000066400000000000000000000027731477352757300227550ustar00rootroot00000000000000# syntax=docker/dockerfile-upstream:master # This container grabs the protoc compiler and the googleapi includes # /protobuf will contain the extracted protoc # /googleapis will contain the various googleapis proto imports one might need FROM debian:bullseye-slim@sha256:33b7c2e071c29e618182ec872c471f39d2dde3d8904d95f5b7a61acf3a592e7b AS protoc-builder # Create output directories RUN mkdir /protobuf /googleapis # Install needed utilities RUN apt-get update && apt-get install -y unzip git # Set up user and group to match host we're building the container on ARG UID RUN adduser --uid ${UID} --disabled-password myuser # Set permissions on the output directories so the user can write to them RUN chown myuser /protobuf /googleapis # Switch to user to execute the remaining commands USER myuser # Download specific release of protoc # TODO: add dependabot-like feature to check for release updates ARG PROTOC_VERSION ARG PROTOC_CHECKSUM ADD --chown=myuser --checksum=${PROTOC_CHECKSUM} 
https://github.com/protocolbuffers/protobuf/releases/download/${PROTOC_VERSION}/protoc-${PROTOC_VERSION#v}-linux-x86_64.zip /tmp/protoc.zip RUN unzip -d /protobuf /tmp/protoc.zip RUN chmod 755 /protobuf/bin/protoc # fetch specific commit of googleapis ARG GOOGLEAPIS_COMMIT RUN git clone --filter=tree:0 https://github.com/googleapis/googleapis.git /googleapis && \ cd /googleapis && git checkout ${GOOGLEAPIS_COMMIT} FROM scratch COPY --from=protoc-builder /protobuf /protobuf COPY --from=protoc-builder /googleapis /googleapis protobuf-specs-0.4.1/protoc-builder/Dockerfile.python000066400000000000000000000011031477352757300227520ustar00rootroot00000000000000FROM python:3.13.2-alpine@sha256:323a717dc4a010fee21e3f1aac738ee10bb485de4e7593ce242b36ee48d6b352 RUN pip3 install --upgrade --quiet pip # the specific versions of python protobuf tools are in hack/dev-requirements.txt so that Dependabot can bump them for updates ADD hack/dev-requirements.txt . RUN pip3 install -r dev-requirements.txt COPY --from=protoc-base:python /protobuf/bin/protoc /usr/local/bin/ COPY --from=protoc-base:python /protobuf/include/google /opt/include/google COPY --from=protoc-base:python /googleapis /googleapis ENTRYPOINT ["/usr/local/bin/protoc" ] protobuf-specs-0.4.1/protoc-builder/Dockerfile.ruby000066400000000000000000000005411477352757300224170ustar00rootroot00000000000000FROM gcr.io/distroless/static-debian12:nonroot@sha256:6ec5aa99dc335666e79dc64e4a6c8b89c33a543a1967f20d360922a80dd21f02 COPY --from=protoc-base:ruby /protobuf/bin/protoc /usr/local/bin/ COPY --from=protoc-base:ruby /protobuf/include/google /opt/include/google COPY --from=protoc-base:ruby /googleapis /googleapis ENTRYPOINT [ "/usr/local/bin/protoc" ] protobuf-specs-0.4.1/protoc-builder/Dockerfile.rust000066400000000000000000000006121477352757300224320ustar00rootroot00000000000000FROM rust:1.85.0@sha256:caa4a0e7bd1fe2e648caf3d904bc54c3bfcae9e74b4df2eb9ebe558c9e9e88c5 COPY --from=protoc-base:rust /protobuf/bin/protoc /usr/local/bin/ COPY --from=protoc-base:rust /protobuf/include/google /opt/include/google COPY --from=protoc-base:rust /googleapis /googleapis # this is not protoc because we will call Rust's prost crate to do the compilation ENTRYPOINT [ "/bin/bash" ] protobuf-specs-0.4.1/protoc-builder/Dockerfile.typescript000066400000000000000000000022641477352757300236500ustar00rootroot00000000000000FROM node:22@sha256:816f04d578545be8f3faadaefaa0926c65e67056d9bf2864009976380c2b0713 AS typescript-builder RUN mkdir /app COPY hack/package*.json /app WORKDIR /app # this flattens the node_modules in a way similar to the global install (which we'll (ab)use in a second) RUN npm ci --install-strategy=shallow # /usr/bin/env is called from ts-proto but not in distroless by default; we use busybox for this FROM gcr.io/distroless/base-debian12:debug-nonroot@sha256:ee694eefd7685d8c443fec6abd3bff8e30c437faa8fbeacc0ce4c2e847d45501 AS env-source FROM gcr.io/distroless/nodejs22-debian12:nonroot@sha256:894873fc72ea5731e38cf3cfa75a6a3b1985a9330e46bb4d81162e6a184f212e # node is installed in a non-default location in distroless ENV PATH=$PATH:/nodejs/bin COPY --from=typescript-builder /app/node_modules /usr/local/lib/node_modules COPY --from=env-source /busybox/busybox /usr/bin/env COPY --from=protoc-base:typescript /protobuf/bin/protoc /usr/local/bin/ COPY --from=protoc-base:typescript /protobuf/include/google /opt/include/google COPY --from=protoc-base:typescript /googleapis /googleapis ENTRYPOINT ["/usr/local/bin/protoc", 
"--plugin=/usr/local/lib/node_modules/ts-proto/protoc-gen-ts_proto" ] protobuf-specs-0.4.1/protoc-builder/hack/000077500000000000000000000000001477352757300203535ustar00rootroot00000000000000protobuf-specs-0.4.1/protoc-builder/hack/Dockerfile.protobuf000066400000000000000000000005161477352757300242060ustar00rootroot00000000000000# This Dockerfile exists to allow Dependabot to watch Homebrew builds of protobuf for triggering updates # We don't actually use the content of this image in the repo, as this is a dynamically linked version of protoc FROM ghcr.io/homebrew/core/protobuf/29:29.4@sha256:5c4a7e5591b8d71a653810b63d5b91294b5321bf7359abeb8b194e26ab44f36e protobuf-specs-0.4.1/protoc-builder/hack/dev-requirements.txt000066400000000000000000000000641477352757300244130ustar00rootroot00000000000000betterproto[compiler]==2.0.0b7 mypy-protobuf==3.6.0 protobuf-specs-0.4.1/protoc-builder/hack/go/000077500000000000000000000000001477352757300207605ustar00rootroot00000000000000protobuf-specs-0.4.1/protoc-builder/hack/go/go.mod000066400000000000000000000005041477352757300220650ustar00rootroot00000000000000module github.com/sigstore/protobuf-specs/protoc-builder/hack/go go 1.24 toolchain go1.24.0 tool ( google.golang.org/grpc/cmd/protoc-gen-go-grpc google.golang.org/protobuf/cmd/protoc-gen-go ) require ( google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.5.1 // indirect google.golang.org/protobuf v1.36.5 // indirect ) protobuf-specs-0.4.1/protoc-builder/hack/go/go.sum000066400000000000000000000031571477352757300221210ustar00rootroot00000000000000github.com/google/go-cmp v0.5.5 h1:Khx7svrCpmxxtHBq5j2mp/xVjsi8hQMfNLvJFAlrGgU= github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= golang.org/x/net v0.26.0 h1:soB7SVo0PWrY4vPW/+ay0jKDNScG2X9wFeYlXIvJsOQ= golang.org/x/net v0.26.0/go.mod h1:5YKkiSynbBIh3p6iOc/vibscux0x38BZDkn8sCUPxHE= golang.org/x/sys v0.21.0 h1:rF+pYz3DAGSQAxAu1CbC7catZg4ebC4UIeIhKxBZvws= golang.org/x/sys v0.21.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/text v0.16.0 h1:a94ExnEXNtEwYLGJSIUxnWoxoRz/ZcCsV63ROupILh4= golang.org/x/text v0.16.0/go.mod h1:GhwF1Be+LQoKShO3cGOHzqOgRrGaYc9AvblQOmPVHnI= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= google.golang.org/genproto/googleapis/rpc v0.0.0-20240604185151-ef581f913117 h1:1GBuWVLM/KMVUv1t1En5Gs+gFZCNd360GGb4sSxtrhU= google.golang.org/genproto/googleapis/rpc v0.0.0-20240604185151-ef581f913117/go.mod h1:EfXuqaE1J41VCDicxHzUDm+8rk+7ZdXzHV0IhO/I6s0= google.golang.org/grpc v1.65.0 h1:bs/cUb4lp1G5iImFFd3u5ixQzweKizoZJAwBNLR42lc= google.golang.org/grpc v1.65.0/go.mod h1:WgYC2ypjlB0EiQi6wdKixMqukr6lBc0Vo+oOgjrM5ZQ= google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.5.1 h1:F29+wU6Ee6qgu9TddPgooOdaqsxTMunOoj8KA5yuS5A= google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.5.1/go.mod h1:5KF+wpkbTSbGcR9zteSqZV6fqFOWBl4Yde8En8MryZA= google.golang.org/protobuf v1.36.5 h1:tPhr+woSbjfYvY6/GPufUoYizxw1cF/yFoxJ2fmpwlM= google.golang.org/protobuf v1.36.5/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE= protobuf-specs-0.4.1/protoc-builder/hack/jsonschema/000077500000000000000000000000001477352757300225055ustar00rootroot00000000000000protobuf-specs-0.4.1/protoc-builder/hack/jsonschema/go.mod000066400000000000000000000016711477352757300236200ustar00rootroot00000000000000module github.com/sigstore/protobuf-specs/protoc-builder/hack/jsonschema go 
1.23 require ( google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.2.0 google.golang.org/protobuf v1.30.0 ) require ( github.com/alecthomas/jsonschema v0.0.0-20210918223802-a1d3f4b43d7b // indirect github.com/chrusty/protoc-gen-jsonschema v0.0.0-20230418203306-956cc32e45d6 // indirect github.com/envoyproxy/protoc-gen-validate v0.10.1 // indirect github.com/fatih/camelcase v1.0.0 // indirect github.com/iancoleman/orderedmap v0.2.0 // indirect github.com/iancoleman/strcase v0.2.0 // indirect github.com/konsorten/go-windows-terminal-sequences v1.0.2 // indirect github.com/sirupsen/logrus v1.4.2 // indirect github.com/xeipuuv/gojsonpointer v0.0.0-20190809123943-df4f5c81cb3b // indirect github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect github.com/xeipuuv/gojsonschema v1.2.0 // indirect golang.org/x/sys v0.21.0 // indirect ) protobuf-specs-0.4.1/protoc-builder/hack/jsonschema/go.sum000066400000000000000000000316231477352757300236450ustar00rootroot00000000000000github.com/alecthomas/jsonschema v0.0.0-20210918223802-a1d3f4b43d7b h1:SM/PhkjhbeX1w/Jkv7dHFYDn2y4/88XZp3CIqVHsdcs= github.com/alecthomas/jsonschema v0.0.0-20210918223802-a1d3f4b43d7b/go.mod h1:/n6+1/DWPltRLWL/VKyUxg6tzsl5kHUCcraimt4vr60= github.com/chrusty/protoc-gen-jsonschema v0.0.0-20230418203306-956cc32e45d6 h1:WqTuh63Eanl4DrV0oAUW7ZJgpG9x25NCTTx3v6h4LPM= github.com/chrusty/protoc-gen-jsonschema v0.0.0-20230418203306-956cc32e45d6/go.mod h1:VC7bNYA8cg2IWaZDBNfrqUBcUMEuJ/RRkus0frzTQb4= github.com/chrusty/protoc-gen-jsonschema v0.0.0-20240212064413-73d5723042b8 h1:/Jd/W5vhPuLHSNOzK15RkiVd9zhFHGuxbOV+mRGFYQ0= github.com/chrusty/protoc-gen-jsonschema v0.0.0-20240212064413-73d5723042b8/go.mod h1:VC7bNYA8cg2IWaZDBNfrqUBcUMEuJ/RRkus0frzTQb4= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/envoyproxy/protoc-gen-validate v0.10.0-SNAPSHOT.8 h1:8fATmp6SLK9XevENs3p8w3nMTcllQHfpYK6XGXDce28= github.com/envoyproxy/protoc-gen-validate v0.10.0-SNAPSHOT.8/go.mod h1:/tKK1rWdr4+hCDMhEg1hR/fkxiTnPwqVofFWkGFllMM= github.com/envoyproxy/protoc-gen-validate v0.10.1 h1:c0g45+xCJhdgFGw7a5QAfdS4byAbud7miNWJ1WwEVf8= github.com/envoyproxy/protoc-gen-validate v0.10.1/go.mod h1:DRjgyB0I43LtJapqN6NiRwroiAU2PaFuvk/vjgh61ss= github.com/fatih/camelcase v1.0.0 h1:hxNvNX/xYBp0ovncs8WyWZrOrpBNub/JfaMvbURyft8= github.com/fatih/camelcase v1.0.0/go.mod h1:yN2Sb0lFhZJUdVvtELVWefmrXpuZESvPmqwoZc+/fpc= github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk= github.com/google/go-cmp v0.5.5 h1:Khx7svrCpmxxtHBq5j2mp/xVjsi8hQMfNLvJFAlrGgU= github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= github.com/iancoleman/orderedmap v0.0.0-20190318233801-ac98e3ecb4b0/go.mod h1:N0Wam8K1arqPXNWjMo21EXnBPOPp36vB07FNRdD2geA= github.com/iancoleman/orderedmap v0.2.0 h1:sq1N/TFpYH++aViPcaKjys3bDClUEU7s5B+z6jq8pNA= github.com/iancoleman/orderedmap v0.2.0/go.mod h1:N0Wam8K1arqPXNWjMo21EXnBPOPp36vB07FNRdD2geA= github.com/iancoleman/strcase v0.2.0 h1:05I4QRnGpI0m37iZQRuskXh+w77mr6Z41lwQzuHLwW0= github.com/iancoleman/strcase v0.2.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47ZCWhYzw7ho= github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/konsorten/go-windows-terminal-sequences v1.0.2 
h1:DB17ag19krx9CFsz4o3enTrPXyIXCl+2iCXH/aMAp9s= github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg= github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/lyft/protoc-gen-star/v2 v2.0.1/go.mod h1:RcCdONR2ScXaYnQC5tUzxzlpA3WVYF7/opLeUgcQs/o= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/sftp v1.10.1/go.mod h1:lYOWFsE0bwd1+KfKJaKeuokY15vzFx25BLbzYYoAxZI= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/sirupsen/logrus v1.4.2 h1:SPIRibHv4MatM3XXNO2BJeFLZwZ2LvZgfQ5+UNI2im4= github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= github.com/spf13/afero v1.3.3/go.mod h1:5KUK8ByomD5Ti5Artl0RtHeI5pTF7MIDuXL3yY520V4= github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.3.1-0.20190311161405-34c6fa2dc709 h1:Ko2LQMrRU+Oy/+EDBwX7eZ2jp3C47eDBB8EIhKTun+I= github.com/stretchr/testify v1.3.1-0.20190311161405-34c6fa2dc709/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI= github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= github.com/stretchr/testify v1.6.1 h1:hDPOHmpOpP40lSULcqw7IrRb/u7w6RpDC9399XyoNd0= github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= github.com/xeipuuv/gojsonpointer v0.0.0-20190809123943-df4f5c81cb3b h1:6cLsL+2FW6dRAdl5iMtHgRogVCff0QpRi9653YmdcJA= github.com/xeipuuv/gojsonpointer v0.0.0-20190809123943-df4f5c81cb3b/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 h1:EzJWgHovont7NscjpAxXsDA8S8BMYve8Y5+7cuRE7R0= github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415/go.mod h1:GwrjFmJcFw6At/Gs6z4yjiIwzuJ1/+UwLxMQDVQXShQ= github.com/xeipuuv/gojsonschema v1.2.0 h1:LhYJRs+L4fBtjZUfuSZIKGeVu0QRy8e5Xi7D17UxZ74= github.com/xeipuuv/gojsonschema v1.2.0/go.mod h1:anYRn/JVcOK2ZgGU+IjEV4nwlhoK5sQluxsYJ78Id3Y= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/lint v0.0.0-20210508222113-6edffad5e616/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY= golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod 
h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/mod v0.9.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs= golang.org/x/net v0.7.0 h1:rJrUqqhjsgNp7KqAIc25s9pZnjU7TUcSY7HcVZjdn1g= golang.org/x/net v0.8.0 h1:Zrh2ngAOFYneWTAIAPethzeaQLuHwhuBkuV6ZiRnUaQ= golang.org/x/net v0.8.0/go.mod h1:QVkue5JL9kW//ek3r6jTKnTFis1tRmNAW2P1shuFdJc= golang.org/x/net v0.26.0 h1:soB7SVo0PWrY4vPW/+ay0jKDNScG2X9wFeYlXIvJsOQ= golang.org/x/net v0.26.0/go.mod h1:5YKkiSynbBIh3p6iOc/vibscux0x38BZDkn8sCUPxHE= golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220520151302-bc2c85ada10a/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.7.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.21.0 h1:rF+pYz3DAGSQAxAu1CbC7catZg4ebC4UIeIhKxBZvws= golang.org/x/sys v0.21.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k= golang.org/x/term v0.6.0/go.mod h1:m6U89DPEgQRMq3DNkDClhWw02AUbt2daBVO4cn4Hv9U= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/text v0.7.0 h1:4BRB4x83lYWy72KwLD/qYDuTu7q9PjSagHvijDw7cLo= golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= golang.org/x/text v0.8.0 h1:57P1ETyNKtuIjB4SRd15iJxuhj8Gc416Y78H3qgMh68= golang.org/x/text v0.8.0/go.mod 
h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8= golang.org/x/text v0.16.0 h1:a94ExnEXNtEwYLGJSIUxnWoxoRz/ZcCsV63ROupILh4= golang.org/x/text v0.16.0/go.mod h1:GhwF1Be+LQoKShO3cGOHzqOgRrGaYc9AvblQOmPVHnI= golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= golang.org/x/tools v0.0.0-20200130002326-2f3ba24bd6e7/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU= golang.org/x/tools v0.7.0/go.mod h1:4pg6aUX35JBAogB10C9AtvVL+qowtN4pT3CGSQex14s= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2 h1:H2TDz8ibqkAF6YGhCdN3jS9O0/s90v0rJh3X/OLHEUk= google.golang.org/genproto/googleapis/rpc v0.0.0-20240604185151-ef581f913117 h1:1GBuWVLM/KMVUv1t1En5Gs+gFZCNd360GGb4sSxtrhU= google.golang.org/genproto/googleapis/rpc v0.0.0-20240604185151-ef581f913117/go.mod h1:EfXuqaE1J41VCDicxHzUDm+8rk+7ZdXzHV0IhO/I6s0= google.golang.org/grpc v1.65.0 h1:bs/cUb4lp1G5iImFFd3u5ixQzweKizoZJAwBNLR42lc= google.golang.org/grpc v1.65.0/go.mod h1:WgYC2ypjlB0EiQi6wdKixMqukr6lBc0Vo+oOgjrM5ZQ= google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.2.0 h1:TLkBREm4nIsEcexnCjgQd5GQWaHcqMzwQV0TX9pq8S0= google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.2.0/go.mod h1:DNq5QpG7LJqD2AamLZ7zvKE0DEpVl2BSEVjFycAAjRY= google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.3.0 h1:rNBFJjBCOgVr9pWD7rs/knKL4FRTKgpZmsRfV214zcA= google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.3.0/go.mod h1:Dk1tviKTvMCz5tvh7t+fh94dhmQVHuCt2OzJB3CTW9Y= google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.5.1 h1:F29+wU6Ee6qgu9TddPgooOdaqsxTMunOoj8KA5yuS5A= google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.5.1/go.mod h1:5KF+wpkbTSbGcR9zteSqZV6fqFOWBl4Yde8En8MryZA= google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= google.golang.org/protobuf v1.27.1/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= google.golang.org/protobuf v1.28.1 h1:d0NfwRgPtno5B1Wa6L2DAG+KivqkdutMf1UhdNx175w= google.golang.org/protobuf v1.28.1/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= google.golang.org/protobuf v1.30.0 h1:kPPoIgf3TsEvrm0PFe15JQ+570QVxYzEvvHqChK+cng= google.golang.org/protobuf v1.30.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= google.golang.org/protobuf v1.36.2 h1:R8FeyR1/eLmkutZOM5CWghmo5itiG9z0ktFlTVLuTmU= google.golang.org/protobuf v1.36.2/go.mod h1:9fA7Ob0pmnwhb644+1+CVWFRbNajQ6iRojtC/QF5bRE= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c h1:dUUwHk2QECo/6vqA44rthZ8ie2QXMNeKRTHCNY2nXvo= 
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= protobuf-specs-0.4.1/protoc-builder/hack/jsonschema/tools.go000066400000000000000000000015441477352757300242000ustar00rootroot00000000000000//go:build tools // +build tools // Copyright 2024 The Sigstore Authors. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. // // This package imports things required by protoc code gen for Golang // DO NOT DELETE THIS FILE AS IT IS NEEDED TO RUN 'go mod tidy' package tools import ( _ "github.com/chrusty/protoc-gen-jsonschema/cmd/protoc-gen-jsonschema" ) protobuf-specs-0.4.1/protoc-builder/hack/package-lock.json000066400000000000000000004445141477352757300236030ustar00rootroot00000000000000{ "name": "hack", "version": "0.0.1", "lockfileVersion": 2, "requires": true, "packages": { "": { "version": "0.0.1", "dependencies": { "grpc_tools_node_protoc_ts": "5.3.3", "grpc-tools": "1.13.0", "protoc-gen-grpc-web": "1.5.0", "ts-proto": "2.6.1" } }, "node_modules/@bufbuild/protobuf": { "version": "2.2.3", "resolved": "https://registry.npmjs.org/@bufbuild/protobuf/-/protobuf-2.2.3.tgz", "integrity": "sha512-tFQoXHJdkEOSwj5tRIZSPNUuXK3RaR7T1nUrPgbYX1pUbvqqaaZAsfo+NXBPsz5rZMSKVFrgK1WL8Q/MSLvprg==" }, "node_modules/@mapbox/node-pre-gyp": { "version": "1.0.11", "resolved": "https://registry.npmjs.org/@mapbox/node-pre-gyp/-/node-pre-gyp-1.0.11.tgz", "integrity": "sha512-Yhlar6v9WQgUp/He7BdgzOz8lqMQ8sU+jkCq7Wx8Myc5YFJLbEe7lgui/V7G1qB1DJykHSGwreceSaD60Y0PUQ==", "dependencies": { "detect-libc": "^2.0.0", "https-proxy-agent": "^5.0.0", "make-dir": "^3.1.0", "node-fetch": "^2.6.7", "nopt": "^5.0.0", "npmlog": "^5.0.1", "rimraf": "^3.0.2", "semver": "^7.3.5", "tar": "^6.1.11" }, "bin": { "node-pre-gyp": "bin/node-pre-gyp" } }, "node_modules/@sindresorhus/is": { "version": "0.7.0", "resolved": "https://registry.npmjs.org/@sindresorhus/is/-/is-0.7.0.tgz", "integrity": "sha512-ONhaKPIufzzrlNbqtWFFd+jlnemX6lJAgq9ZeiZtS7I1PIf/la7CW4m83rTXRnVnsMbW2k56pGYu7AUFJD9Pow==", "engines": { "node": ">=4" } }, "node_modules/abbrev": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz", "integrity": "sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==" }, "node_modules/agent-base": { "version": "6.0.2", "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz", "integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==", "dependencies": { "debug": "4" }, "engines": { "node": ">= 6.0.0" } }, "node_modules/ansi-regex": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", "engines": { "node": ">=8" } }, "node_modules/aproba": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/aproba/-/aproba-2.0.0.tgz", "integrity": 
"sha512-lYe4Gx7QT+MKGbDsA+Z+he/Wtef0BiwDOlK/XkBrdfsh9J/jPPXbX0tE9x9cl27Tmu5gg3QUbUrQYa/y+KOHPQ==" }, "node_modules/archive-type": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/archive-type/-/archive-type-4.0.0.tgz", "integrity": "sha512-zV4Ky0v1F8dBrdYElwTvQhweQ0P7Kwc1aluqJsYtOBP01jXcWCyW2IEfI1YiqsG+Iy7ZR+o5LF1N+PGECBxHWA==", "dependencies": { "file-type": "^4.2.0" }, "engines": { "node": ">=4" } }, "node_modules/archive-type/node_modules/file-type": { "version": "4.4.0", "resolved": "https://registry.npmjs.org/file-type/-/file-type-4.4.0.tgz", "integrity": "sha512-f2UbFQEk7LXgWpi5ntcO86OeA/cC80fuDDDaX/fZ2ZGel+AF7leRQqBBW1eJNiiQkrZlAoM6P+VYP5P6bOlDEQ==", "engines": { "node": ">=4" } }, "node_modules/are-we-there-yet": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/are-we-there-yet/-/are-we-there-yet-2.0.0.tgz", "integrity": "sha512-Ci/qENmwHnsYo9xKIcUJN5LeDKdJ6R1Z1j9V/J5wyq8nh/mYPEpIKJbBZXtZjG04HiK7zV/p6Vs9952MrMeUIw==", "deprecated": "This package is no longer supported.", "dependencies": { "delegates": "^1.0.0", "readable-stream": "^3.6.0" }, "engines": { "node": ">=10" } }, "node_modules/balanced-match": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" }, "node_modules/base64-js": { "version": "1.5.1", "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", "funding": [ { "type": "github", "url": "https://github.com/sponsors/feross" }, { "type": "patreon", "url": "https://www.patreon.com/feross" }, { "type": "consulting", "url": "https://feross.org/support" } ] }, "node_modules/bl": { "version": "1.2.3", "resolved": "https://registry.npmjs.org/bl/-/bl-1.2.3.tgz", "integrity": "sha512-pvcNpa0UU69UT341rO6AYy4FVAIkUHuZXRIWbq+zHnsVcRzDDjIAhGuuYoi0d//cwIwtt4pkpKycWEfjdV+vww==", "dependencies": { "readable-stream": "^2.3.5", "safe-buffer": "^5.1.1" } }, "node_modules/bl/node_modules/readable-stream": { "version": "2.3.8", "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz", "integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==", "dependencies": { "core-util-is": "~1.0.0", "inherits": "~2.0.3", "isarray": "~1.0.0", "process-nextick-args": "~2.0.0", "safe-buffer": "~5.1.1", "string_decoder": "~1.1.1", "util-deprecate": "~1.0.1" } }, "node_modules/bl/node_modules/safe-buffer": { "version": "5.1.2", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" }, "node_modules/bl/node_modules/string_decoder": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", "dependencies": { "safe-buffer": "~5.1.0" } }, "node_modules/brace-expansion": { "version": "1.1.11", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", "dependencies": { "balanced-match": "^1.0.0", "concat-map": "0.0.1" } }, "node_modules/buffer": { "version": "5.7.1", "resolved": 
"https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", "funding": [ { "type": "github", "url": "https://github.com/sponsors/feross" }, { "type": "patreon", "url": "https://www.patreon.com/feross" }, { "type": "consulting", "url": "https://feross.org/support" } ], "dependencies": { "base64-js": "^1.3.1", "ieee754": "^1.1.13" } }, "node_modules/buffer-alloc": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/buffer-alloc/-/buffer-alloc-1.2.0.tgz", "integrity": "sha512-CFsHQgjtW1UChdXgbyJGtnm+O/uLQeZdtbDo8mfUgYXCHSM1wgrVxXm6bSyrUuErEb+4sYVGCzASBRot7zyrow==", "dependencies": { "buffer-alloc-unsafe": "^1.1.0", "buffer-fill": "^1.0.0" } }, "node_modules/buffer-alloc-unsafe": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/buffer-alloc-unsafe/-/buffer-alloc-unsafe-1.1.0.tgz", "integrity": "sha512-TEM2iMIEQdJ2yjPJoSIsldnleVaAk1oW3DBVUykyOLsEsFmEc9kn+SFFPz+gl54KQNxlDnAwCXosOS9Okx2xAg==" }, "node_modules/buffer-crc32": { "version": "0.2.13", "resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.13.tgz", "integrity": "sha512-VO9Ht/+p3SN7SKWqcrgEzjGbRSJYTx+Q1pTQC0wrWqHx0vpJraQ6GtHx8tvcg1rlK1byhU5gccxgOgj7B0TDkQ==", "engines": { "node": "*" } }, "node_modules/buffer-fill": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/buffer-fill/-/buffer-fill-1.0.0.tgz", "integrity": "sha512-T7zexNBwiiaCOGDg9xNX9PBmjrubblRkENuptryuI64URkXDFum9il/JGL8Lm8wYfAXpredVXXZz7eMHilimiQ==" }, "node_modules/cacheable-request": { "version": "2.1.4", "resolved": "https://registry.npmjs.org/cacheable-request/-/cacheable-request-2.1.4.tgz", "integrity": "sha512-vag0O2LKZ/najSoUwDbVlnlCFvhBE/7mGTY2B5FgCBDcRD+oVV1HYTOwM6JZfMg/hIcM6IwnTZ1uQQL5/X3xIQ==", "dependencies": { "clone-response": "1.0.2", "get-stream": "3.0.0", "http-cache-semantics": "3.8.1", "keyv": "3.0.0", "lowercase-keys": "1.0.0", "normalize-url": "2.0.1", "responselike": "1.0.2" } }, "node_modules/cacheable-request/node_modules/get-stream": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-3.0.0.tgz", "integrity": "sha512-GlhdIUuVakc8SJ6kK0zAFbiGzRFzNnY4jUuEbV9UROo4Y+0Ny4fjvcZFVTeDA4odpFyOQzaw6hXukJSq/f28sQ==", "engines": { "node": ">=4" } }, "node_modules/cacheable-request/node_modules/lowercase-keys": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-1.0.0.tgz", "integrity": "sha512-RPlX0+PHuvxVDZ7xX+EBVAp4RsVxP/TdDSN2mJYdiq1Lc4Hz7EUSjUI7RZrKKlmrIzVhf6Jo2stj7++gVarS0A==", "engines": { "node": ">=0.10.0" } }, "node_modules/case-anything": { "version": "2.1.13", "resolved": "https://registry.npmjs.org/case-anything/-/case-anything-2.1.13.tgz", "integrity": "sha512-zlOQ80VrQ2Ue+ymH5OuM/DlDq64mEm+B9UTdHULv5osUMD6HalNTblf2b1u/m6QecjsnOkBpqVZ+XPwIVsy7Ng==", "engines": { "node": ">=12.13" }, "funding": { "url": "https://github.com/sponsors/mesqueeb" } }, "node_modules/chownr": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/chownr/-/chownr-2.0.0.tgz", "integrity": "sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==", "engines": { "node": ">=10" } }, "node_modules/clone-response": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/clone-response/-/clone-response-1.0.2.tgz", "integrity": "sha512-yjLXh88P599UOyPTFX0POsd7WxnbsVsGohcwzHOLspIhhpalPw1BcqED8NblyZLKcGrL8dTgMlcaZxV2jAD41Q==", "dependencies": { "mimic-response": "^1.0.0" } }, 
"node_modules/color-support": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/color-support/-/color-support-1.1.3.tgz", "integrity": "sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg==", "bin": { "color-support": "bin.js" } }, "node_modules/commander": { "version": "2.20.3", "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==" }, "node_modules/concat-map": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==" }, "node_modules/console-control-strings": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/console-control-strings/-/console-control-strings-1.1.0.tgz", "integrity": "sha512-ty/fTekppD2fIwRvnZAVdeOiGd1c7YXEixbgJTNzqcxJWKQnjJ/V1bNEEE6hygpM3WjwHFUVK6HTjWSzV4a8sQ==" }, "node_modules/content-disposition": { "version": "0.5.4", "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz", "integrity": "sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ==", "dependencies": { "safe-buffer": "5.2.1" }, "engines": { "node": ">= 0.6" } }, "node_modules/core-util-is": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz", "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==" }, "node_modules/debug": { "version": "4.4.0", "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz", "integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==", "dependencies": { "ms": "^2.1.3" }, "engines": { "node": ">=6.0" }, "peerDependenciesMeta": { "supports-color": { "optional": true } } }, "node_modules/decode-uri-component": { "version": "0.2.2", "resolved": "https://registry.npmjs.org/decode-uri-component/-/decode-uri-component-0.2.2.tgz", "integrity": "sha512-FqUYQ+8o158GyGTrMFJms9qh3CqTKvAqgqsTnkLI8sKu0028orqBhxNMFkFen0zGyg6epACD32pjVk58ngIErQ==", "engines": { "node": ">=0.10" } }, "node_modules/decompress": { "version": "4.2.1", "resolved": "https://registry.npmjs.org/decompress/-/decompress-4.2.1.tgz", "integrity": "sha512-e48kc2IjU+2Zw8cTb6VZcJQ3lgVbS4uuB1TfCHbiZIP/haNXm+SVyhu+87jts5/3ROpd82GSVCoNs/z8l4ZOaQ==", "dependencies": { "decompress-tar": "^4.0.0", "decompress-tarbz2": "^4.0.0", "decompress-targz": "^4.0.0", "decompress-unzip": "^4.0.1", "graceful-fs": "^4.1.10", "make-dir": "^1.0.0", "pify": "^2.3.0", "strip-dirs": "^2.0.0" }, "engines": { "node": ">=4" } }, "node_modules/decompress-response": { "version": "3.3.0", "resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-3.3.0.tgz", "integrity": "sha512-BzRPQuY1ip+qDonAOz42gRm/pg9F768C+npV/4JOsxRC2sq+Rlk+Q4ZCAsOhnIaMrgarILY+RMUIvMmmX1qAEA==", "dependencies": { "mimic-response": "^1.0.0" }, "engines": { "node": ">=4" } }, "node_modules/decompress-tar": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/decompress-tar/-/decompress-tar-4.1.1.tgz", "integrity": "sha512-JdJMaCrGpB5fESVyxwpCx4Jdj2AagLmv3y58Qy4GE6HMVjWz1FeVQk1Ct4Kye7PftcdOo/7U7UKzYBJgqnGeUQ==", "dependencies": { "file-type": "^5.2.0", "is-stream": "^1.1.0", "tar-stream": "^1.5.2" }, "engines": { "node": ">=4" } }, 
"node_modules/decompress-tar/node_modules/file-type": { "version": "5.2.0", "resolved": "https://registry.npmjs.org/file-type/-/file-type-5.2.0.tgz", "integrity": "sha512-Iq1nJ6D2+yIO4c8HHg4fyVb8mAJieo1Oloy1mLLaB2PvezNedhBVm+QU7g0qM42aiMbRXTxKKwGD17rjKNJYVQ==", "engines": { "node": ">=4" } }, "node_modules/decompress-tarbz2": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/decompress-tarbz2/-/decompress-tarbz2-4.1.1.tgz", "integrity": "sha512-s88xLzf1r81ICXLAVQVzaN6ZmX4A6U4z2nMbOwobxkLoIIfjVMBg7TeguTUXkKeXni795B6y5rnvDw7rxhAq9A==", "dependencies": { "decompress-tar": "^4.1.0", "file-type": "^6.1.0", "is-stream": "^1.1.0", "seek-bzip": "^1.0.5", "unbzip2-stream": "^1.0.9" }, "engines": { "node": ">=4" } }, "node_modules/decompress-tarbz2/node_modules/file-type": { "version": "6.2.0", "resolved": "https://registry.npmjs.org/file-type/-/file-type-6.2.0.tgz", "integrity": "sha512-YPcTBDV+2Tm0VqjybVd32MHdlEGAtuxS3VAYsumFokDSMG+ROT5wawGlnHDoz7bfMcMDt9hxuXvXwoKUx2fkOg==", "engines": { "node": ">=4" } }, "node_modules/decompress-targz": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/decompress-targz/-/decompress-targz-4.1.1.tgz", "integrity": "sha512-4z81Znfr6chWnRDNfFNqLwPvm4db3WuZkqV+UgXQzSngG3CEKdBkw5jrv3axjjL96glyiiKjsxJG3X6WBZwX3w==", "dependencies": { "decompress-tar": "^4.1.1", "file-type": "^5.2.0", "is-stream": "^1.1.0" }, "engines": { "node": ">=4" } }, "node_modules/decompress-targz/node_modules/file-type": { "version": "5.2.0", "resolved": "https://registry.npmjs.org/file-type/-/file-type-5.2.0.tgz", "integrity": "sha512-Iq1nJ6D2+yIO4c8HHg4fyVb8mAJieo1Oloy1mLLaB2PvezNedhBVm+QU7g0qM42aiMbRXTxKKwGD17rjKNJYVQ==", "engines": { "node": ">=4" } }, "node_modules/decompress-unzip": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/decompress-unzip/-/decompress-unzip-4.0.1.tgz", "integrity": "sha512-1fqeluvxgnn86MOh66u8FjbtJpAFv5wgCT9Iw8rcBqQcCo5tO8eiJw7NNTrvt9n4CRBVq7CstiS922oPgyGLrw==", "dependencies": { "file-type": "^3.8.0", "get-stream": "^2.2.0", "pify": "^2.3.0", "yauzl": "^2.4.2" }, "engines": { "node": ">=4" } }, "node_modules/decompress-unzip/node_modules/file-type": { "version": "3.9.0", "resolved": "https://registry.npmjs.org/file-type/-/file-type-3.9.0.tgz", "integrity": "sha512-RLoqTXE8/vPmMuTI88DAzhMYC99I8BWv7zYP4A1puo5HIjEJ5EX48ighy4ZyKMG9EDXxBgW6e++cn7d1xuFghA==", "engines": { "node": ">=0.10.0" } }, "node_modules/decompress-unzip/node_modules/get-stream": { "version": "2.3.1", "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-2.3.1.tgz", "integrity": "sha512-AUGhbbemXxrZJRD5cDvKtQxLuYaIbNtDTK8YqupCI393Q2KSTreEsLUN3ZxAWFGiKTzL6nKuzfcIvieflUX9qA==", "dependencies": { "object-assign": "^4.0.1", "pinkie-promise": "^2.0.0" }, "engines": { "node": ">=0.10.0" } }, "node_modules/decompress-unzip/node_modules/pify": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", "integrity": "sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog==", "engines": { "node": ">=0.10.0" } }, "node_modules/decompress/node_modules/make-dir": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-1.3.0.tgz", "integrity": "sha512-2w31R7SJtieJJnQtGc7RVL2StM2vGYVfqUOvUDxH6bC6aJTxPxTF0GnIgCyu7tjockiUWAYQRbxa7vKn34s5sQ==", "dependencies": { "pify": "^3.0.0" }, "engines": { "node": ">=4" } }, "node_modules/decompress/node_modules/make-dir/node_modules/pify": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", "integrity": 
"sha512-C3FsVNH1udSEX48gGX1xfvwTWfsYWj5U+8/uK15BGzIGrKoUpghX8hWZwa/OFnakBiiVNmBvemTJR5mcy7iPcg==", "engines": { "node": ">=4" } }, "node_modules/decompress/node_modules/pify": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", "integrity": "sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog==", "engines": { "node": ">=0.10.0" } }, "node_modules/delegates": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/delegates/-/delegates-1.0.0.tgz", "integrity": "sha512-bd2L678uiWATM6m5Z1VzNCErI3jiGzt6HGY8OVICs40JQq/HALfbyNJmp0UDakEY4pMMaN0Ly5om/B1VI/+xfQ==" }, "node_modules/detect-libc": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.0.3.tgz", "integrity": "sha512-bwy0MGW55bG41VqxxypOsdSdGqLwXPI/focwgTYCFMbdUiBAxLg9CFzG08sz2aqzknwiX7Hkl0bQENjg8iLByw==", "engines": { "node": ">=8" } }, "node_modules/download": { "version": "8.0.0", "resolved": "https://registry.npmjs.org/download/-/download-8.0.0.tgz", "integrity": "sha512-ASRY5QhDk7FK+XrQtQyvhpDKanLluEEQtWl/J7Lxuf/b+i8RYh997QeXvL85xitrmRKVlx9c7eTrcRdq2GS4eA==", "dependencies": { "archive-type": "^4.0.0", "content-disposition": "^0.5.2", "decompress": "^4.2.1", "ext-name": "^5.0.0", "file-type": "^11.1.0", "filenamify": "^3.0.0", "get-stream": "^4.1.0", "got": "^8.3.1", "make-dir": "^2.1.0", "p-event": "^2.1.0", "pify": "^4.0.1" }, "engines": { "node": ">=10" } }, "node_modules/download/node_modules/make-dir": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-2.1.0.tgz", "integrity": "sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA==", "dependencies": { "pify": "^4.0.1", "semver": "^5.6.0" }, "engines": { "node": ">=6" } }, "node_modules/download/node_modules/semver": { "version": "5.7.2", "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz", "integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==", "bin": { "semver": "bin/semver" } }, "node_modules/dprint-node": { "version": "1.0.8", "resolved": "https://registry.npmjs.org/dprint-node/-/dprint-node-1.0.8.tgz", "integrity": "sha512-iVKnUtYfGrYcW1ZAlfR/F59cUVL8QIhWoBJoSjkkdua/dkWIgjZfiLMeTjiB06X0ZLkQ0M2C1VbUj/CxkIf1zg==", "dependencies": { "detect-libc": "^1.0.3" } }, "node_modules/dprint-node/node_modules/detect-libc": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-1.0.3.tgz", "integrity": "sha512-pGjwhsmsp4kL2RTz08wcOlGN83otlqHeD/Z5T8GXZB+/YcpQ/dgo+lbU8ZsGxV0HIvqqxo9l7mqYwyYMD9bKDg==", "bin": { "detect-libc": "bin/detect-libc.js" }, "engines": { "node": ">=0.10" } }, "node_modules/duplexer3": { "version": "0.1.5", "resolved": "https://registry.npmjs.org/duplexer3/-/duplexer3-0.1.5.tgz", "integrity": "sha512-1A8za6ws41LQgv9HrE/66jyC5yuSjQ3L/KOpFtoBilsAK2iA2wuS5rTt1OCzIvtS2V7nVmedsUU+DGRcjBmOYA==" }, "node_modules/emoji-regex": { "version": "8.0.0", "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" }, "node_modules/end-of-stream": { "version": "1.4.4", "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz", "integrity": "sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==", "dependencies": { "once": "^1.4.0" } }, "node_modules/escape-string-regexp": { "version": "1.0.5", "resolved": 
"https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", "engines": { "node": ">=0.8.0" } }, "node_modules/ext-list": { "version": "2.2.2", "resolved": "https://registry.npmjs.org/ext-list/-/ext-list-2.2.2.tgz", "integrity": "sha512-u+SQgsubraE6zItfVA0tBuCBhfU9ogSRnsvygI7wht9TS510oLkBRXBsqopeUG/GBOIQyKZO9wjTqIu/sf5zFA==", "dependencies": { "mime-db": "^1.28.0" }, "engines": { "node": ">=0.10.0" } }, "node_modules/ext-name": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/ext-name/-/ext-name-5.0.0.tgz", "integrity": "sha512-yblEwXAbGv1VQDmow7s38W77hzAgJAO50ztBLMcUyUBfxv1HC+LGwtiEN+Co6LtlqT/5uwVOxsD4TNIilWhwdQ==", "dependencies": { "ext-list": "^2.0.0", "sort-keys-length": "^1.0.0" }, "engines": { "node": ">=4" } }, "node_modules/fd-slicer": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/fd-slicer/-/fd-slicer-1.1.0.tgz", "integrity": "sha512-cE1qsB/VwyQozZ+q1dGxR8LBYNZeofhEdUNGSMbQD3Gw2lAzX9Zb3uIU6Ebc/Fmyjo9AWWfnn0AUCHqtevs/8g==", "dependencies": { "pend": "~1.2.0" } }, "node_modules/file-type": { "version": "11.1.0", "resolved": "https://registry.npmjs.org/file-type/-/file-type-11.1.0.tgz", "integrity": "sha512-rM0UO7Qm9K7TWTtA6AShI/t7H5BPjDeGVDaNyg9BjHAj3PysKy7+8C8D137R88jnR3rFJZQB/tFgydl5sN5m7g==", "engines": { "node": ">=6" } }, "node_modules/filename-reserved-regex": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/filename-reserved-regex/-/filename-reserved-regex-2.0.0.tgz", "integrity": "sha512-lc1bnsSr4L4Bdif8Xb/qrtokGbq5zlsms/CYH8PP+WtCkGNF65DPiQY8vG3SakEdRn8Dlnm+gW/qWKKjS5sZzQ==", "engines": { "node": ">=4" } }, "node_modules/filenamify": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/filenamify/-/filenamify-3.0.0.tgz", "integrity": "sha512-5EFZ//MsvJgXjBAFJ+Bh2YaCTRF/VP1YOmGrgt+KJ4SFRLjI87EIdwLLuT6wQX0I4F9W41xutobzczjsOKlI/g==", "dependencies": { "filename-reserved-regex": "^2.0.0", "strip-outer": "^1.0.0", "trim-repeated": "^1.0.0" }, "engines": { "node": ">=6" } }, "node_modules/from2": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/from2/-/from2-2.3.0.tgz", "integrity": "sha512-OMcX/4IC/uqEPVgGeyfN22LJk6AZrMkRZHxcHBMBvHScDGgwTm2GT2Wkgtocyd3JfZffjj2kYUDXXII0Fk9W0g==", "dependencies": { "inherits": "^2.0.1", "readable-stream": "^2.0.0" } }, "node_modules/from2/node_modules/readable-stream": { "version": "2.3.8", "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz", "integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==", "dependencies": { "core-util-is": "~1.0.0", "inherits": "~2.0.3", "isarray": "~1.0.0", "process-nextick-args": "~2.0.0", "safe-buffer": "~5.1.1", "string_decoder": "~1.1.1", "util-deprecate": "~1.0.1" } }, "node_modules/from2/node_modules/safe-buffer": { "version": "5.1.2", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" }, "node_modules/from2/node_modules/string_decoder": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", "dependencies": { "safe-buffer": "~5.1.0" } }, "node_modules/fs-constants": { "version": "1.0.0", "resolved": 
"https://registry.npmjs.org/fs-constants/-/fs-constants-1.0.0.tgz", "integrity": "sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==" }, "node_modules/fs-extra": { "version": "11.2.0", "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-11.2.0.tgz", "integrity": "sha512-PmDi3uwK5nFuXh7XDTlVnS17xJS7vW36is2+w3xcv8SVxiB4NyATf4ctkVY5bkSjX0Y4nbvZCq1/EjtEyr9ktw==", "dependencies": { "graceful-fs": "^4.2.0", "jsonfile": "^6.0.1", "universalify": "^2.0.0" }, "engines": { "node": ">=14.14" } }, "node_modules/fs-minipass": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-2.1.0.tgz", "integrity": "sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==", "dependencies": { "minipass": "^3.0.0" }, "engines": { "node": ">= 8" } }, "node_modules/fs-minipass/node_modules/minipass": { "version": "3.3.6", "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", "dependencies": { "yallist": "^4.0.0" }, "engines": { "node": ">=8" } }, "node_modules/fs.realpath": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==" }, "node_modules/gauge": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/gauge/-/gauge-3.0.2.tgz", "integrity": "sha512-+5J6MS/5XksCuXq++uFRsnUd7Ovu1XenbeuIuNRJxYWjgQbPuFhT14lAvsWfqfAmnwluf1OwMjz39HjfLPci0Q==", "deprecated": "This package is no longer supported.", "dependencies": { "aproba": "^1.0.3 || ^2.0.0", "color-support": "^1.1.2", "console-control-strings": "^1.0.0", "has-unicode": "^2.0.1", "object-assign": "^4.1.1", "signal-exit": "^3.0.0", "string-width": "^4.2.3", "strip-ansi": "^6.0.1", "wide-align": "^1.1.2" }, "engines": { "node": ">=10" } }, "node_modules/get-stream": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-4.1.0.tgz", "integrity": "sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w==", "dependencies": { "pump": "^3.0.0" }, "engines": { "node": ">=6" } }, "node_modules/glob": { "version": "7.2.3", "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", "deprecated": "Glob versions prior to v9 are no longer supported", "dependencies": { "fs.realpath": "^1.0.0", "inflight": "^1.0.4", "inherits": "2", "minimatch": "^3.1.1", "once": "^1.3.0", "path-is-absolute": "^1.0.0" }, "engines": { "node": "*" }, "funding": { "url": "https://github.com/sponsors/isaacs" } }, "node_modules/google-protobuf": { "version": "3.15.8", "resolved": "https://registry.npmjs.org/google-protobuf/-/google-protobuf-3.15.8.tgz", "integrity": "sha512-2jtfdqTaSxk0cuBJBtTTWsot4WtR9RVr2rXg7x7OoqiuOKopPrwXpM1G4dXIkLcUNRh3RKzz76C8IOkksZSeOw==" }, "node_modules/got": { "version": "8.3.2", "resolved": "https://registry.npmjs.org/got/-/got-8.3.2.tgz", "integrity": "sha512-qjUJ5U/hawxosMryILofZCkm3C84PLJS/0grRIpjAwu+Lkxxj5cxeCU25BG0/3mDSpXKTyZr8oh8wIgLaH0QCw==", "dependencies": { "@sindresorhus/is": "^0.7.0", "cacheable-request": "^2.1.1", "decompress-response": "^3.3.0", "duplexer3": "^0.1.4", "get-stream": "^3.0.0", "into-stream": "^3.1.0", "is-retry-allowed": "^1.1.0", "isurl": "^1.0.0-alpha5", 
"lowercase-keys": "^1.0.0", "mimic-response": "^1.0.0", "p-cancelable": "^0.4.0", "p-timeout": "^2.0.1", "pify": "^3.0.0", "safe-buffer": "^5.1.1", "timed-out": "^4.0.1", "url-parse-lax": "^3.0.0", "url-to-options": "^1.0.1" }, "engines": { "node": ">=4" } }, "node_modules/got/node_modules/get-stream": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-3.0.0.tgz", "integrity": "sha512-GlhdIUuVakc8SJ6kK0zAFbiGzRFzNnY4jUuEbV9UROo4Y+0Ny4fjvcZFVTeDA4odpFyOQzaw6hXukJSq/f28sQ==", "engines": { "node": ">=4" } }, "node_modules/got/node_modules/pify": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", "integrity": "sha512-C3FsVNH1udSEX48gGX1xfvwTWfsYWj5U+8/uK15BGzIGrKoUpghX8hWZwa/OFnakBiiVNmBvemTJR5mcy7iPcg==", "engines": { "node": ">=4" } }, "node_modules/graceful-fs": { "version": "4.2.11", "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==" }, "node_modules/grpc_tools_node_protoc_ts": { "version": "5.3.3", "resolved": "https://registry.npmjs.org/grpc_tools_node_protoc_ts/-/grpc_tools_node_protoc_ts-5.3.3.tgz", "integrity": "sha512-M/YrklvVXMtuuj9kb42PxeouZhs7Ul+R4e/31XwrankUcKL8cQQP50Q9q+KEHGyHQaPt6VtKKsxMgLaKbCxeww==", "dependencies": { "google-protobuf": "3.15.8", "handlebars": "4.7.7" }, "bin": { "protoc-gen-ts": "bin/protoc-gen-ts" } }, "node_modules/grpc-tools": { "version": "1.13.0", "resolved": "https://registry.npmjs.org/grpc-tools/-/grpc-tools-1.13.0.tgz", "integrity": "sha512-7CbkJ1yWPfX0nHjbYG58BQThNhbICXBZynzCUxCb3LzX5X9B3hQbRY2STiRgIEiLILlK9fgl0z0QVGwPCdXf5g==", "hasInstallScript": true, "dependencies": { "@mapbox/node-pre-gyp": "^1.0.5" }, "bin": { "grpc_tools_node_protoc": "bin/protoc.js", "grpc_tools_node_protoc_plugin": "bin/protoc_plugin.js" } }, "node_modules/handlebars": { "version": "4.7.7", "resolved": "https://registry.npmjs.org/handlebars/-/handlebars-4.7.7.tgz", "integrity": "sha512-aAcXm5OAfE/8IXkcZvCepKU3VzW1/39Fb5ZuqMtgI/hT8X2YgoMvBY5dLhq/cpOvw7Lk1nK/UF71aLG/ZnVYRA==", "dependencies": { "minimist": "^1.2.5", "neo-async": "^2.6.0", "source-map": "^0.6.1", "wordwrap": "^1.0.0" }, "bin": { "handlebars": "bin/handlebars" }, "engines": { "node": ">=0.4.7" }, "optionalDependencies": { "uglify-js": "^3.1.4" } }, "node_modules/has-symbol-support-x": { "version": "1.4.2", "resolved": "https://registry.npmjs.org/has-symbol-support-x/-/has-symbol-support-x-1.4.2.tgz", "integrity": "sha512-3ToOva++HaW+eCpgqZrCfN51IPB+7bJNVT6CUATzueB5Heb8o6Nam0V3HG5dlDvZU1Gn5QLcbahiKw/XVk5JJw==", "engines": { "node": "*" } }, "node_modules/has-to-string-tag-x": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/has-to-string-tag-x/-/has-to-string-tag-x-1.4.1.tgz", "integrity": "sha512-vdbKfmw+3LoOYVr+mtxHaX5a96+0f3DljYd8JOqvOLsf5mw2Otda2qCDT9qRqLAhrjyQ0h7ual5nOiASpsGNFw==", "dependencies": { "has-symbol-support-x": "^1.4.1" }, "engines": { "node": "*" } }, "node_modules/has-unicode": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/has-unicode/-/has-unicode-2.0.1.tgz", "integrity": "sha512-8Rf9Y83NBReMnx0gFzA8JImQACstCYWUplepDa9xprwwtmgEZUF0h/i5xSA625zB/I37EtrswSST6OXxwaaIJQ==" }, "node_modules/http-cache-semantics": { "version": "3.8.1", "resolved": "https://registry.npmjs.org/http-cache-semantics/-/http-cache-semantics-3.8.1.tgz", "integrity": "sha512-5ai2iksyV8ZXmnZhHH4rWPoxxistEexSi5936zIQ1bnNTW5VnA85B6P/VpXiRM017IgRvb2kKo1a//y+0wSp3w==" }, 
"node_modules/https-proxy-agent": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz", "integrity": "sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==", "dependencies": { "agent-base": "6", "debug": "4" }, "engines": { "node": ">= 6" } }, "node_modules/ieee754": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==", "funding": [ { "type": "github", "url": "https://github.com/sponsors/feross" }, { "type": "patreon", "url": "https://www.patreon.com/feross" }, { "type": "consulting", "url": "https://feross.org/support" } ] }, "node_modules/inflight": { "version": "1.0.6", "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", "deprecated": "This module is not supported, and leaks memory. Do not use it. Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.", "dependencies": { "once": "^1.3.0", "wrappy": "1" } }, "node_modules/inherits": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" }, "node_modules/into-stream": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/into-stream/-/into-stream-3.1.0.tgz", "integrity": "sha512-TcdjPibTksa1NQximqep2r17ISRiNE9fwlfbg3F8ANdvP5/yrFTew86VcO//jk4QTaMlbjypPBq76HN2zaKfZQ==", "dependencies": { "from2": "^2.1.1", "p-is-promise": "^1.1.0" }, "engines": { "node": ">=4" } }, "node_modules/is-fullwidth-code-point": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", "engines": { "node": ">=8" } }, "node_modules/is-natural-number": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/is-natural-number/-/is-natural-number-4.0.1.tgz", "integrity": "sha512-Y4LTamMe0DDQIIAlaer9eKebAlDSV6huy+TWhJVPlzZh2o4tRP5SQWFlLn5N0To4mDD22/qdOq+veo1cSISLgQ==" }, "node_modules/is-object": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/is-object/-/is-object-1.0.2.tgz", "integrity": "sha512-2rRIahhZr2UWb45fIOuvZGpFtz0TyOZLf32KxBbSoUCeZR495zCKlWUKKUByk3geS2eAs7ZAABt0Y/Rx0GiQGA==", "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/is-plain-obj": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-1.1.0.tgz", "integrity": "sha512-yvkRyxmFKEOQ4pNXCmJG5AEQNlXJS5LaONXo5/cLdTZdWvsZ1ioJEonLGAosKlMWE8lwUy/bJzMjcw8az73+Fg==", "engines": { "node": ">=0.10.0" } }, "node_modules/is-retry-allowed": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/is-retry-allowed/-/is-retry-allowed-1.2.0.tgz", "integrity": "sha512-RUbUeKwvm3XG2VYamhJL1xFktgjvPzL0Hq8C+6yrWIswDy3BIXGqCxhxkc30N9jqK311gVU137K8Ei55/zVJRg==", "engines": { "node": ">=0.10.0" } }, "node_modules/is-stream": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-1.1.0.tgz", "integrity": "sha512-uQPm8kcs47jx38atAcWTVxyltQYoPT68y9aWYdV6yWXSyW8mzSat0TL6CiWdZeCdF3KrAvpVtnHbTv4RN+rqdQ==", "engines": { "node": 
">=0.10.0" } }, "node_modules/isarray": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==" }, "node_modules/isurl": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/isurl/-/isurl-1.0.0.tgz", "integrity": "sha512-1P/yWsxPlDtn7QeRD+ULKQPaIaN6yF368GZ2vDfv0AL0NwpStafjWCDDdn0k8wgFMWpVAqG7oJhxHnlud42i9w==", "dependencies": { "has-to-string-tag-x": "^1.2.0", "is-object": "^1.0.1" }, "engines": { "node": ">= 4" } }, "node_modules/json-buffer": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.0.tgz", "integrity": "sha512-CuUqjv0FUZIdXkHPI8MezCnFCdaTAacej1TZYulLoAg1h/PhwkdXFN4V/gzY4g+fMBCOV2xF+rp7t2XD2ns/NQ==" }, "node_modules/jsonfile": { "version": "6.1.0", "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz", "integrity": "sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==", "dependencies": { "universalify": "^2.0.0" }, "optionalDependencies": { "graceful-fs": "^4.1.6" } }, "node_modules/keyv": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/keyv/-/keyv-3.0.0.tgz", "integrity": "sha512-eguHnq22OE3uVoSYG0LVWNP+4ppamWr9+zWBe1bsNcovIMy6huUJFPgy4mGwCd/rnl3vOLGW1MTlu4c57CT1xA==", "dependencies": { "json-buffer": "3.0.0" } }, "node_modules/lowercase-keys": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-1.0.1.tgz", "integrity": "sha512-G2Lj61tXDnVFFOi8VZds+SoQjtQC3dgokKdDG2mTm1tx4m50NUHBOZSBwQQHyy0V12A0JTG4icfZQH+xPyh8VA==", "engines": { "node": ">=0.10.0" } }, "node_modules/make-dir": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", "integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==", "dependencies": { "semver": "^6.0.0" }, "engines": { "node": ">=8" }, "funding": { "url": "https://github.com/sponsors/sindresorhus" } }, "node_modules/make-dir/node_modules/semver": { "version": "6.3.1", "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", "bin": { "semver": "bin/semver.js" } }, "node_modules/mime-db": { "version": "1.53.0", "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.53.0.tgz", "integrity": "sha512-oHlN/w+3MQ3rba9rqFr6V/ypF10LSkdwUysQL7GkXoTgIWeV+tcXGA852TBxH+gsh8UWoyhR1hKcoMJTuWflpg==", "engines": { "node": ">= 0.6" } }, "node_modules/mimic-response": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-1.0.1.tgz", "integrity": "sha512-j5EctnkH7amfV/q5Hgmoal1g2QHFJRraOtmx0JpIqkxhBhI/lJSl1nMpQ45hVarwNETOoWEimndZ4QK0RHxuxQ==", "engines": { "node": ">=4" } }, "node_modules/minimatch": { "version": "3.1.2", "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", "dependencies": { "brace-expansion": "^1.1.7" }, "engines": { "node": "*" } }, "node_modules/minimist": { "version": "1.2.8", "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==", "funding": { "url": "https://github.com/sponsors/ljharb" } }, "node_modules/minipass": { "version": 
"5.0.0", "resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz", "integrity": "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==", "engines": { "node": ">=8" } }, "node_modules/minizlib": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-2.1.2.tgz", "integrity": "sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==", "dependencies": { "minipass": "^3.0.0", "yallist": "^4.0.0" }, "engines": { "node": ">= 8" } }, "node_modules/minizlib/node_modules/minipass": { "version": "3.3.6", "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", "dependencies": { "yallist": "^4.0.0" }, "engines": { "node": ">=8" } }, "node_modules/mkdirp": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==", "bin": { "mkdirp": "bin/cmd.js" }, "engines": { "node": ">=10" } }, "node_modules/ms": { "version": "2.1.3", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==" }, "node_modules/neo-async": { "version": "2.6.2", "resolved": "https://registry.npmjs.org/neo-async/-/neo-async-2.6.2.tgz", "integrity": "sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==" }, "node_modules/node-fetch": { "version": "2.7.0", "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz", "integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==", "dependencies": { "whatwg-url": "^5.0.0" }, "engines": { "node": "4.x || >=6.0.0" }, "peerDependencies": { "encoding": "^0.1.0" }, "peerDependenciesMeta": { "encoding": { "optional": true } } }, "node_modules/nopt": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/nopt/-/nopt-5.0.0.tgz", "integrity": "sha512-Tbj67rffqceeLpcRXrT7vKAN8CwfPeIBgM7E6iBkmKLV7bEMwpGgYLGv0jACUsECaa/vuxP0IjEont6umdMgtQ==", "dependencies": { "abbrev": "1" }, "bin": { "nopt": "bin/nopt.js" }, "engines": { "node": ">=6" } }, "node_modules/normalize-url": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/normalize-url/-/normalize-url-2.0.1.tgz", "integrity": "sha512-D6MUW4K/VzoJ4rJ01JFKxDrtY1v9wrgzCX5f2qj/lzH1m/lW6MhUZFKerVsnyjOhOsYzI9Kqqak+10l4LvLpMw==", "dependencies": { "prepend-http": "^2.0.0", "query-string": "^5.0.1", "sort-keys": "^2.0.0" }, "engines": { "node": ">=4" } }, "node_modules/npmlog": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/npmlog/-/npmlog-5.0.1.tgz", "integrity": "sha512-AqZtDUWOMKs1G/8lwylVjrdYgqA4d9nu8hc+0gzRxlDb1I10+FHBGMXs6aiQHFdCUUlqH99MUMuLfzWDNDtfxw==", "deprecated": "This package is no longer supported.", "dependencies": { "are-we-there-yet": "^2.0.0", "console-control-strings": "^1.1.0", "gauge": "^3.0.0", "set-blocking": "^2.0.0" } }, "node_modules/object-assign": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", "integrity": "sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==", "engines": { "node": ">=0.10.0" } }, "node_modules/once": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", 
"integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", "dependencies": { "wrappy": "1" } }, "node_modules/p-cancelable": { "version": "0.4.1", "resolved": "https://registry.npmjs.org/p-cancelable/-/p-cancelable-0.4.1.tgz", "integrity": "sha512-HNa1A8LvB1kie7cERyy21VNeHb2CWJJYqyyC2o3klWFfMGlFmWv2Z7sFgZH8ZiaYL95ydToKTFVXgMV/Os0bBQ==", "engines": { "node": ">=4" } }, "node_modules/p-event": { "version": "2.3.1", "resolved": "https://registry.npmjs.org/p-event/-/p-event-2.3.1.tgz", "integrity": "sha512-NQCqOFhbpVTMX4qMe8PF8lbGtzZ+LCiN7pcNrb/413Na7+TRoe1xkKUzuWa/YEJdGQ0FvKtj35EEbDoVPO2kbA==", "dependencies": { "p-timeout": "^2.0.1" }, "engines": { "node": ">=6" } }, "node_modules/p-finally": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/p-finally/-/p-finally-1.0.0.tgz", "integrity": "sha512-LICb2p9CB7FS+0eR1oqWnHhp0FljGLZCWBE9aix0Uye9W8LTQPwMTYVGWQWIw9RdQiDg4+epXQODwIYJtSJaow==", "engines": { "node": ">=4" } }, "node_modules/p-is-promise": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/p-is-promise/-/p-is-promise-1.1.0.tgz", "integrity": "sha512-zL7VE4JVS2IFSkR2GQKDSPEVxkoH43/p7oEnwpdCndKYJO0HVeRB7fA8TJwuLOTBREtK0ea8eHaxdwcpob5dmg==", "engines": { "node": ">=4" } }, "node_modules/p-timeout": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/p-timeout/-/p-timeout-2.0.1.tgz", "integrity": "sha512-88em58dDVB/KzPEx1X0N3LwFfYZPyDc4B6eF38M1rk9VTZMbxXXgjugz8mmwpS9Ox4BDZ+t6t3QP5+/gazweIA==", "dependencies": { "p-finally": "^1.0.0" }, "engines": { "node": ">=4" } }, "node_modules/path-is-absolute": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", "engines": { "node": ">=0.10.0" } }, "node_modules/pend": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/pend/-/pend-1.2.0.tgz", "integrity": "sha512-F3asv42UuXchdzt+xXqfW1OGlVBe+mxa2mqI0pg5yAHZPvFmY3Y6drSf/GQ1A86WgWEN9Kzh/WrgKa6iGcHXLg==" }, "node_modules/pify": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/pify/-/pify-4.0.1.tgz", "integrity": "sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==", "engines": { "node": ">=6" } }, "node_modules/pinkie": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/pinkie/-/pinkie-2.0.4.tgz", "integrity": "sha512-MnUuEycAemtSaeFSjXKW/aroV7akBbY+Sv+RkyqFjgAe73F+MR0TBWKBRDkmfWq/HiFmdavfZ1G7h4SPZXaCSg==", "engines": { "node": ">=0.10.0" } }, "node_modules/pinkie-promise": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/pinkie-promise/-/pinkie-promise-2.0.1.tgz", "integrity": "sha512-0Gni6D4UcLTbv9c57DfxDGdr41XfgUjqWZu492f0cIGr16zDU06BWP/RAEvOuo7CQ0CNjHaLlM59YJJFm3NWlw==", "dependencies": { "pinkie": "^2.0.0" }, "engines": { "node": ">=0.10.0" } }, "node_modules/prepend-http": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/prepend-http/-/prepend-http-2.0.0.tgz", "integrity": "sha512-ravE6m9Atw9Z/jjttRUZ+clIXogdghyZAuWJ3qEzjT+jI/dL1ifAqhZeC5VHzQp1MSt1+jxKkFNemj/iO7tVUA==", "engines": { "node": ">=4" } }, "node_modules/process-nextick-args": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==" }, "node_modules/protoc-gen-grpc-web": { "version": "1.5.0", "resolved": 
"https://registry.npmjs.org/protoc-gen-grpc-web/-/protoc-gen-grpc-web-1.5.0.tgz", "integrity": "sha512-esVfqdxCZyLdoil7qbZVTpFuWyksgdJ2ETBpAi/oaRpl0wHOyCyhithGa/rPOPdNE4lKVOagjtybBwQaxtfcgQ==", "hasInstallScript": true, "dependencies": { "download": "^8.0.0", "fs-extra": "^11.2.0" }, "bin": { "protoc-gen-grpc-web": "cli.js" } }, "node_modules/pump": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/pump/-/pump-3.0.2.tgz", "integrity": "sha512-tUPXtzlGM8FE3P0ZL6DVs/3P58k9nk8/jZeQCurTJylQA8qFYzHFfhBJkuqyE0FifOsQ0uKWekiZ5g8wtr28cw==", "dependencies": { "end-of-stream": "^1.1.0", "once": "^1.3.1" } }, "node_modules/query-string": { "version": "5.1.1", "resolved": "https://registry.npmjs.org/query-string/-/query-string-5.1.1.tgz", "integrity": "sha512-gjWOsm2SoGlgLEdAGt7a6slVOk9mGiXmPFMqrEhLQ68rhQuBnpfs3+EmlvqKyxnCo9/PPlF+9MtY02S1aFg+Jw==", "dependencies": { "decode-uri-component": "^0.2.0", "object-assign": "^4.1.0", "strict-uri-encode": "^1.0.0" }, "engines": { "node": ">=0.10.0" } }, "node_modules/readable-stream": { "version": "3.6.2", "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", "dependencies": { "inherits": "^2.0.3", "string_decoder": "^1.1.1", "util-deprecate": "^1.0.1" }, "engines": { "node": ">= 6" } }, "node_modules/responselike": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/responselike/-/responselike-1.0.2.tgz", "integrity": "sha512-/Fpe5guzJk1gPqdJLJR5u7eG/gNY4nImjbRDaVWVMRhne55TCmj2i9Q+54PBRfatRC8v/rIiv9BN0pMd9OV5EQ==", "dependencies": { "lowercase-keys": "^1.0.0" } }, "node_modules/rimraf": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", "deprecated": "Rimraf versions prior to v4 are no longer supported", "dependencies": { "glob": "^7.1.3" }, "bin": { "rimraf": "bin.js" }, "funding": { "url": "https://github.com/sponsors/isaacs" } }, "node_modules/safe-buffer": { "version": "5.2.1", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", "funding": [ { "type": "github", "url": "https://github.com/sponsors/feross" }, { "type": "patreon", "url": "https://www.patreon.com/feross" }, { "type": "consulting", "url": "https://feross.org/support" } ] }, "node_modules/seek-bzip": { "version": "1.0.6", "resolved": "https://registry.npmjs.org/seek-bzip/-/seek-bzip-1.0.6.tgz", "integrity": "sha512-e1QtP3YL5tWww8uKaOCQ18UxIT2laNBXHjV/S2WYCiK4udiv8lkG89KRIoCjUagnAmCBurjF4zEVX2ByBbnCjQ==", "dependencies": { "commander": "^2.8.1" }, "bin": { "seek-bunzip": "bin/seek-bunzip", "seek-table": "bin/seek-bzip-table" } }, "node_modules/semver": { "version": "7.6.3", "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==", "bin": { "semver": "bin/semver.js" }, "engines": { "node": ">=10" } }, "node_modules/set-blocking": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", "integrity": "sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw==" }, "node_modules/signal-exit": { "version": "3.0.7", "resolved": 
"https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==" }, "node_modules/sort-keys": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/sort-keys/-/sort-keys-2.0.0.tgz", "integrity": "sha512-/dPCrG1s3ePpWm6yBbxZq5Be1dXGLyLn9Z791chDC3NFrpkVbWGzkBwPN1knaciexFXgRJ7hzdnwZ4stHSDmjg==", "dependencies": { "is-plain-obj": "^1.0.0" }, "engines": { "node": ">=4" } }, "node_modules/sort-keys-length": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/sort-keys-length/-/sort-keys-length-1.0.1.tgz", "integrity": "sha512-GRbEOUqCxemTAk/b32F2xa8wDTs+Z1QHOkbhJDQTvv/6G3ZkbJ+frYWsTcc7cBB3Fu4wy4XlLCuNtJuMn7Gsvw==", "dependencies": { "sort-keys": "^1.0.0" }, "engines": { "node": ">=0.10.0" } }, "node_modules/sort-keys-length/node_modules/sort-keys": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/sort-keys/-/sort-keys-1.1.2.tgz", "integrity": "sha512-vzn8aSqKgytVik0iwdBEi+zevbTYZogewTUM6dtpmGwEcdzbub/TX4bCzRhebDCRC3QzXgJsLRKB2V/Oof7HXg==", "dependencies": { "is-plain-obj": "^1.0.0" }, "engines": { "node": ">=0.10.0" } }, "node_modules/source-map": { "version": "0.6.1", "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", "engines": { "node": ">=0.10.0" } }, "node_modules/strict-uri-encode": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/strict-uri-encode/-/strict-uri-encode-1.1.0.tgz", "integrity": "sha512-R3f198pcvnB+5IpnBlRkphuE9n46WyVl8I39W/ZUTZLz4nqSP/oLYUrcnJrw462Ds8he4YKMov2efsTIw1BDGQ==", "engines": { "node": ">=0.10.0" } }, "node_modules/string_decoder": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", "dependencies": { "safe-buffer": "~5.2.0" } }, "node_modules/string-width": { "version": "4.2.3", "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", "dependencies": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", "strip-ansi": "^6.0.1" }, "engines": { "node": ">=8" } }, "node_modules/strip-ansi": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", "dependencies": { "ansi-regex": "^5.0.1" }, "engines": { "node": ">=8" } }, "node_modules/strip-dirs": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/strip-dirs/-/strip-dirs-2.1.0.tgz", "integrity": "sha512-JOCxOeKLm2CAS73y/U4ZeZPTkE+gNVCzKt7Eox84Iej1LT/2pTWYpZKJuxwQpvX1LiZb1xokNR7RLfuBAa7T3g==", "dependencies": { "is-natural-number": "^4.0.1" } }, "node_modules/strip-outer": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/strip-outer/-/strip-outer-1.0.1.tgz", "integrity": "sha512-k55yxKHwaXnpYGsOzg4Vl8+tDrWylxDEpknGjhTiZB8dFRU5rTo9CAzeycivxV3s+zlTKwrs6WxMxR95n26kwg==", "dependencies": { "escape-string-regexp": "^1.0.2" }, "engines": { "node": ">=0.10.0" } }, "node_modules/tar": { "version": "6.2.1", "resolved": "https://registry.npmjs.org/tar/-/tar-6.2.1.tgz", "integrity": 
"sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A==", "dependencies": { "chownr": "^2.0.0", "fs-minipass": "^2.0.0", "minipass": "^5.0.0", "minizlib": "^2.1.1", "mkdirp": "^1.0.3", "yallist": "^4.0.0" }, "engines": { "node": ">=10" } }, "node_modules/tar-stream": { "version": "1.6.2", "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-1.6.2.tgz", "integrity": "sha512-rzS0heiNf8Xn7/mpdSVVSMAWAoy9bfb1WOTYC78Z0UQKeKa/CWS8FOq0lKGNa8DWKAn9gxjCvMLYc5PGXYlK2A==", "dependencies": { "bl": "^1.0.0", "buffer-alloc": "^1.2.0", "end-of-stream": "^1.0.0", "fs-constants": "^1.0.0", "readable-stream": "^2.3.0", "to-buffer": "^1.1.1", "xtend": "^4.0.0" }, "engines": { "node": ">= 0.8.0" } }, "node_modules/tar-stream/node_modules/readable-stream": { "version": "2.3.8", "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz", "integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==", "dependencies": { "core-util-is": "~1.0.0", "inherits": "~2.0.3", "isarray": "~1.0.0", "process-nextick-args": "~2.0.0", "safe-buffer": "~5.1.1", "string_decoder": "~1.1.1", "util-deprecate": "~1.0.1" } }, "node_modules/tar-stream/node_modules/safe-buffer": { "version": "5.1.2", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" }, "node_modules/tar-stream/node_modules/string_decoder": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", "dependencies": { "safe-buffer": "~5.1.0" } }, "node_modules/through": { "version": "2.3.8", "resolved": "https://registry.npmjs.org/through/-/through-2.3.8.tgz", "integrity": "sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg==" }, "node_modules/timed-out": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/timed-out/-/timed-out-4.0.1.tgz", "integrity": "sha512-G7r3AhovYtr5YKOWQkta8RKAPb+J9IsO4uVmzjl8AZwfhs8UcUwTiD6gcJYSgOtzyjvQKrKYn41syHbUWMkafA==", "engines": { "node": ">=0.10.0" } }, "node_modules/to-buffer": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/to-buffer/-/to-buffer-1.1.1.tgz", "integrity": "sha512-lx9B5iv7msuFYE3dytT+KE5tap+rNYw+K4jVkb9R/asAb+pbBSM17jtunHplhBe6RRJdZx3Pn2Jph24O32mOVg==" }, "node_modules/tr46": { "version": "0.0.3", "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==" }, "node_modules/trim-repeated": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/trim-repeated/-/trim-repeated-1.0.0.tgz", "integrity": "sha512-pkonvlKk8/ZuR0D5tLW8ljt5I8kmxp2XKymhepUeOdCEfKpZaktSArkLHZt76OB1ZvO9bssUsDty4SWhLvZpLg==", "dependencies": { "escape-string-regexp": "^1.0.2" }, "engines": { "node": ">=0.10.0" } }, "node_modules/ts-poet": { "version": "6.9.0", "resolved": "https://registry.npmjs.org/ts-poet/-/ts-poet-6.9.0.tgz", "integrity": "sha512-roe6W6MeZmCjRmppyfOURklO5tQFQ6Sg7swURKkwYJvV7dbGCrK28um5+51iW3twdPRKtwarqFAVMU6G1mvnuQ==", "dependencies": { "dprint-node": "^1.0.8" } }, "node_modules/ts-proto": { "version": "2.6.1", "resolved": "https://registry.npmjs.org/ts-proto/-/ts-proto-2.6.1.tgz", "integrity": 
"sha512-4LTT99MkwkF1+fIA0b2mZu/58Qlpq3Q1g53TwEMZZgR1w/uX00PoVT4Z8aKJxMw0LeKQD4s9NrJYsF27Clckrg==", "dependencies": { "@bufbuild/protobuf": "^2.0.0", "case-anything": "^2.1.13", "ts-poet": "^6.7.0", "ts-proto-descriptors": "2.0.0" }, "bin": { "protoc-gen-ts_proto": "protoc-gen-ts_proto" } }, "node_modules/ts-proto-descriptors": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/ts-proto-descriptors/-/ts-proto-descriptors-2.0.0.tgz", "integrity": "sha512-wHcTH3xIv11jxgkX5OyCSFfw27agpInAd6yh89hKG6zqIXnjW9SYqSER2CVQxdPj4czeOhGagNvZBEbJPy7qkw==", "dependencies": { "@bufbuild/protobuf": "^2.0.0" } }, "node_modules/uglify-js": { "version": "3.19.3", "resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-3.19.3.tgz", "integrity": "sha512-v3Xu+yuwBXisp6QYTcH4UbH+xYJXqnq2m/LtQVWKWzYc1iehYnLixoQDN9FH6/j9/oybfd6W9Ghwkl8+UMKTKQ==", "optional": true, "bin": { "uglifyjs": "bin/uglifyjs" }, "engines": { "node": ">=0.8.0" } }, "node_modules/unbzip2-stream": { "version": "1.4.3", "resolved": "https://registry.npmjs.org/unbzip2-stream/-/unbzip2-stream-1.4.3.tgz", "integrity": "sha512-mlExGW4w71ebDJviH16lQLtZS32VKqsSfk80GCfUlwT/4/hNRFsoscrF/c++9xinkMzECL1uL9DDwXqFWkruPg==", "dependencies": { "buffer": "^5.2.1", "through": "^2.3.8" } }, "node_modules/universalify": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.1.tgz", "integrity": "sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==", "engines": { "node": ">= 10.0.0" } }, "node_modules/url-parse-lax": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/url-parse-lax/-/url-parse-lax-3.0.0.tgz", "integrity": "sha512-NjFKA0DidqPa5ciFcSrXnAltTtzz84ogy+NebPvfEgAck0+TNg4UJ4IN+fB7zRZfbgUf0syOo9MDxFkDSMuFaQ==", "dependencies": { "prepend-http": "^2.0.0" }, "engines": { "node": ">=4" } }, "node_modules/url-to-options": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/url-to-options/-/url-to-options-1.0.1.tgz", "integrity": "sha512-0kQLIzG4fdk/G5NONku64rSH/x32NOA39LVQqlK8Le6lvTF6GGRJpqaQFGgU+CLwySIqBSMdwYM0sYcW9f6P4A==", "engines": { "node": ">= 4" } }, "node_modules/util-deprecate": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==" }, "node_modules/webidl-conversions": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==" }, "node_modules/whatwg-url": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", "dependencies": { "tr46": "~0.0.3", "webidl-conversions": "^3.0.0" } }, "node_modules/wide-align": { "version": "1.1.5", "resolved": "https://registry.npmjs.org/wide-align/-/wide-align-1.1.5.tgz", "integrity": "sha512-eDMORYaPNZ4sQIuuYPDHdQvf4gyCF9rEEV/yPxGfwPkRodwEgiMUUXTx/dex+Me0wxx53S+NgUHaP7y3MGlDmg==", "dependencies": { "string-width": "^1.0.2 || 2 || 3 || 4" } }, "node_modules/wordwrap": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-1.0.0.tgz", "integrity": "sha512-gvVzJFlPycKc5dZN4yPkP8w7Dc37BtP1yczEneOb4uq34pXZcvrtRTmWV8W+Ume+XCxKgbjM+nevkyFPMybd4Q==" }, "node_modules/wrappy": { "version": "1.0.2", "resolved": 
"https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" }, "node_modules/xtend": { "version": "4.0.2", "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==", "engines": { "node": ">=0.4" } }, "node_modules/yallist": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" }, "node_modules/yauzl": { "version": "2.10.0", "resolved": "https://registry.npmjs.org/yauzl/-/yauzl-2.10.0.tgz", "integrity": "sha512-p4a9I6X6nu6IhoGmBqAcbJy1mlC4j27vEPZX9F4L4/vZT3Lyq1VkFHw/V/PUcB9Buo+DG3iHkT0x3Qya58zc3g==", "dependencies": { "buffer-crc32": "~0.2.3", "fd-slicer": "~1.1.0" } } }, "dependencies": { "@bufbuild/protobuf": { "version": "2.2.3", "resolved": "https://registry.npmjs.org/@bufbuild/protobuf/-/protobuf-2.2.3.tgz", "integrity": "sha512-tFQoXHJdkEOSwj5tRIZSPNUuXK3RaR7T1nUrPgbYX1pUbvqqaaZAsfo+NXBPsz5rZMSKVFrgK1WL8Q/MSLvprg==" }, "@mapbox/node-pre-gyp": { "version": "1.0.11", "resolved": "https://registry.npmjs.org/@mapbox/node-pre-gyp/-/node-pre-gyp-1.0.11.tgz", "integrity": "sha512-Yhlar6v9WQgUp/He7BdgzOz8lqMQ8sU+jkCq7Wx8Myc5YFJLbEe7lgui/V7G1qB1DJykHSGwreceSaD60Y0PUQ==", "requires": { "detect-libc": "^2.0.0", "https-proxy-agent": "^5.0.0", "make-dir": "^3.1.0", "node-fetch": "^2.6.7", "nopt": "^5.0.0", "npmlog": "^5.0.1", "rimraf": "^3.0.2", "semver": "^7.3.5", "tar": "^6.1.11" } }, "@sindresorhus/is": { "version": "0.7.0", "resolved": "https://registry.npmjs.org/@sindresorhus/is/-/is-0.7.0.tgz", "integrity": "sha512-ONhaKPIufzzrlNbqtWFFd+jlnemX6lJAgq9ZeiZtS7I1PIf/la7CW4m83rTXRnVnsMbW2k56pGYu7AUFJD9Pow==" }, "abbrev": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-1.1.1.tgz", "integrity": "sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==" }, "agent-base": { "version": "6.0.2", "resolved": "https://registry.npmjs.org/agent-base/-/agent-base-6.0.2.tgz", "integrity": "sha512-RZNwNclF7+MS/8bDg70amg32dyeZGZxiDuQmZxKLAlQjr3jGyLx+4Kkk58UO7D2QdgFIQCovuSuZESne6RG6XQ==", "requires": { "debug": "4" } }, "ansi-regex": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==" }, "aproba": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/aproba/-/aproba-2.0.0.tgz", "integrity": "sha512-lYe4Gx7QT+MKGbDsA+Z+he/Wtef0BiwDOlK/XkBrdfsh9J/jPPXbX0tE9x9cl27Tmu5gg3QUbUrQYa/y+KOHPQ==" }, "archive-type": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/archive-type/-/archive-type-4.0.0.tgz", "integrity": "sha512-zV4Ky0v1F8dBrdYElwTvQhweQ0P7Kwc1aluqJsYtOBP01jXcWCyW2IEfI1YiqsG+Iy7ZR+o5LF1N+PGECBxHWA==", "requires": { "file-type": "^4.2.0" }, "dependencies": { "file-type": { "version": "4.4.0", "resolved": "https://registry.npmjs.org/file-type/-/file-type-4.4.0.tgz", "integrity": "sha512-f2UbFQEk7LXgWpi5ntcO86OeA/cC80fuDDDaX/fZ2ZGel+AF7leRQqBBW1eJNiiQkrZlAoM6P+VYP5P6bOlDEQ==" } } }, "are-we-there-yet": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/are-we-there-yet/-/are-we-there-yet-2.0.0.tgz", "integrity": 
"sha512-Ci/qENmwHnsYo9xKIcUJN5LeDKdJ6R1Z1j9V/J5wyq8nh/mYPEpIKJbBZXtZjG04HiK7zV/p6Vs9952MrMeUIw==", "requires": { "delegates": "^1.0.0", "readable-stream": "^3.6.0" } }, "balanced-match": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" }, "base64-js": { "version": "1.5.1", "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==" }, "bl": { "version": "1.2.3", "resolved": "https://registry.npmjs.org/bl/-/bl-1.2.3.tgz", "integrity": "sha512-pvcNpa0UU69UT341rO6AYy4FVAIkUHuZXRIWbq+zHnsVcRzDDjIAhGuuYoi0d//cwIwtt4pkpKycWEfjdV+vww==", "requires": { "readable-stream": "^2.3.5", "safe-buffer": "^5.1.1" }, "dependencies": { "readable-stream": { "version": "2.3.8", "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz", "integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==", "requires": { "core-util-is": "~1.0.0", "inherits": "~2.0.3", "isarray": "~1.0.0", "process-nextick-args": "~2.0.0", "safe-buffer": "~5.1.1", "string_decoder": "~1.1.1", "util-deprecate": "~1.0.1" } }, "safe-buffer": { "version": "5.1.2", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" }, "string_decoder": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", "requires": { "safe-buffer": "~5.1.0" } } } }, "brace-expansion": { "version": "1.1.11", "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", "requires": { "balanced-match": "^1.0.0", "concat-map": "0.0.1" } }, "buffer": { "version": "5.7.1", "resolved": "https://registry.npmjs.org/buffer/-/buffer-5.7.1.tgz", "integrity": "sha512-EHcyIPBQ4BSGlvjB16k5KgAJ27CIsHY/2JBmCRReo48y9rQ3MaUzWX3KVlBa4U7MyX02HdVj0K7C3WaB3ju7FQ==", "requires": { "base64-js": "^1.3.1", "ieee754": "^1.1.13" } }, "buffer-alloc": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/buffer-alloc/-/buffer-alloc-1.2.0.tgz", "integrity": "sha512-CFsHQgjtW1UChdXgbyJGtnm+O/uLQeZdtbDo8mfUgYXCHSM1wgrVxXm6bSyrUuErEb+4sYVGCzASBRot7zyrow==", "requires": { "buffer-alloc-unsafe": "^1.1.0", "buffer-fill": "^1.0.0" } }, "buffer-alloc-unsafe": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/buffer-alloc-unsafe/-/buffer-alloc-unsafe-1.1.0.tgz", "integrity": "sha512-TEM2iMIEQdJ2yjPJoSIsldnleVaAk1oW3DBVUykyOLsEsFmEc9kn+SFFPz+gl54KQNxlDnAwCXosOS9Okx2xAg==" }, "buffer-crc32": { "version": "0.2.13", "resolved": "https://registry.npmjs.org/buffer-crc32/-/buffer-crc32-0.2.13.tgz", "integrity": "sha512-VO9Ht/+p3SN7SKWqcrgEzjGbRSJYTx+Q1pTQC0wrWqHx0vpJraQ6GtHx8tvcg1rlK1byhU5gccxgOgj7B0TDkQ==" }, "buffer-fill": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/buffer-fill/-/buffer-fill-1.0.0.tgz", "integrity": "sha512-T7zexNBwiiaCOGDg9xNX9PBmjrubblRkENuptryuI64URkXDFum9il/JGL8Lm8wYfAXpredVXXZz7eMHilimiQ==" }, "cacheable-request": { "version": "2.1.4", "resolved": 
"https://registry.npmjs.org/cacheable-request/-/cacheable-request-2.1.4.tgz", "integrity": "sha512-vag0O2LKZ/najSoUwDbVlnlCFvhBE/7mGTY2B5FgCBDcRD+oVV1HYTOwM6JZfMg/hIcM6IwnTZ1uQQL5/X3xIQ==", "requires": { "clone-response": "1.0.2", "get-stream": "3.0.0", "http-cache-semantics": "3.8.1", "keyv": "3.0.0", "lowercase-keys": "1.0.0", "normalize-url": "2.0.1", "responselike": "1.0.2" }, "dependencies": { "get-stream": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-3.0.0.tgz", "integrity": "sha512-GlhdIUuVakc8SJ6kK0zAFbiGzRFzNnY4jUuEbV9UROo4Y+0Ny4fjvcZFVTeDA4odpFyOQzaw6hXukJSq/f28sQ==" }, "lowercase-keys": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-1.0.0.tgz", "integrity": "sha512-RPlX0+PHuvxVDZ7xX+EBVAp4RsVxP/TdDSN2mJYdiq1Lc4Hz7EUSjUI7RZrKKlmrIzVhf6Jo2stj7++gVarS0A==" } } }, "case-anything": { "version": "2.1.13", "resolved": "https://registry.npmjs.org/case-anything/-/case-anything-2.1.13.tgz", "integrity": "sha512-zlOQ80VrQ2Ue+ymH5OuM/DlDq64mEm+B9UTdHULv5osUMD6HalNTblf2b1u/m6QecjsnOkBpqVZ+XPwIVsy7Ng==" }, "chownr": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/chownr/-/chownr-2.0.0.tgz", "integrity": "sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==" }, "clone-response": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/clone-response/-/clone-response-1.0.2.tgz", "integrity": "sha512-yjLXh88P599UOyPTFX0POsd7WxnbsVsGohcwzHOLspIhhpalPw1BcqED8NblyZLKcGrL8dTgMlcaZxV2jAD41Q==", "requires": { "mimic-response": "^1.0.0" } }, "color-support": { "version": "1.1.3", "resolved": "https://registry.npmjs.org/color-support/-/color-support-1.1.3.tgz", "integrity": "sha512-qiBjkpbMLO/HL68y+lh4q0/O1MZFj2RX6X/KmMa3+gJD3z+WwI1ZzDHysvqHGS3mP6mznPckpXmw1nI9cJjyRg==" }, "commander": { "version": "2.20.3", "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==" }, "concat-map": { "version": "0.0.1", "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==" }, "console-control-strings": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/console-control-strings/-/console-control-strings-1.1.0.tgz", "integrity": "sha512-ty/fTekppD2fIwRvnZAVdeOiGd1c7YXEixbgJTNzqcxJWKQnjJ/V1bNEEE6hygpM3WjwHFUVK6HTjWSzV4a8sQ==" }, "content-disposition": { "version": "0.5.4", "resolved": "https://registry.npmjs.org/content-disposition/-/content-disposition-0.5.4.tgz", "integrity": "sha512-FveZTNuGw04cxlAiWbzi6zTAL/lhehaWbTtgluJh4/E95DqMwTmha3KZN1aAWA8cFIhHzMZUvLevkw5Rqk+tSQ==", "requires": { "safe-buffer": "5.2.1" } }, "core-util-is": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz", "integrity": "sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ==" }, "debug": { "version": "4.4.0", "resolved": "https://registry.npmjs.org/debug/-/debug-4.4.0.tgz", "integrity": "sha512-6WTZ/IxCY/T6BALoZHaE4ctp9xm+Z5kY/pzYaCHRFeyVhojxlrm+46y68HA6hr0TcwEssoxNiDEUJQjfPZ/RYA==", "requires": { "ms": "^2.1.3" } }, "decode-uri-component": { "version": "0.2.2", "resolved": "https://registry.npmjs.org/decode-uri-component/-/decode-uri-component-0.2.2.tgz", "integrity": 
"sha512-FqUYQ+8o158GyGTrMFJms9qh3CqTKvAqgqsTnkLI8sKu0028orqBhxNMFkFen0zGyg6epACD32pjVk58ngIErQ==" }, "decompress": { "version": "4.2.1", "resolved": "https://registry.npmjs.org/decompress/-/decompress-4.2.1.tgz", "integrity": "sha512-e48kc2IjU+2Zw8cTb6VZcJQ3lgVbS4uuB1TfCHbiZIP/haNXm+SVyhu+87jts5/3ROpd82GSVCoNs/z8l4ZOaQ==", "requires": { "decompress-tar": "^4.0.0", "decompress-tarbz2": "^4.0.0", "decompress-targz": "^4.0.0", "decompress-unzip": "^4.0.1", "graceful-fs": "^4.1.10", "make-dir": "^1.0.0", "pify": "^2.3.0", "strip-dirs": "^2.0.0" }, "dependencies": { "make-dir": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-1.3.0.tgz", "integrity": "sha512-2w31R7SJtieJJnQtGc7RVL2StM2vGYVfqUOvUDxH6bC6aJTxPxTF0GnIgCyu7tjockiUWAYQRbxa7vKn34s5sQ==", "requires": { "pify": "^3.0.0" }, "dependencies": { "pify": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", "integrity": "sha512-C3FsVNH1udSEX48gGX1xfvwTWfsYWj5U+8/uK15BGzIGrKoUpghX8hWZwa/OFnakBiiVNmBvemTJR5mcy7iPcg==" } } }, "pify": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", "integrity": "sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog==" } } }, "decompress-response": { "version": "3.3.0", "resolved": "https://registry.npmjs.org/decompress-response/-/decompress-response-3.3.0.tgz", "integrity": "sha512-BzRPQuY1ip+qDonAOz42gRm/pg9F768C+npV/4JOsxRC2sq+Rlk+Q4ZCAsOhnIaMrgarILY+RMUIvMmmX1qAEA==", "requires": { "mimic-response": "^1.0.0" } }, "decompress-tar": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/decompress-tar/-/decompress-tar-4.1.1.tgz", "integrity": "sha512-JdJMaCrGpB5fESVyxwpCx4Jdj2AagLmv3y58Qy4GE6HMVjWz1FeVQk1Ct4Kye7PftcdOo/7U7UKzYBJgqnGeUQ==", "requires": { "file-type": "^5.2.0", "is-stream": "^1.1.0", "tar-stream": "^1.5.2" }, "dependencies": { "file-type": { "version": "5.2.0", "resolved": "https://registry.npmjs.org/file-type/-/file-type-5.2.0.tgz", "integrity": "sha512-Iq1nJ6D2+yIO4c8HHg4fyVb8mAJieo1Oloy1mLLaB2PvezNedhBVm+QU7g0qM42aiMbRXTxKKwGD17rjKNJYVQ==" } } }, "decompress-tarbz2": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/decompress-tarbz2/-/decompress-tarbz2-4.1.1.tgz", "integrity": "sha512-s88xLzf1r81ICXLAVQVzaN6ZmX4A6U4z2nMbOwobxkLoIIfjVMBg7TeguTUXkKeXni795B6y5rnvDw7rxhAq9A==", "requires": { "decompress-tar": "^4.1.0", "file-type": "^6.1.0", "is-stream": "^1.1.0", "seek-bzip": "^1.0.5", "unbzip2-stream": "^1.0.9" }, "dependencies": { "file-type": { "version": "6.2.0", "resolved": "https://registry.npmjs.org/file-type/-/file-type-6.2.0.tgz", "integrity": "sha512-YPcTBDV+2Tm0VqjybVd32MHdlEGAtuxS3VAYsumFokDSMG+ROT5wawGlnHDoz7bfMcMDt9hxuXvXwoKUx2fkOg==" } } }, "decompress-targz": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/decompress-targz/-/decompress-targz-4.1.1.tgz", "integrity": "sha512-4z81Znfr6chWnRDNfFNqLwPvm4db3WuZkqV+UgXQzSngG3CEKdBkw5jrv3axjjL96glyiiKjsxJG3X6WBZwX3w==", "requires": { "decompress-tar": "^4.1.1", "file-type": "^5.2.0", "is-stream": "^1.1.0" }, "dependencies": { "file-type": { "version": "5.2.0", "resolved": "https://registry.npmjs.org/file-type/-/file-type-5.2.0.tgz", "integrity": "sha512-Iq1nJ6D2+yIO4c8HHg4fyVb8mAJieo1Oloy1mLLaB2PvezNedhBVm+QU7g0qM42aiMbRXTxKKwGD17rjKNJYVQ==" } } }, "decompress-unzip": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/decompress-unzip/-/decompress-unzip-4.0.1.tgz", "integrity": 
"sha512-1fqeluvxgnn86MOh66u8FjbtJpAFv5wgCT9Iw8rcBqQcCo5tO8eiJw7NNTrvt9n4CRBVq7CstiS922oPgyGLrw==", "requires": { "file-type": "^3.8.0", "get-stream": "^2.2.0", "pify": "^2.3.0", "yauzl": "^2.4.2" }, "dependencies": { "file-type": { "version": "3.9.0", "resolved": "https://registry.npmjs.org/file-type/-/file-type-3.9.0.tgz", "integrity": "sha512-RLoqTXE8/vPmMuTI88DAzhMYC99I8BWv7zYP4A1puo5HIjEJ5EX48ighy4ZyKMG9EDXxBgW6e++cn7d1xuFghA==" }, "get-stream": { "version": "2.3.1", "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-2.3.1.tgz", "integrity": "sha512-AUGhbbemXxrZJRD5cDvKtQxLuYaIbNtDTK8YqupCI393Q2KSTreEsLUN3ZxAWFGiKTzL6nKuzfcIvieflUX9qA==", "requires": { "object-assign": "^4.0.1", "pinkie-promise": "^2.0.0" } }, "pify": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", "integrity": "sha512-udgsAY+fTnvv7kI7aaxbqwWNb0AHiB0qBO89PZKPkoTmGOgdbrHDKD+0B2X4uTfJ/FT1R09r9gTsjUjNJotuog==" } } }, "delegates": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/delegates/-/delegates-1.0.0.tgz", "integrity": "sha512-bd2L678uiWATM6m5Z1VzNCErI3jiGzt6HGY8OVICs40JQq/HALfbyNJmp0UDakEY4pMMaN0Ly5om/B1VI/+xfQ==" }, "detect-libc": { "version": "2.0.3", "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.0.3.tgz", "integrity": "sha512-bwy0MGW55bG41VqxxypOsdSdGqLwXPI/focwgTYCFMbdUiBAxLg9CFzG08sz2aqzknwiX7Hkl0bQENjg8iLByw==" }, "download": { "version": "8.0.0", "resolved": "https://registry.npmjs.org/download/-/download-8.0.0.tgz", "integrity": "sha512-ASRY5QhDk7FK+XrQtQyvhpDKanLluEEQtWl/J7Lxuf/b+i8RYh997QeXvL85xitrmRKVlx9c7eTrcRdq2GS4eA==", "requires": { "archive-type": "^4.0.0", "content-disposition": "^0.5.2", "decompress": "^4.2.1", "ext-name": "^5.0.0", "file-type": "^11.1.0", "filenamify": "^3.0.0", "get-stream": "^4.1.0", "got": "^8.3.1", "make-dir": "^2.1.0", "p-event": "^2.1.0", "pify": "^4.0.1" }, "dependencies": { "make-dir": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-2.1.0.tgz", "integrity": "sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA==", "requires": { "pify": "^4.0.1", "semver": "^5.6.0" } }, "semver": { "version": "5.7.2", "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz", "integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==" } } }, "dprint-node": { "version": "1.0.8", "resolved": "https://registry.npmjs.org/dprint-node/-/dprint-node-1.0.8.tgz", "integrity": "sha512-iVKnUtYfGrYcW1ZAlfR/F59cUVL8QIhWoBJoSjkkdua/dkWIgjZfiLMeTjiB06X0ZLkQ0M2C1VbUj/CxkIf1zg==", "requires": { "detect-libc": "^1.0.3" }, "dependencies": { "detect-libc": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-1.0.3.tgz", "integrity": "sha512-pGjwhsmsp4kL2RTz08wcOlGN83otlqHeD/Z5T8GXZB+/YcpQ/dgo+lbU8ZsGxV0HIvqqxo9l7mqYwyYMD9bKDg==" } } }, "duplexer3": { "version": "0.1.5", "resolved": "https://registry.npmjs.org/duplexer3/-/duplexer3-0.1.5.tgz", "integrity": "sha512-1A8za6ws41LQgv9HrE/66jyC5yuSjQ3L/KOpFtoBilsAK2iA2wuS5rTt1OCzIvtS2V7nVmedsUU+DGRcjBmOYA==" }, "emoji-regex": { "version": "8.0.0", "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==" }, "end-of-stream": { "version": "1.4.4", "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.4.tgz", "integrity": 
"sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q==", "requires": { "once": "^1.4.0" } }, "escape-string-regexp": { "version": "1.0.5", "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==" }, "ext-list": { "version": "2.2.2", "resolved": "https://registry.npmjs.org/ext-list/-/ext-list-2.2.2.tgz", "integrity": "sha512-u+SQgsubraE6zItfVA0tBuCBhfU9ogSRnsvygI7wht9TS510oLkBRXBsqopeUG/GBOIQyKZO9wjTqIu/sf5zFA==", "requires": { "mime-db": "^1.28.0" } }, "ext-name": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/ext-name/-/ext-name-5.0.0.tgz", "integrity": "sha512-yblEwXAbGv1VQDmow7s38W77hzAgJAO50ztBLMcUyUBfxv1HC+LGwtiEN+Co6LtlqT/5uwVOxsD4TNIilWhwdQ==", "requires": { "ext-list": "^2.0.0", "sort-keys-length": "^1.0.0" } }, "fd-slicer": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/fd-slicer/-/fd-slicer-1.1.0.tgz", "integrity": "sha512-cE1qsB/VwyQozZ+q1dGxR8LBYNZeofhEdUNGSMbQD3Gw2lAzX9Zb3uIU6Ebc/Fmyjo9AWWfnn0AUCHqtevs/8g==", "requires": { "pend": "~1.2.0" } }, "file-type": { "version": "11.1.0", "resolved": "https://registry.npmjs.org/file-type/-/file-type-11.1.0.tgz", "integrity": "sha512-rM0UO7Qm9K7TWTtA6AShI/t7H5BPjDeGVDaNyg9BjHAj3PysKy7+8C8D137R88jnR3rFJZQB/tFgydl5sN5m7g==" }, "filename-reserved-regex": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/filename-reserved-regex/-/filename-reserved-regex-2.0.0.tgz", "integrity": "sha512-lc1bnsSr4L4Bdif8Xb/qrtokGbq5zlsms/CYH8PP+WtCkGNF65DPiQY8vG3SakEdRn8Dlnm+gW/qWKKjS5sZzQ==" }, "filenamify": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/filenamify/-/filenamify-3.0.0.tgz", "integrity": "sha512-5EFZ//MsvJgXjBAFJ+Bh2YaCTRF/VP1YOmGrgt+KJ4SFRLjI87EIdwLLuT6wQX0I4F9W41xutobzczjsOKlI/g==", "requires": { "filename-reserved-regex": "^2.0.0", "strip-outer": "^1.0.0", "trim-repeated": "^1.0.0" } }, "from2": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/from2/-/from2-2.3.0.tgz", "integrity": "sha512-OMcX/4IC/uqEPVgGeyfN22LJk6AZrMkRZHxcHBMBvHScDGgwTm2GT2Wkgtocyd3JfZffjj2kYUDXXII0Fk9W0g==", "requires": { "inherits": "^2.0.1", "readable-stream": "^2.0.0" }, "dependencies": { "readable-stream": { "version": "2.3.8", "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz", "integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==", "requires": { "core-util-is": "~1.0.0", "inherits": "~2.0.3", "isarray": "~1.0.0", "process-nextick-args": "~2.0.0", "safe-buffer": "~5.1.1", "string_decoder": "~1.1.1", "util-deprecate": "~1.0.1" } }, "safe-buffer": { "version": "5.1.2", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" }, "string_decoder": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", "requires": { "safe-buffer": "~5.1.0" } } } }, "fs-constants": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fs-constants/-/fs-constants-1.0.0.tgz", "integrity": "sha512-y6OAwoSIf7FyjMIv94u+b5rdheZEjzR63GTyZJm5qh4Bi+2YgwLCcI/fPFZkL5PSixOt6ZNKm+w+Hfp/Bciwow==" }, "fs-extra": { "version": "11.2.0", "resolved": 
"https://registry.npmjs.org/fs-extra/-/fs-extra-11.2.0.tgz", "integrity": "sha512-PmDi3uwK5nFuXh7XDTlVnS17xJS7vW36is2+w3xcv8SVxiB4NyATf4ctkVY5bkSjX0Y4nbvZCq1/EjtEyr9ktw==", "requires": { "graceful-fs": "^4.2.0", "jsonfile": "^6.0.1", "universalify": "^2.0.0" } }, "fs-minipass": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-2.1.0.tgz", "integrity": "sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==", "requires": { "minipass": "^3.0.0" }, "dependencies": { "minipass": { "version": "3.3.6", "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", "requires": { "yallist": "^4.0.0" } } } }, "fs.realpath": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==" }, "gauge": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/gauge/-/gauge-3.0.2.tgz", "integrity": "sha512-+5J6MS/5XksCuXq++uFRsnUd7Ovu1XenbeuIuNRJxYWjgQbPuFhT14lAvsWfqfAmnwluf1OwMjz39HjfLPci0Q==", "requires": { "aproba": "^1.0.3 || ^2.0.0", "color-support": "^1.1.2", "console-control-strings": "^1.0.0", "has-unicode": "^2.0.1", "object-assign": "^4.1.1", "signal-exit": "^3.0.0", "string-width": "^4.2.3", "strip-ansi": "^6.0.1", "wide-align": "^1.1.2" } }, "get-stream": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-4.1.0.tgz", "integrity": "sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w==", "requires": { "pump": "^3.0.0" } }, "glob": { "version": "7.2.3", "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", "requires": { "fs.realpath": "^1.0.0", "inflight": "^1.0.4", "inherits": "2", "minimatch": "^3.1.1", "once": "^1.3.0", "path-is-absolute": "^1.0.0" } }, "google-protobuf": { "version": "3.15.8", "resolved": "https://registry.npmjs.org/google-protobuf/-/google-protobuf-3.15.8.tgz", "integrity": "sha512-2jtfdqTaSxk0cuBJBtTTWsot4WtR9RVr2rXg7x7OoqiuOKopPrwXpM1G4dXIkLcUNRh3RKzz76C8IOkksZSeOw==" }, "got": { "version": "8.3.2", "resolved": "https://registry.npmjs.org/got/-/got-8.3.2.tgz", "integrity": "sha512-qjUJ5U/hawxosMryILofZCkm3C84PLJS/0grRIpjAwu+Lkxxj5cxeCU25BG0/3mDSpXKTyZr8oh8wIgLaH0QCw==", "requires": { "@sindresorhus/is": "^0.7.0", "cacheable-request": "^2.1.1", "decompress-response": "^3.3.0", "duplexer3": "^0.1.4", "get-stream": "^3.0.0", "into-stream": "^3.1.0", "is-retry-allowed": "^1.1.0", "isurl": "^1.0.0-alpha5", "lowercase-keys": "^1.0.0", "mimic-response": "^1.0.0", "p-cancelable": "^0.4.0", "p-timeout": "^2.0.1", "pify": "^3.0.0", "safe-buffer": "^5.1.1", "timed-out": "^4.0.1", "url-parse-lax": "^3.0.0", "url-to-options": "^1.0.1" }, "dependencies": { "get-stream": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-3.0.0.tgz", "integrity": "sha512-GlhdIUuVakc8SJ6kK0zAFbiGzRFzNnY4jUuEbV9UROo4Y+0Ny4fjvcZFVTeDA4odpFyOQzaw6hXukJSq/f28sQ==" }, "pify": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/pify/-/pify-3.0.0.tgz", "integrity": "sha512-C3FsVNH1udSEX48gGX1xfvwTWfsYWj5U+8/uK15BGzIGrKoUpghX8hWZwa/OFnakBiiVNmBvemTJR5mcy7iPcg==" } } }, "graceful-fs": { "version": "4.2.11", "resolved": 
"https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==" }, "grpc_tools_node_protoc_ts": { "version": "5.3.3", "resolved": "https://registry.npmjs.org/grpc_tools_node_protoc_ts/-/grpc_tools_node_protoc_ts-5.3.3.tgz", "integrity": "sha512-M/YrklvVXMtuuj9kb42PxeouZhs7Ul+R4e/31XwrankUcKL8cQQP50Q9q+KEHGyHQaPt6VtKKsxMgLaKbCxeww==", "requires": { "google-protobuf": "3.15.8", "handlebars": "4.7.7" } }, "grpc-tools": { "version": "1.13.0", "resolved": "https://registry.npmjs.org/grpc-tools/-/grpc-tools-1.13.0.tgz", "integrity": "sha512-7CbkJ1yWPfX0nHjbYG58BQThNhbICXBZynzCUxCb3LzX5X9B3hQbRY2STiRgIEiLILlK9fgl0z0QVGwPCdXf5g==", "requires": { "@mapbox/node-pre-gyp": "^1.0.5" } }, "handlebars": { "version": "4.7.7", "resolved": "https://registry.npmjs.org/handlebars/-/handlebars-4.7.7.tgz", "integrity": "sha512-aAcXm5OAfE/8IXkcZvCepKU3VzW1/39Fb5ZuqMtgI/hT8X2YgoMvBY5dLhq/cpOvw7Lk1nK/UF71aLG/ZnVYRA==", "requires": { "minimist": "^1.2.5", "neo-async": "^2.6.0", "source-map": "^0.6.1", "uglify-js": "^3.1.4", "wordwrap": "^1.0.0" } }, "has-symbol-support-x": { "version": "1.4.2", "resolved": "https://registry.npmjs.org/has-symbol-support-x/-/has-symbol-support-x-1.4.2.tgz", "integrity": "sha512-3ToOva++HaW+eCpgqZrCfN51IPB+7bJNVT6CUATzueB5Heb8o6Nam0V3HG5dlDvZU1Gn5QLcbahiKw/XVk5JJw==" }, "has-to-string-tag-x": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/has-to-string-tag-x/-/has-to-string-tag-x-1.4.1.tgz", "integrity": "sha512-vdbKfmw+3LoOYVr+mtxHaX5a96+0f3DljYd8JOqvOLsf5mw2Otda2qCDT9qRqLAhrjyQ0h7ual5nOiASpsGNFw==", "requires": { "has-symbol-support-x": "^1.4.1" } }, "has-unicode": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/has-unicode/-/has-unicode-2.0.1.tgz", "integrity": "sha512-8Rf9Y83NBReMnx0gFzA8JImQACstCYWUplepDa9xprwwtmgEZUF0h/i5xSA625zB/I37EtrswSST6OXxwaaIJQ==" }, "http-cache-semantics": { "version": "3.8.1", "resolved": "https://registry.npmjs.org/http-cache-semantics/-/http-cache-semantics-3.8.1.tgz", "integrity": "sha512-5ai2iksyV8ZXmnZhHH4rWPoxxistEexSi5936zIQ1bnNTW5VnA85B6P/VpXiRM017IgRvb2kKo1a//y+0wSp3w==" }, "https-proxy-agent": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz", "integrity": "sha512-dFcAjpTQFgoLMzC2VwU+C/CbS7uRL0lWmxDITmqm7C+7F0Odmj6s9l6alZc6AELXhrnggM2CeWSXHGOdX2YtwA==", "requires": { "agent-base": "6", "debug": "4" } }, "ieee754": { "version": "1.2.1", "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==" }, "inflight": { "version": "1.0.6", "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", "requires": { "once": "^1.3.0", "wrappy": "1" } }, "inherits": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" }, "into-stream": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/into-stream/-/into-stream-3.1.0.tgz", "integrity": "sha512-TcdjPibTksa1NQximqep2r17ISRiNE9fwlfbg3F8ANdvP5/yrFTew86VcO//jk4QTaMlbjypPBq76HN2zaKfZQ==", "requires": { "from2": "^2.1.1", "p-is-promise": "^1.1.0" } }, "is-fullwidth-code-point": { "version": "3.0.0", "resolved": 
"https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==" }, "is-natural-number": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/is-natural-number/-/is-natural-number-4.0.1.tgz", "integrity": "sha512-Y4LTamMe0DDQIIAlaer9eKebAlDSV6huy+TWhJVPlzZh2o4tRP5SQWFlLn5N0To4mDD22/qdOq+veo1cSISLgQ==" }, "is-object": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/is-object/-/is-object-1.0.2.tgz", "integrity": "sha512-2rRIahhZr2UWb45fIOuvZGpFtz0TyOZLf32KxBbSoUCeZR495zCKlWUKKUByk3geS2eAs7ZAABt0Y/Rx0GiQGA==" }, "is-plain-obj": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-1.1.0.tgz", "integrity": "sha512-yvkRyxmFKEOQ4pNXCmJG5AEQNlXJS5LaONXo5/cLdTZdWvsZ1ioJEonLGAosKlMWE8lwUy/bJzMjcw8az73+Fg==" }, "is-retry-allowed": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/is-retry-allowed/-/is-retry-allowed-1.2.0.tgz", "integrity": "sha512-RUbUeKwvm3XG2VYamhJL1xFktgjvPzL0Hq8C+6yrWIswDy3BIXGqCxhxkc30N9jqK311gVU137K8Ei55/zVJRg==" }, "is-stream": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-1.1.0.tgz", "integrity": "sha512-uQPm8kcs47jx38atAcWTVxyltQYoPT68y9aWYdV6yWXSyW8mzSat0TL6CiWdZeCdF3KrAvpVtnHbTv4RN+rqdQ==" }, "isarray": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz", "integrity": "sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==" }, "isurl": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/isurl/-/isurl-1.0.0.tgz", "integrity": "sha512-1P/yWsxPlDtn7QeRD+ULKQPaIaN6yF368GZ2vDfv0AL0NwpStafjWCDDdn0k8wgFMWpVAqG7oJhxHnlud42i9w==", "requires": { "has-to-string-tag-x": "^1.2.0", "is-object": "^1.0.1" } }, "json-buffer": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.0.tgz", "integrity": "sha512-CuUqjv0FUZIdXkHPI8MezCnFCdaTAacej1TZYulLoAg1h/PhwkdXFN4V/gzY4g+fMBCOV2xF+rp7t2XD2ns/NQ==" }, "jsonfile": { "version": "6.1.0", "resolved": "https://registry.npmjs.org/jsonfile/-/jsonfile-6.1.0.tgz", "integrity": "sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==", "requires": { "graceful-fs": "^4.1.6", "universalify": "^2.0.0" } }, "keyv": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/keyv/-/keyv-3.0.0.tgz", "integrity": "sha512-eguHnq22OE3uVoSYG0LVWNP+4ppamWr9+zWBe1bsNcovIMy6huUJFPgy4mGwCd/rnl3vOLGW1MTlu4c57CT1xA==", "requires": { "json-buffer": "3.0.0" } }, "lowercase-keys": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/lowercase-keys/-/lowercase-keys-1.0.1.tgz", "integrity": "sha512-G2Lj61tXDnVFFOi8VZds+SoQjtQC3dgokKdDG2mTm1tx4m50NUHBOZSBwQQHyy0V12A0JTG4icfZQH+xPyh8VA==" }, "make-dir": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-3.1.0.tgz", "integrity": "sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw==", "requires": { "semver": "^6.0.0" }, "dependencies": { "semver": { "version": "6.3.1", "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==" } } }, "mime-db": { "version": "1.53.0", "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.53.0.tgz", "integrity": 
"sha512-oHlN/w+3MQ3rba9rqFr6V/ypF10LSkdwUysQL7GkXoTgIWeV+tcXGA852TBxH+gsh8UWoyhR1hKcoMJTuWflpg==" }, "mimic-response": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/mimic-response/-/mimic-response-1.0.1.tgz", "integrity": "sha512-j5EctnkH7amfV/q5Hgmoal1g2QHFJRraOtmx0JpIqkxhBhI/lJSl1nMpQ45hVarwNETOoWEimndZ4QK0RHxuxQ==" }, "minimatch": { "version": "3.1.2", "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", "requires": { "brace-expansion": "^1.1.7" } }, "minimist": { "version": "1.2.8", "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==" }, "minipass": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz", "integrity": "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==" }, "minizlib": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-2.1.2.tgz", "integrity": "sha512-bAxsR8BVfj60DWXHE3u30oHzfl4G7khkSuPW+qvpd7jFRHm7dLxOjUk1EHACJ/hxLY8phGJ0YhYHZo7jil7Qdg==", "requires": { "minipass": "^3.0.0", "yallist": "^4.0.0" }, "dependencies": { "minipass": { "version": "3.3.6", "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", "requires": { "yallist": "^4.0.0" } } } }, "mkdirp": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==" }, "ms": { "version": "2.1.3", "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==" }, "neo-async": { "version": "2.6.2", "resolved": "https://registry.npmjs.org/neo-async/-/neo-async-2.6.2.tgz", "integrity": "sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==" }, "node-fetch": { "version": "2.7.0", "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.7.0.tgz", "integrity": "sha512-c4FRfUm/dbcWZ7U+1Wq0AwCyFL+3nt2bEw05wfxSz+DWpWsitgmSgYmy2dQdWyKC1694ELPqMs/YzUSNozLt8A==", "requires": { "whatwg-url": "^5.0.0" } }, "nopt": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/nopt/-/nopt-5.0.0.tgz", "integrity": "sha512-Tbj67rffqceeLpcRXrT7vKAN8CwfPeIBgM7E6iBkmKLV7bEMwpGgYLGv0jACUsECaa/vuxP0IjEont6umdMgtQ==", "requires": { "abbrev": "1" } }, "normalize-url": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/normalize-url/-/normalize-url-2.0.1.tgz", "integrity": "sha512-D6MUW4K/VzoJ4rJ01JFKxDrtY1v9wrgzCX5f2qj/lzH1m/lW6MhUZFKerVsnyjOhOsYzI9Kqqak+10l4LvLpMw==", "requires": { "prepend-http": "^2.0.0", "query-string": "^5.0.1", "sort-keys": "^2.0.0" } }, "npmlog": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/npmlog/-/npmlog-5.0.1.tgz", "integrity": "sha512-AqZtDUWOMKs1G/8lwylVjrdYgqA4d9nu8hc+0gzRxlDb1I10+FHBGMXs6aiQHFdCUUlqH99MUMuLfzWDNDtfxw==", "requires": { "are-we-there-yet": "^2.0.0", "console-control-strings": "^1.1.0", "gauge": "^3.0.0", "set-blocking": "^2.0.0" } }, "object-assign": { "version": "4.1.1", "resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz", "integrity": 
"sha512-rJgTQnkUnH1sFw8yT6VSU3zD3sWmu6sZhIseY8VX+GRu3P6F7Fu+JNDoXfklElbLJSnc3FUQHVe4cU5hj+BcUg==" }, "once": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", "requires": { "wrappy": "1" } }, "p-cancelable": { "version": "0.4.1", "resolved": "https://registry.npmjs.org/p-cancelable/-/p-cancelable-0.4.1.tgz", "integrity": "sha512-HNa1A8LvB1kie7cERyy21VNeHb2CWJJYqyyC2o3klWFfMGlFmWv2Z7sFgZH8ZiaYL95ydToKTFVXgMV/Os0bBQ==" }, "p-event": { "version": "2.3.1", "resolved": "https://registry.npmjs.org/p-event/-/p-event-2.3.1.tgz", "integrity": "sha512-NQCqOFhbpVTMX4qMe8PF8lbGtzZ+LCiN7pcNrb/413Na7+TRoe1xkKUzuWa/YEJdGQ0FvKtj35EEbDoVPO2kbA==", "requires": { "p-timeout": "^2.0.1" } }, "p-finally": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/p-finally/-/p-finally-1.0.0.tgz", "integrity": "sha512-LICb2p9CB7FS+0eR1oqWnHhp0FljGLZCWBE9aix0Uye9W8LTQPwMTYVGWQWIw9RdQiDg4+epXQODwIYJtSJaow==" }, "p-is-promise": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/p-is-promise/-/p-is-promise-1.1.0.tgz", "integrity": "sha512-zL7VE4JVS2IFSkR2GQKDSPEVxkoH43/p7oEnwpdCndKYJO0HVeRB7fA8TJwuLOTBREtK0ea8eHaxdwcpob5dmg==" }, "p-timeout": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/p-timeout/-/p-timeout-2.0.1.tgz", "integrity": "sha512-88em58dDVB/KzPEx1X0N3LwFfYZPyDc4B6eF38M1rk9VTZMbxXXgjugz8mmwpS9Ox4BDZ+t6t3QP5+/gazweIA==", "requires": { "p-finally": "^1.0.0" } }, "path-is-absolute": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==" }, "pend": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/pend/-/pend-1.2.0.tgz", "integrity": "sha512-F3asv42UuXchdzt+xXqfW1OGlVBe+mxa2mqI0pg5yAHZPvFmY3Y6drSf/GQ1A86WgWEN9Kzh/WrgKa6iGcHXLg==" }, "pify": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/pify/-/pify-4.0.1.tgz", "integrity": "sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g==" }, "pinkie": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/pinkie/-/pinkie-2.0.4.tgz", "integrity": "sha512-MnUuEycAemtSaeFSjXKW/aroV7akBbY+Sv+RkyqFjgAe73F+MR0TBWKBRDkmfWq/HiFmdavfZ1G7h4SPZXaCSg==" }, "pinkie-promise": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/pinkie-promise/-/pinkie-promise-2.0.1.tgz", "integrity": "sha512-0Gni6D4UcLTbv9c57DfxDGdr41XfgUjqWZu492f0cIGr16zDU06BWP/RAEvOuo7CQ0CNjHaLlM59YJJFm3NWlw==", "requires": { "pinkie": "^2.0.0" } }, "prepend-http": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/prepend-http/-/prepend-http-2.0.0.tgz", "integrity": "sha512-ravE6m9Atw9Z/jjttRUZ+clIXogdghyZAuWJ3qEzjT+jI/dL1ifAqhZeC5VHzQp1MSt1+jxKkFNemj/iO7tVUA==" }, "process-nextick-args": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz", "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==" }, "protoc-gen-grpc-web": { "version": "1.5.0", "resolved": "https://registry.npmjs.org/protoc-gen-grpc-web/-/protoc-gen-grpc-web-1.5.0.tgz", "integrity": "sha512-esVfqdxCZyLdoil7qbZVTpFuWyksgdJ2ETBpAi/oaRpl0wHOyCyhithGa/rPOPdNE4lKVOagjtybBwQaxtfcgQ==", "requires": { "download": "^8.0.0", "fs-extra": "^11.2.0" } }, "pump": { "version": "3.0.2", "resolved": 
"https://registry.npmjs.org/pump/-/pump-3.0.2.tgz", "integrity": "sha512-tUPXtzlGM8FE3P0ZL6DVs/3P58k9nk8/jZeQCurTJylQA8qFYzHFfhBJkuqyE0FifOsQ0uKWekiZ5g8wtr28cw==", "requires": { "end-of-stream": "^1.1.0", "once": "^1.3.1" } }, "query-string": { "version": "5.1.1", "resolved": "https://registry.npmjs.org/query-string/-/query-string-5.1.1.tgz", "integrity": "sha512-gjWOsm2SoGlgLEdAGt7a6slVOk9mGiXmPFMqrEhLQ68rhQuBnpfs3+EmlvqKyxnCo9/PPlF+9MtY02S1aFg+Jw==", "requires": { "decode-uri-component": "^0.2.0", "object-assign": "^4.1.0", "strict-uri-encode": "^1.0.0" } }, "readable-stream": { "version": "3.6.2", "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", "requires": { "inherits": "^2.0.3", "string_decoder": "^1.1.1", "util-deprecate": "^1.0.1" } }, "responselike": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/responselike/-/responselike-1.0.2.tgz", "integrity": "sha512-/Fpe5guzJk1gPqdJLJR5u7eG/gNY4nImjbRDaVWVMRhne55TCmj2i9Q+54PBRfatRC8v/rIiv9BN0pMd9OV5EQ==", "requires": { "lowercase-keys": "^1.0.0" } }, "rimraf": { "version": "3.0.2", "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", "requires": { "glob": "^7.1.3" } }, "safe-buffer": { "version": "5.2.1", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==" }, "seek-bzip": { "version": "1.0.6", "resolved": "https://registry.npmjs.org/seek-bzip/-/seek-bzip-1.0.6.tgz", "integrity": "sha512-e1QtP3YL5tWww8uKaOCQ18UxIT2laNBXHjV/S2WYCiK4udiv8lkG89KRIoCjUagnAmCBurjF4zEVX2ByBbnCjQ==", "requires": { "commander": "^2.8.1" } }, "semver": { "version": "7.6.3", "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.3.tgz", "integrity": "sha512-oVekP1cKtI+CTDvHWYFUcMtsK/00wmAEfyqKfNdARm8u1wNVhSgaX7A8d4UuIlUI5e84iEwOhs7ZPYRmzU9U6A==" }, "set-blocking": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz", "integrity": "sha512-KiKBS8AnWGEyLzofFfmvKwpdPzqiy16LvQfK3yv/fVH7Bj13/wl3JSR1J+rfgRE9q7xUJK4qvgS8raSOeLUehw==" }, "signal-exit": { "version": "3.0.7", "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==" }, "sort-keys": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/sort-keys/-/sort-keys-2.0.0.tgz", "integrity": "sha512-/dPCrG1s3ePpWm6yBbxZq5Be1dXGLyLn9Z791chDC3NFrpkVbWGzkBwPN1knaciexFXgRJ7hzdnwZ4stHSDmjg==", "requires": { "is-plain-obj": "^1.0.0" } }, "sort-keys-length": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/sort-keys-length/-/sort-keys-length-1.0.1.tgz", "integrity": "sha512-GRbEOUqCxemTAk/b32F2xa8wDTs+Z1QHOkbhJDQTvv/6G3ZkbJ+frYWsTcc7cBB3Fu4wy4XlLCuNtJuMn7Gsvw==", "requires": { "sort-keys": "^1.0.0" }, "dependencies": { "sort-keys": { "version": "1.1.2", "resolved": "https://registry.npmjs.org/sort-keys/-/sort-keys-1.1.2.tgz", "integrity": "sha512-vzn8aSqKgytVik0iwdBEi+zevbTYZogewTUM6dtpmGwEcdzbub/TX4bCzRhebDCRC3QzXgJsLRKB2V/Oof7HXg==", "requires": { "is-plain-obj": "^1.0.0" } } } }, "source-map": { "version": "0.6.1", "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", "integrity": 
"sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==" }, "strict-uri-encode": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/strict-uri-encode/-/strict-uri-encode-1.1.0.tgz", "integrity": "sha512-R3f198pcvnB+5IpnBlRkphuE9n46WyVl8I39W/ZUTZLz4nqSP/oLYUrcnJrw462Ds8he4YKMov2efsTIw1BDGQ==" }, "string_decoder": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", "requires": { "safe-buffer": "~5.2.0" } }, "string-width": { "version": "4.2.3", "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", "requires": { "emoji-regex": "^8.0.0", "is-fullwidth-code-point": "^3.0.0", "strip-ansi": "^6.0.1" } }, "strip-ansi": { "version": "6.0.1", "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", "requires": { "ansi-regex": "^5.0.1" } }, "strip-dirs": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/strip-dirs/-/strip-dirs-2.1.0.tgz", "integrity": "sha512-JOCxOeKLm2CAS73y/U4ZeZPTkE+gNVCzKt7Eox84Iej1LT/2pTWYpZKJuxwQpvX1LiZb1xokNR7RLfuBAa7T3g==", "requires": { "is-natural-number": "^4.0.1" } }, "strip-outer": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/strip-outer/-/strip-outer-1.0.1.tgz", "integrity": "sha512-k55yxKHwaXnpYGsOzg4Vl8+tDrWylxDEpknGjhTiZB8dFRU5rTo9CAzeycivxV3s+zlTKwrs6WxMxR95n26kwg==", "requires": { "escape-string-regexp": "^1.0.2" } }, "tar": { "version": "6.2.1", "resolved": "https://registry.npmjs.org/tar/-/tar-6.2.1.tgz", "integrity": "sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A==", "requires": { "chownr": "^2.0.0", "fs-minipass": "^2.0.0", "minipass": "^5.0.0", "minizlib": "^2.1.1", "mkdirp": "^1.0.3", "yallist": "^4.0.0" } }, "tar-stream": { "version": "1.6.2", "resolved": "https://registry.npmjs.org/tar-stream/-/tar-stream-1.6.2.tgz", "integrity": "sha512-rzS0heiNf8Xn7/mpdSVVSMAWAoy9bfb1WOTYC78Z0UQKeKa/CWS8FOq0lKGNa8DWKAn9gxjCvMLYc5PGXYlK2A==", "requires": { "bl": "^1.0.0", "buffer-alloc": "^1.2.0", "end-of-stream": "^1.0.0", "fs-constants": "^1.0.0", "readable-stream": "^2.3.0", "to-buffer": "^1.1.1", "xtend": "^4.0.0" }, "dependencies": { "readable-stream": { "version": "2.3.8", "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz", "integrity": "sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA==", "requires": { "core-util-is": "~1.0.0", "inherits": "~2.0.3", "isarray": "~1.0.0", "process-nextick-args": "~2.0.0", "safe-buffer": "~5.1.1", "string_decoder": "~1.1.1", "util-deprecate": "~1.0.1" } }, "safe-buffer": { "version": "5.1.2", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" }, "string_decoder": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz", "integrity": "sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg==", "requires": { "safe-buffer": "~5.1.0" } } } }, "through": { "version": "2.3.8", "resolved": 
"https://registry.npmjs.org/through/-/through-2.3.8.tgz", "integrity": "sha512-w89qg7PI8wAdvX60bMDP+bFoD5Dvhm9oLheFp5O4a2QF0cSBGsBX4qZmadPMvVqlLJBBci+WqGGOAPvcDeNSVg==" }, "timed-out": { "version": "4.0.1", "resolved": "https://registry.npmjs.org/timed-out/-/timed-out-4.0.1.tgz", "integrity": "sha512-G7r3AhovYtr5YKOWQkta8RKAPb+J9IsO4uVmzjl8AZwfhs8UcUwTiD6gcJYSgOtzyjvQKrKYn41syHbUWMkafA==" }, "to-buffer": { "version": "1.1.1", "resolved": "https://registry.npmjs.org/to-buffer/-/to-buffer-1.1.1.tgz", "integrity": "sha512-lx9B5iv7msuFYE3dytT+KE5tap+rNYw+K4jVkb9R/asAb+pbBSM17jtunHplhBe6RRJdZx3Pn2Jph24O32mOVg==" }, "tr46": { "version": "0.0.3", "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz", "integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==" }, "trim-repeated": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/trim-repeated/-/trim-repeated-1.0.0.tgz", "integrity": "sha512-pkonvlKk8/ZuR0D5tLW8ljt5I8kmxp2XKymhepUeOdCEfKpZaktSArkLHZt76OB1ZvO9bssUsDty4SWhLvZpLg==", "requires": { "escape-string-regexp": "^1.0.2" } }, "ts-poet": { "version": "6.9.0", "resolved": "https://registry.npmjs.org/ts-poet/-/ts-poet-6.9.0.tgz", "integrity": "sha512-roe6W6MeZmCjRmppyfOURklO5tQFQ6Sg7swURKkwYJvV7dbGCrK28um5+51iW3twdPRKtwarqFAVMU6G1mvnuQ==", "requires": { "dprint-node": "^1.0.8" } }, "ts-proto": { "version": "2.6.1", "resolved": "https://registry.npmjs.org/ts-proto/-/ts-proto-2.6.1.tgz", "integrity": "sha512-4LTT99MkwkF1+fIA0b2mZu/58Qlpq3Q1g53TwEMZZgR1w/uX00PoVT4Z8aKJxMw0LeKQD4s9NrJYsF27Clckrg==", "requires": { "@bufbuild/protobuf": "^2.0.0", "case-anything": "^2.1.13", "ts-poet": "^6.7.0", "ts-proto-descriptors": "2.0.0" } }, "ts-proto-descriptors": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/ts-proto-descriptors/-/ts-proto-descriptors-2.0.0.tgz", "integrity": "sha512-wHcTH3xIv11jxgkX5OyCSFfw27agpInAd6yh89hKG6zqIXnjW9SYqSER2CVQxdPj4czeOhGagNvZBEbJPy7qkw==", "requires": { "@bufbuild/protobuf": "^2.0.0" } }, "uglify-js": { "version": "3.19.3", "resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-3.19.3.tgz", "integrity": "sha512-v3Xu+yuwBXisp6QYTcH4UbH+xYJXqnq2m/LtQVWKWzYc1iehYnLixoQDN9FH6/j9/oybfd6W9Ghwkl8+UMKTKQ==", "optional": true }, "unbzip2-stream": { "version": "1.4.3", "resolved": "https://registry.npmjs.org/unbzip2-stream/-/unbzip2-stream-1.4.3.tgz", "integrity": "sha512-mlExGW4w71ebDJviH16lQLtZS32VKqsSfk80GCfUlwT/4/hNRFsoscrF/c++9xinkMzECL1uL9DDwXqFWkruPg==", "requires": { "buffer": "^5.2.1", "through": "^2.3.8" } }, "universalify": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/universalify/-/universalify-2.0.1.tgz", "integrity": "sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==" }, "url-parse-lax": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/url-parse-lax/-/url-parse-lax-3.0.0.tgz", "integrity": "sha512-NjFKA0DidqPa5ciFcSrXnAltTtzz84ogy+NebPvfEgAck0+TNg4UJ4IN+fB7zRZfbgUf0syOo9MDxFkDSMuFaQ==", "requires": { "prepend-http": "^2.0.0" } }, "url-to-options": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/url-to-options/-/url-to-options-1.0.1.tgz", "integrity": "sha512-0kQLIzG4fdk/G5NONku64rSH/x32NOA39LVQqlK8Le6lvTF6GGRJpqaQFGgU+CLwySIqBSMdwYM0sYcW9f6P4A==" }, "util-deprecate": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==" }, 
"webidl-conversions": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz", "integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==" }, "whatwg-url": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz", "integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==", "requires": { "tr46": "~0.0.3", "webidl-conversions": "^3.0.0" } }, "wide-align": { "version": "1.1.5", "resolved": "https://registry.npmjs.org/wide-align/-/wide-align-1.1.5.tgz", "integrity": "sha512-eDMORYaPNZ4sQIuuYPDHdQvf4gyCF9rEEV/yPxGfwPkRodwEgiMUUXTx/dex+Me0wxx53S+NgUHaP7y3MGlDmg==", "requires": { "string-width": "^1.0.2 || 2 || 3 || 4" } }, "wordwrap": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-1.0.0.tgz", "integrity": "sha512-gvVzJFlPycKc5dZN4yPkP8w7Dc37BtP1yczEneOb4uq34pXZcvrtRTmWV8W+Ume+XCxKgbjM+nevkyFPMybd4Q==" }, "wrappy": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" }, "xtend": { "version": "4.0.2", "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz", "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ==" }, "yallist": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" }, "yauzl": { "version": "2.10.0", "resolved": "https://registry.npmjs.org/yauzl/-/yauzl-2.10.0.tgz", "integrity": "sha512-p4a9I6X6nu6IhoGmBqAcbJy1mlC4j27vEPZX9F4L4/vZT3Lyq1VkFHw/V/PUcB9Buo+DG3iHkT0x3Qya58zc3g==", "requires": { "buffer-crc32": "~0.2.3", "fd-slicer": "~1.1.0" } } } } protobuf-specs-0.4.1/protoc-builder/hack/package.json000066400000000000000000000005051477352757300226410ustar00rootroot00000000000000{ "name": "hack", "description": "hack to leverage dependabot updates for protobuf typescript generation utilities", "version": "0.0.1", "dependencies": { "grpc_tools_node_protoc_ts": "5.3.3", "grpc-tools": "1.13.0", "protoc-gen-grpc-web": "1.5.0", "ts-proto": "2.6.1" } } protobuf-specs-0.4.1/protoc-builder/versions.mk000066400000000000000000000027521477352757300216540ustar00rootroot00000000000000# The default values for protoc version and googleapis commit will be used in the build *unless* overriden. # # If desired to override a language-specific protoc or googleapis import, # set a variable with the language name prefix followed by an underscore. 
# for example: # #GO_PROTOC_VERSION=v29.3 #GO_PROTOC_CHECKSUM=sha256:3e866620c5be27664f3d2fa2d656b5f3e09b5152b42f1bedbf427b333e90021a #GO_GOOGLEAPIS_COMMIT=fc2697ec5327db9073b4e0aa140248f19b15d7ef # release tag from https://github.com/protocolbuffers/protobuf DEFAULT_PROTOC_VERSION=v29.4 # sha256 of release zip file: sha256sum protoc-${DEFAULT_PROTOC_VERSION#v}-linux-x86_64.zip | awk '{print "sha256:" $1 }' DEFAULT_PROTOC_CHECKSUM=sha256:3074ee900792fa7bf06ab56bd14af83048016cac08abf2a2ae34aaa571a9c110 # git commit from https://github.com/googleapis/googleapis DEFAULT_GOOGLEAPIS_COMMIT=2f37e0ad56637325b24f8603284ccb6f05796f9a ################################################################################## ### DO NOT EDIT BELOW THIS LINE, AS THESE VALUES ARE USED IN THE CORE MAKEFILE ### ################################################################################## LANGUAGES := GO JSONSCHEMA PYTHON RUBY RUST TYPESCRIPT COMPONENTS := PROTOC_VERSION PROTOC_CHECKSUM GOOGLEAPIS_COMMIT # This is creating each possible variable permutation, e.g. # GO_PROTOC_VERSION, JSONSCHEMA_PROTOC_VERSION, etc $(foreach lang,$(LANGUAGES),\ $(foreach component,$(COMPONENTS),\ $(eval $(lang)_$(component) ?= $$(DEFAULT_$(component))))) protobuf-specs-0.4.1/protos/000077500000000000000000000000001477352757300160415ustar00rootroot00000000000000protobuf-specs-0.4.1/protos/envelope.proto000066400000000000000000000034151477352757300207460ustar00rootroot00000000000000// https://raw.githubusercontent.com/secure-systems-lab/dsse/9c813476bd36de70a5738c72e784f123ecea16af/envelope.proto // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. syntax = "proto3"; package io.intoto; option go_package = "github.com/sigstore/protobuf-specs/gen/pb-go/dsse"; option ruby_package = "Sigstore::DSSE"; // An authenticated message of arbitrary type. message Envelope { // Message to be signed. (In JSON, this is encoded as base64.) // REQUIRED. bytes payload = 1; // String unambiguously identifying how to interpret payload. // REQUIRED. string payloadType = 2; // Signature over: // PAE(type, payload) // Where PAE is defined as: // PAE(type, payload) = "DSSEv1" + SP + LEN(type) + SP + type + SP + LEN(payload) + SP + payload // + = concatenation // SP = ASCII space [0x20] // "DSSEv1" = ASCII [0x44, 0x53, 0x53, 0x45, 0x76, 0x31] // LEN(s) = ASCII decimal encoding of the byte length of s, with no leading zeros // REQUIRED (length >= 1). repeated Signature signatures = 3; } message Signature { // Signature itself. (In JSON, this is encoded as base64.) // REQUIRED. bytes sig = 1; // *Unauthenticated* hint identifying which public key was used. // OPTIONAL. string keyid = 2; } protobuf-specs-0.4.1/protos/events.proto000066400000000000000000000042401477352757300204320ustar00rootroot00000000000000// https://github.com/cloudevents/spec/blob/v1.0.2/cloudevents/formats/cloudevents.proto // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
// You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. /** * CloudEvent Protobuf Format * * - Required context attributes are explicity represented. * - Optional and Extension context attributes are carried in a map structure. * - Data may be represented as binary, text, or protobuf messages. */ syntax = "proto3"; package dev.sigstore.events.v1; import "google/protobuf/any.proto"; import "google/protobuf/timestamp.proto"; option go_package = "github.com/sigstore/protobuf-specs/gen/pb-go/events/v1"; option java_package = "dev.sigstore.proto.events.v1"; option java_multiple_files = true; option ruby_package = "Sigstore::Events"; message CloudEvent { // -- CloudEvent Context Attributes // Required Attributes string id = 1; string source = 2; // URI-reference string spec_version = 3; string type = 4; // Optional & Extension Attributes map attributes = 5; // -- CloudEvent Data (Bytes, Text, or Proto) oneof data { bytes binary_data = 6; string text_data = 7; google.protobuf.Any proto_data = 8; } /** * The CloudEvent specification defines * seven attribute value types... */ message CloudEventAttributeValue { oneof attr { bool ce_boolean = 1; int32 ce_integer = 2; string ce_string = 3; bytes ce_bytes = 4; string ce_uri = 5; string ce_uri_ref = 6; google.protobuf.Timestamp ce_timestamp = 7; } } } /** * CloudEvent Protobuf Batch Format * */ message CloudEventBatch { repeated CloudEvent events = 1; } protobuf-specs-0.4.1/protos/sigstore_bundle.proto000066400000000000000000000200031477352757300223110ustar00rootroot00000000000000// Copyright 2022 The Sigstore Authors. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. syntax = "proto3"; package dev.sigstore.bundle.v1; import "google/api/field_behavior.proto"; // https://raw.githubusercontent.com/secure-systems-lab/dsse/9c813476bd36de70a5738c72e784f123ecea16af/envelope.proto import "envelope.proto"; import "sigstore_common.proto"; import "sigstore_rekor.proto"; option go_package = "github.com/sigstore/protobuf-specs/gen/pb-go/bundle/v1"; option java_package = "dev.sigstore.proto.bundle.v1"; option java_multiple_files = true; option java_outer_classname = "BundleProto"; option ruby_package = "Sigstore::Bundle::V1"; // Notes on versioning. // The primary message ('Bundle') MUST be versioned, by populating the // 'media_type' field. Semver-ish (only major/minor versions) scheme MUST // be used. The current version as specified by this file is: // application/vnd.dev.sigstore.bundle.v0.3+json // The semantic version is thus '0.3'. // Various timestamped counter signatures over the artifacts signature. // Currently only RFC3161 signatures are provided. More formats may be added // in the future. 
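// For illustration only (assuming the standard proto3 JSON mapping used for
// bundles; the value is a placeholder, not real data), a populated message
// of this type would serialize roughly as:
//   {"rfc3161Timestamps": [{"signedTimestamp": "<base64-encoded DER TimeStampResponse>"}]}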
message TimestampVerificationData { // A list of RFC3161 signed timestamps provided by the user. // This can be used when the entry has not been stored on a // transparency log, or in conjunction for a stronger trust model. // Clients MUST verify the hashed message in the message imprint // against the signature in the bundle. repeated dev.sigstore.common.v1.RFC3161SignedTimestamp rfc3161_timestamps = 1; } // VerificationMaterial captures details on the materials used to verify // signatures. This message may be embedded in a DSSE envelope as a signature // extension. Specifically, the `ext` field of the extension will expect this // message when the signature extension is for Sigstore. This is identified by // the `kind` field in the extension, which must be set to // application/vnd.dev.sigstore.verificationmaterial;version=0.1 for Sigstore. // When used as a DSSE extension, if the `public_key` field is used to indicate // the key identifier, it MUST match the `keyid` field of the signature the // extension is attached to. message VerificationMaterial { // The key material for verification purposes. // // This allows key material to be conveyed in one of three forms: // // 1. An unspecified public key identifier, for retrieving a key // from an out-of-band mechanism (such as a keyring); // // 2. A sequence of one or more X.509 certificates, of which the first member // MUST be a leaf certificate conveying the signing key. Subsequent members // SHOULD be in issuing order, meaning that `n + 1` should be an issuer for `n`. // // Signers MUST NOT include root CA certificates in bundles, and SHOULD NOT // include intermediate CA certificates that appear in an independent root of trust // (such as the Public Good Instance's trusted root). // // Verifiers MUST validate the chain carefully to ensure that it chains up // to a CA certificate that they independently trust. Verifiers SHOULD // handle old or non-complying bundles that have superfluous intermediate and/or // root CA certificates by either ignoring them or explicitly considering them // untrusted for the purposes of chain building. // // 3. A single X.509 certificate, which MUST be a leaf certificate conveying // the signing key. // // When used with the Public Good Instance (PGI) of Sigstore for "keyless" signing // via Fulcio, form (1) MUST NOT be used, regardless of bundle version. Form (1) // MAY be used with the PGI for self-managed keys. // // When used in a `0.1` or `0.2` bundle with the PGI and "keyless" signing, // form (2) MUST be used. // // When used in a `0.3` bundle with the PGI and "keyless" signing, // form (3) MUST be used. oneof content { dev.sigstore.common.v1.PublicKeyIdentifier public_key = 1 [(google.api.field_behavior) = REQUIRED]; dev.sigstore.common.v1.X509CertificateChain x509_certificate_chain = 2 [(google.api.field_behavior) = REQUIRED]; dev.sigstore.common.v1.X509Certificate certificate = 5 [(google.api.field_behavior) = REQUIRED]; } // An inclusion proof and an optional signed timestamp from the log. // Client verification libraries MAY provide an option to support v0.1 // bundles for backwards compatibility, which may contain an inclusion // promise and not an inclusion proof. In this case, the client MUST // validate the promise. // Verifiers SHOULD NOT allow v0.1 bundles if they're used in an // ecosystem which never produced them. repeated dev.sigstore.rekor.v1.TransparencyLogEntry tlog_entries = 3; // Timestamp may also come from // tlog_entries.inclusion_promise.signed_entry_timestamp. 
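// For illustration only (placeholder values, standard proto3 JSON mapping):
// inside a bundle this message serializes under "verificationMaterial", e.g.
//   {"certificate": {"rawBytes": "<base64 DER leaf certificate>"},
//    "tlogEntries": [ ... ],
//    "timestampVerificationData": {"rfc3161Timestamps": [ ... ]}}
// for form (3) above, while v0.1/v0.2 bundles using form (2) carry
// "x509CertificateChain": {"certificates": [{"rawBytes": "..."}, ...]} in
// place of "certificate".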
TimestampVerificationData timestamp_verification_data = 4; } message Bundle { // MUST be application/vnd.dev.sigstore.bundle.v0.3+json when // when encoded as JSON. // Clients must to be able to accept media type using the previously // defined formats: // * application/vnd.dev.sigstore.bundle+json;version=0.1 // * application/vnd.dev.sigstore.bundle+json;version=0.2 // * application/vnd.dev.sigstore.bundle+json;version=0.3 string media_type = 1; // When a signer is identified by a X.509 certificate, a verifier MUST // verify that the signature was computed at the time the certificate // was valid as described in the Sigstore client spec: "Verification // using a Bundle". // // If the verification material contains a public key identifier // (key hint) and the `content` is a DSSE envelope, the key hints // MUST be exactly the same in the verification material and in the // DSSE envelope. VerificationMaterial verification_material = 2 [(google.api.field_behavior) = REQUIRED]; oneof content { dev.sigstore.common.v1.MessageSignature message_signature = 3 [(google.api.field_behavior) = REQUIRED]; // A DSSE envelope can contain arbitrary payloads. // Verifiers must verify that the payload type is a // supported and expected type. This is part of the DSSE // protocol which is defined here: // // DSSE envelopes in a bundle MUST have exactly one signature. // This is a limitation from the DSSE spec, as it can contain // multiple signatures. There are two primary reasons: // 1. It simplifies the verification logic and policy // 2. The bundle (currently) can only contain a single // instance of the required verification materials // During verification a client MUST reject an envelope if // the number of signatures is not equal to one. io.intoto.Envelope dsse_envelope = 4 [(google.api.field_behavior) = REQUIRED]; } // Reserved for future additions of artifact types. reserved 5 to 50; } protobuf-specs-0.4.1/protos/sigstore_common.proto000066400000000000000000000223611477352757300223410ustar00rootroot00000000000000// Copyright 2022 The Sigstore Authors. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. syntax = "proto3"; package dev.sigstore.common.v1; import "google/api/field_behavior.proto"; import "google/protobuf/timestamp.proto"; option go_package = "github.com/sigstore/protobuf-specs/gen/pb-go/common/v1"; option java_package = "dev.sigstore.proto.common.v1"; option java_multiple_files = true; option java_outer_classname = "CommonProto"; option ruby_package = "Sigstore::Common::V1"; // This package defines commonly used message types within the Sigstore // community. // Only a subset of the secure hash standard algorithms are supported. // See for more // details. // UNSPECIFIED SHOULD not be used, primary reason for inclusion is to force // any proto JSON serialization to emit the used hash algorithm, as default // option is to *omit* the default value of an enum (which is the first // value, represented by '0'. 
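// For illustration only (standard proto3 JSON mapping, placeholder digest):
// a HashOutput using SHA2_256 serializes as
//   {"algorithm": "SHA2_256", "digest": "<base64 digest>"}
// whereas a value of 0 (HASH_ALGORITHM_UNSPECIFIED) would be omitted from the
// JSON entirely, which is why 0 is reserved for "unspecified" rather than
// being assigned to a real algorithm.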
enum HashAlgorithm { HASH_ALGORITHM_UNSPECIFIED = 0; SHA2_256 = 1; SHA2_384 = 2; SHA2_512 = 3; SHA3_256 = 4; SHA3_384 = 5; } // Details of a specific public key, capturing the the key encoding method, // and signature algorithm. // // PublicKeyDetails captures the public key/hash algorithm combinations // recommended in the Sigstore ecosystem. // // This is modelled as a linear set as we want to provide a small number of // opinionated options instead of allowing every possible permutation. // // Any changes to this enum MUST be reflected in the algorithm registry. // See: docs/algorithm-registry.md // // To avoid the possibility of contradicting formats such as PKCS1 with // ED25519 the valid permutations are listed as a linear set instead of a // cartesian set (i.e one combined variable instead of two, one for encoding // and one for the signature algorithm). enum PublicKeyDetails { PUBLIC_KEY_DETAILS_UNSPECIFIED = 0; // RSA PKCS1_RSA_PKCS1V5 = 1 [deprecated = true]; // See RFC8017 PKCS1_RSA_PSS = 2 [deprecated = true]; // See RFC8017 PKIX_RSA_PKCS1V5 = 3 [deprecated = true]; PKIX_RSA_PSS = 4 [deprecated = true]; // RSA public key in PKIX format, PKCS#1v1.5 signature PKIX_RSA_PKCS1V15_2048_SHA256 = 9; PKIX_RSA_PKCS1V15_3072_SHA256 = 10; PKIX_RSA_PKCS1V15_4096_SHA256 = 11; // RSA public key in PKIX format, RSASSA-PSS signature PKIX_RSA_PSS_2048_SHA256 = 16; // See RFC4055 PKIX_RSA_PSS_3072_SHA256 = 17; PKIX_RSA_PSS_4096_SHA256 = 18; // ECDSA PKIX_ECDSA_P256_HMAC_SHA_256 = 6 [deprecated = true]; // See RFC6979 PKIX_ECDSA_P256_SHA_256 = 5; // See NIST FIPS 186-4 PKIX_ECDSA_P384_SHA_384 = 12; PKIX_ECDSA_P521_SHA_512 = 13; // Ed 25519 PKIX_ED25519 = 7; // See RFC8032 PKIX_ED25519_PH = 8; // These algorithms are deprecated and should not be used, but they // were/are being used by most Sigstore clients implementations. PKIX_ECDSA_P384_SHA_256 = 19 [deprecated = true]; PKIX_ECDSA_P521_SHA_256 = 20 [deprecated = true]; // LMS and LM-OTS // // These keys and signatures may be used by private Sigstore // deployments, but are not currently supported by the public // good instance. // // USER WARNING: LMS and LM-OTS are both stateful signature schemes. // Using them correctly requires discretion and careful consideration // to ensure that individual secret keys are not used more than once. // In addition, LM-OTS is a single-use scheme, meaning that it // MUST NOT be used for more than one signature per LM-OTS key. // If you cannot maintain these invariants, you MUST NOT use these // schemes. LMS_SHA256 = 14; LMOTS_SHA256 = 15; // Reserved for future additions of public key/signature algorithm types. reserved 21 to 50; } // HashOutput captures a digest of a 'message' (generic octet sequence) // and the corresponding hash algorithm used. message HashOutput { HashAlgorithm algorithm = 1; // This is the raw octets of the message digest as computed by // the hash algorithm. bytes digest = 2; } // MessageSignature stores the computed signature over a message. message MessageSignature { // Message digest can be used to identify the artifact. // Clients MUST NOT attempt to use this digest to verify the associated // signature; it is intended solely for identification. HashOutput message_digest = 1; // The raw bytes as returned from the signature algorithm. // The signature algorithm (and so the format of the signature bytes) // are determined by the contents of the 'verification_material', // either a key-pair or a certificate. 
If using a certificate, the // certificate contains the required information on the signature // algorithm. // When using a key pair, the algorithm MUST be part of the public // key, which MUST be communicated out-of-band. bytes signature = 2 [(google.api.field_behavior) = REQUIRED]; } // LogId captures the identity of a transparency log. message LogId { // The unique identity of the log, represented by its public key. bytes key_id = 1 [(google.api.field_behavior) = REQUIRED]; } // This message holds a RFC 3161 timestamp. message RFC3161SignedTimestamp { // Signed timestamp is the DER encoded TimeStampResponse. // See https://www.rfc-editor.org/rfc/rfc3161.html#section-2.4.2 bytes signed_timestamp = 1 [(google.api.field_behavior) = REQUIRED]; } message PublicKey { // DER-encoded public key, encoding method is specified by the // key_details attribute. optional bytes raw_bytes = 1; // Key encoding and signature algorithm to use for this key. PublicKeyDetails key_details = 2; // Optional validity period for this key, *inclusive* of the endpoints. optional TimeRange valid_for = 3; } // PublicKeyIdentifier can be used to identify an (out of band) delivered // key, to verify a signature. message PublicKeyIdentifier { // Optional unauthenticated hint on which key to use. // The format of the hint must be agreed upon out of band by the // signer and the verifiers, and so is not subject to this // specification. // Example use-case is to specify the public key to use, from a // trusted key-ring. // Implementors are RECOMMENDED to derive the value from the public // key as described in RFC 6962. // See: string hint = 1; } // An ASN.1 OBJECT IDENTIFIER message ObjectIdentifier { repeated int32 id = 1 [(google.api.field_behavior) = REQUIRED]; } // An OID and the corresponding (byte) value. message ObjectIdentifierValuePair { ObjectIdentifier oid = 1; bytes value = 2; } message DistinguishedName { string organization = 1; string common_name = 2; } message X509Certificate { // DER-encoded X.509 certificate. bytes raw_bytes = 1 [(google.api.field_behavior) = REQUIRED]; } enum SubjectAlternativeNameType { SUBJECT_ALTERNATIVE_NAME_TYPE_UNSPECIFIED = 0; EMAIL = 1; URI = 2; // OID 1.3.6.1.4.1.57264.1.7 // See https://github.com/sigstore/fulcio/blob/main/docs/oid-info.md#1361415726417--othername-san // for more details. OTHER_NAME = 3; } message SubjectAlternativeName { SubjectAlternativeNameType type = 1; oneof identity { // A regular expression describing the expected value for // the SAN. string regexp = 2; // The exact value to match against. string value = 3; } } // A collection of X.509 certificates. // // This "chain" can be used in multiple contexts, such as providing a root CA // certificate within a TUF root of trust or multiple untrusted certificates for // the purpose of chain building. message X509CertificateChain { // One or more DER-encoded certificates. // // In some contexts (such as `VerificationMaterial.x509_certificate_chain`), this sequence // has an imposed order. Unless explicitly specified, there is otherwise no // guaranteed order. repeated X509Certificate certificates = 1; } // The time range is closed and includes both the start and end times, // (i.e., [start, end]). // End is optional to be able to capture a period that has started but // has no known end. 
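// For illustration only (standard proto3 JSON mapping, placeholder dates):
//   {"start": "2024-01-01T00:00:00Z", "end": "2024-12-31T23:59:59Z"}
// is a closed range, while
//   {"start": "2024-01-01T00:00:00Z"}
// denotes a range that has started but has no known end.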
message TimeRange { google.protobuf.Timestamp start = 1; optional google.protobuf.Timestamp end = 2; } protobuf-specs-0.4.1/protos/sigstore_rekor.proto000066400000000000000000000167071477352757300222020ustar00rootroot00000000000000// Copyright 2022 The Sigstore Authors. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. syntax = "proto3"; package dev.sigstore.rekor.v1; import "google/api/field_behavior.proto"; import "sigstore_common.proto"; option go_package = "github.com/sigstore/protobuf-specs/gen/pb-go/rekor/v1"; option java_package = "dev.sigstore.proto.rekor.v1"; option java_multiple_files = true; option java_outer_classname = "RekorProto"; option ruby_package = "Sigstore::Rekor::V1"; // KindVersion contains the entry's kind and api version. message KindVersion { // Kind is the type of entry being stored in the log. // See here for a list: https://github.com/sigstore/rekor/tree/main/pkg/types string kind = 1 [(google.api.field_behavior) = REQUIRED]; // The specific api version of the type. string version = 2 [(google.api.field_behavior) = REQUIRED]; } // The checkpoint MUST contain an origin string as a unique log identifier, // the tree size, and the root hash. It MAY also be followed by optional data, // and clients MUST NOT assume optional data. The checkpoint MUST also contain // a signature over the root hash (tree head). The checkpoint MAY contain additional // signatures, but the first SHOULD be the signature from the log. Checkpoint contents // are concatenated with newlines into a single string. // The checkpoint format is described in // https://github.com/transparency-dev/formats/blob/main/log/README.md // and https://github.com/C2SP/C2SP/blob/main/tlog-checkpoint.md. // An example implementation can be found in https://github.com/sigstore/rekor/blob/main/pkg/util/signed_note.go message Checkpoint { string envelope = 1 [(google.api.field_behavior) = REQUIRED]; } // InclusionProof is the proof returned from the transparency log. Can // be used for offline or online verification against the log. message InclusionProof { // The index of the entry in the tree it was written to. int64 log_index = 1 [(google.api.field_behavior) = REQUIRED]; // The hash digest stored at the root of the merkle tree at the time // the proof was generated. bytes root_hash = 2 [(google.api.field_behavior) = REQUIRED]; // The size of the merkle tree at the time the proof was generated. int64 tree_size = 3 [(google.api.field_behavior) = REQUIRED]; // A list of hashes required to compute the inclusion proof, sorted // in order from leaf to root. // Note that leaf and root hashes are not included. // The root hash is available separately in this message, and the // leaf hash should be calculated by the client. repeated bytes hashes = 4 [(google.api.field_behavior) = REQUIRED]; // Signature of the tree head, as of the time of this proof was // generated. See above info on 'Checkpoint' for more details. 
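// For illustration only (placeholder values, following the checkpoint format
// referenced above): the envelope is a newline-separated note whose body is
//   <origin string, e.g. rekor.example.test - 1193050959916656506>
//   <tree size in decimal, e.g. 1234567>
//   <base64-encoded root hash>
// followed by a blank line and one or more signature lines as defined by the
// signed-note format.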
Checkpoint checkpoint = 5 [(google.api.field_behavior) = REQUIRED]; } // The inclusion promise is calculated by Rekor. It's calculated as a // signature over a canonical JSON serialization of the persisted entry, the // log ID, log index and the integration timestamp. // See https://github.com/sigstore/rekor/blob/a6e58f72b6b18cc06cefe61808efd562b9726330/pkg/api/entries.go#L54 // The format of the signature depends on the transparency log's public key. // If the signature algorithm requires a hash function and/or a signature // scheme (e.g. RSA) those has to be retrieved out-of-band from the log's // operators, together with the public key. // This is used to verify the integration timestamp's value and that the log // has promised to include the entry. message InclusionPromise { bytes signed_entry_timestamp = 1 [(google.api.field_behavior) = REQUIRED]; } // TransparencyLogEntry captures all the details required from Rekor to // reconstruct an entry, given that the payload is provided via other means. // This type can easily be created from the existing response from Rekor. // Future iterations could rely on Rekor returning the minimal set of // attributes (excluding the payload) that are required for verifying the // inclusion promise. The inclusion promise (called SignedEntryTimestamp in // the response from Rekor) is similar to a Signed Certificate Timestamp // as described here https://www.rfc-editor.org/rfc/rfc6962.html#section-3.2. message TransparencyLogEntry { // The global index of the entry, used when querying the log by index. int64 log_index = 1 [(google.api.field_behavior) = REQUIRED]; // The unique identifier of the log. dev.sigstore.common.v1.LogId log_id = 2 [(google.api.field_behavior) = REQUIRED]; // The kind (type) and version of the object associated with this // entry. These values are required to construct the entry during // verification. KindVersion kind_version = 3 [(google.api.field_behavior) = REQUIRED]; // The UNIX timestamp from the log when the entry was persisted. // The integration time MUST NOT be trusted if inclusion_promise // is omitted. int64 integrated_time = 4 [(google.api.field_behavior) = REQUIRED]; // The inclusion promise/signed entry timestamp from the log. // Required for v0.1 bundles, and MUST be verified. // Optional for >= v0.2 bundles if another suitable source of // time is present (such as another source of signed time, // or the current system time for long-lived certificates). // MUST be verified if no other suitable source of time is present, // and SHOULD be verified otherwise. InclusionPromise inclusion_promise = 5; // The inclusion proof can be used for offline or online verification // that the entry was appended to the log, and that the log has not been // altered. InclusionProof inclusion_proof = 6 [(google.api.field_behavior) = REQUIRED]; // Optional. The canonicalized transparency log entry, used to // reconstruct the Signed Entry Timestamp (SET) during verification. // The contents of this field are the same as the `body` field in // a Rekor response, meaning that it does **not** include the "full" // canonicalized form (of log index, ID, etc.) which are // exposed as separate fields. The verifier is responsible for // combining the `canonicalized_body`, `log_index`, `log_id`, // and `integrated_time` into the payload that the SET's signature // is generated over. // This field is intended to be used in cases where the SET cannot be // produced determinisitically (e.g. 
inconsistent JSON field ordering, // differing whitespace, etc). // // If set, clients MUST verify that the signature referenced in the // `canonicalized_body` matches the signature provided in the // `Bundle.content`. // If not set, clients are responsible for constructing an equivalent // payload from other sources to verify the signature. bytes canonicalized_body = 7; } protobuf-specs-0.4.1/protos/sigstore_trustroot.proto000066400000000000000000000344431477352757300231420ustar00rootroot00000000000000// Copyright 2022 The Sigstore Authors. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. syntax = "proto3"; package dev.sigstore.trustroot.v1; import "google/api/field_behavior.proto"; import "sigstore_common.proto"; option go_package = "github.com/sigstore/protobuf-specs/gen/pb-go/trustroot/v1"; option java_package = "dev.sigstore.proto.trustroot.v1"; option java_multiple_files = true; option java_outer_classname = "TrustRootProto"; option ruby_package = "Sigstore::TrustRoot::V1"; // TransparencyLogInstance describes the immutable parameters from a // transparency log. // See https://www.rfc-editor.org/rfc/rfc9162.html#name-log-parameters // for more details. // The included parameters are the minimal set required to identify a log, // and verify an inclusion proof/promise. message TransparencyLogInstance { // The base URL at which can be used to URLs for the client. string base_url = 1; // The hash algorithm used for the Merkle Tree. dev.sigstore.common.v1.HashAlgorithm hash_algorithm = 2; // The public key used to verify signatures generated by the log. // This attribute contains the signature algorithm used by the log. dev.sigstore.common.v1.PublicKey public_key = 3; // The unique identifier for this transparency log. // Represented as the SHA-256 hash of the log's public key, // calculated over the DER encoding of the key represented as // SubjectPublicKeyInfo. // See https://www.rfc-editor.org/rfc/rfc6962#section-3.2 dev.sigstore.common.v1.LogId log_id = 4; // The checkpoint key identifier for the log used in a checkpoint. // Optional, not provided for logs that do not generate checkpoints. // For logs that do generate checkpoints, if not set, assume // log_id equals checkpoint_key_id. // Follows the specification described here // for ECDSA and Ed25519 signatures: // https://github.com/C2SP/C2SP/blob/main/signed-note.md#signatures // For RSA signatures, the key ID will match the ECDSA format, the // hashed DER-encoded SPKI public key. Publicly witnessed logs MUST NOT // use RSA-signed checkpoints, since witnesses do not support // RSA signatures. // This is provided for convenience. Clients can also calculate the // checkpoint key ID given the log's public key. // SHOULD be set for logs generating Ed25519 signatures. // SHOULD be 4 bytes long, as a truncated hash. dev.sigstore.common.v1.LogId checkpoint_key_id = 5; } // CertificateAuthority enlists the information required to identify which // CA to use and perform signature verification. 
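// For illustration only (the values below are examples, not taken from this
// file): a Fulcio-style entry in a trusted root, in its proto3 JSON form,
// typically looks like
//   {"subject": {"organization": "sigstore.dev", "commonName": "sigstore"},
//    "uri": "https://fulcio.sigstore.dev",
//    "certChain": {"certificates": [{"rawBytes": "<base64 DER>"}, ...]},
//    "validFor": {"start": "<RFC 3339 time>"}}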
message CertificateAuthority { // The root certificate MUST be self-signed, and so the subject and // issuer are the same. dev.sigstore.common.v1.DistinguishedName subject = 1; // The URI identifies the certificate authority. // // It is RECOMMENDED that the URI is the base URL for the certificate // authority, that can be provided to any SDK/client provided // by the certificate authority to interact with the certificate // authority. string uri = 2; // The certificate chain for this CA. The last certificate in the chain // MUST be the trust anchor. The trust anchor MAY be a self-signed root // CA certificate or MAY be an intermediate CA certificate. dev.sigstore.common.v1.X509CertificateChain cert_chain = 3; // The time the *entire* chain was valid. This is at max the // longest interval when *all* certificates in the chain were valid, // but it MAY be shorter. Clients MUST check timestamps against *both* // the `valid_for` time range *and* the entire certificate chain. // // The TimeRange should be considered valid *inclusive* of the // endpoints. dev.sigstore.common.v1.TimeRange valid_for = 4; } // TrustedRoot describes the client's complete set of trusted entities. // How the TrustedRoot is populated is not specified, but can be a // combination of many sources such as TUF repositories, files on disk etc. // // The TrustedRoot is not meant to be used for any artifact verification, only // to capture the complete/global set of trusted verification materials. // When verifying an artifact, based on the artifact and policies, a selection // of keys/authorities are expected to be extracted and provided to the // verification function. This way the set of keys/authorities can be kept to // a minimal set by the policy to gain better control over what signatures // that are allowed. // // The embedded transparency logs, CT logs, CAs and TSAs MUST include any // previously used instance -- otherwise signatures made in the past cannot // be verified. // // All the listed instances SHOULD be sorted by the 'valid_for' in ascending // order, that is, the oldest instance first. Only the last instance is // allowed to have their 'end' timestamp unset. All previous instances MUST // have a closed interval of validity. The last instance MAY have a closed // interval. Clients MUST accept instances that overlaps in time, if not // clients may experience problems during rotations of verification // materials. // // To be able to manage planned rotations of either transparency logs or // certificate authorities, clienst MUST accept lists of instances where // the last instance have a 'valid_for' that belongs to the future. // This should not be a problem as clients SHOULD first seek the trust root // for a suitable instance before creating a per artifact trust root (that // is, a sub-set of the complete trust root) that is used for verification. message TrustedRoot { // MUST be application/vnd.dev.sigstore.trustedroot.v0.1+json // when encoded as JSON. // Clients MUST be able to process and parse content with the media // type defined in the old format: // application/vnd.dev.sigstore.trustedroot+json;version=0.1 string media_type = 1; // A set of trusted Rekor servers. repeated TransparencyLogInstance tlogs = 2; // A set of trusted certificate authorities (e.g Fulcio), and any // intermediate certificates they provide. // If a CA is issuing multiple intermediate certificate, each // combination shall be represented as separate chain. 
  // I.e., a single root cert may appear in multiple chains but with different
  // intermediate and/or leaf certificates.
  // The certificates are intended to be used for verifying artifact
  // signatures.
  repeated CertificateAuthority certificate_authorities = 3;

  // A set of trusted certificate transparency logs.
  repeated TransparencyLogInstance ctlogs = 4;

  // A set of trusted timestamping authorities.
  repeated CertificateAuthority timestamp_authorities = 5;
}

// SigningConfig represents the trusted entities/state needed by Sigstore
// signing. In particular, it primarily contains service URLs that a Sigstore
// signer may need to connect to for the online aspects of signing.
message SigningConfig {
  // MUST be application/vnd.dev.sigstore.signingconfig.v0.2+json
  // Clients MAY choose to also support
  // application/vnd.dev.sigstore.signingconfig.v0.1+json
  string media_type = 5;

  // URLs to Fulcio-compatible CAs, capable of receiving
  // Certificate Signing Requests (CSRs) and responding with
  // issued certificates.
  //
  // These URLs MUST be the "base" URLs for the CAs, which clients
  // should construct an appropriate CSR endpoint on top of.
  // For example, if a CA URL is `https://example.com/ca`, then
  // the client MAY construct the CSR endpoint as
  // `https://example.com/ca/api/v2/signingCert`.
  //
  // Clients MUST select only one Service with the highest API version
  // that the client is compatible with, that is within its
  // validity period, and has the newest validity start date.
  // Clients SHOULD select the first Service that meets this requirement.
  // All listed Services SHOULD be sorted by the `valid_for` window in
  // descending order, with the newest instance first.
  repeated Service ca_urls = 6;

  // URLs to OpenID Connect identity providers.
  //
  // These URLs MUST be the "base" URLs for the OIDC IdPs, which clients
  // should perform well-known OpenID Connect discovery against.
  //
  // Clients MUST select only one Service with the highest API version
  // that the client is compatible with, that is within its
  // validity period, and has the newest validity start date.
  // Clients SHOULD select the first Service that meets this requirement.
  // All listed Services SHOULD be sorted by the `valid_for` window in
  // descending order, with the newest instance first.
  repeated Service oidc_urls = 7;

  // URLs to Rekor transparency logs.
  //
  // These URLs MUST be the "base" URLs for the transparency logs,
  // which clients should construct appropriate API endpoints on top of.
  //
  // Clients MUST select Services with the highest API version
  // that the client is compatible with, that are within its
  // validity period, and have the newest validity start dates.
  // All listed Services SHOULD be sorted by the `valid_for` window in
  // descending order, with the newest instance first.
  //
  // Clients MUST select Services based on the selector value of
  // `rekor_tlog_config`.
  repeated Service rekor_tlog_urls = 8;

  // Specifies how a client should select the set of Rekor transparency
  // logs to write to.
  ServiceConfiguration rekor_tlog_config = 9;

  // URLs to RFC 3161 Time Stamping Authorities (TSA).
  //
  // These URLs MUST be the *full* URLs for the TSAs, meaning that they
  // should be suitable for submitting Time Stamp Requests (TSRs) to
  // via HTTP, per RFC 3161.
  //
  // Clients MUST select Services with the highest API version
  // that the client is compatible with, that are within its
  // validity period, and have the newest validity start dates.
  // All listed Services SHOULD be sorted by the `valid_for` window in
  // descending order, with the newest instance first.
  //
  // Clients MUST select Services based on the selector value of
  // `tsa_config`.
  repeated Service tsa_urls = 10;

  // Specifies how a client should select the set of TSAs to request
  // signed timestamps from.
  ServiceConfiguration tsa_config = 11;

  // Reserved tags for previously defined service URL fields
  reserved 1 to 4;
}

// Service represents an instance of a service that is a part of Sigstore infrastructure.
// Clients MUST use the API version hint to determine the service with the
// highest API version that the client is compatible with. Clients MUST also
// only connect to services within the specified validity period and that have
// the newest validity start date.
message Service {
  // URL of the service. MUST include scheme and authority. MAY include path.
  string url = 1;

  // Specifies the major API version. A value of 0 represents a service that
  // has not yet been released.
  uint32 major_api_version = 2;

  // Validity period of a service. A service that has only a start date
  // SHOULD be considered the most recent instance of that service, but
  // the client MUST NOT assume there is only one valid instance.
  // The TimeRange MUST be considered valid *inclusive* of the
  // endpoints.
  dev.sigstore.common.v1.TimeRange valid_for = 3;
}

// ServiceSelector specifies how a client SHOULD select a set of
// Services to connect to. A client SHOULD throw an error if
// the value is SERVICE_SELECTOR_UNDEFINED.
enum ServiceSelector {
  SERVICE_SELECTOR_UNDEFINED = 0;

  // Clients SHOULD select all Services based on supported API version
  // and validity window.
  ALL = 1;

  // Clients SHOULD select one Service based on supported API version
  // and validity window. It is up to the client implementation to
  // decide how to select the Service, e.g. random or round-robin.
  ANY = 2;

  // Clients SHOULD select a specific number of Services based on
  // supported API version and validity window, using the provided
  // `count`. It is up to the client implementation to decide how to
  // select the Services, e.g. random or round-robin.
  EXACT = 3;
}

// ServiceConfiguration specifies how a client should select a set of
// Services to connect to, along with a count when a specific number
// of Services is requested.
message ServiceConfiguration {
  // How a client should select a set of Services to connect to.
  ServiceSelector selector = 1;

  // count specifies the number of Services the client should use.
  // Only used when selector is set to EXACT, and count MUST be greater
  // than 0. count MUST be less than or equal to the number of Services.
  uint32 count = 2;
}

// ClientTrustConfig describes the complete state needed by a client
// to perform both signing and verification operations against a particular
// instance of Sigstore.
message ClientTrustConfig {
  // MUST be application/vnd.dev.sigstore.clienttrustconfig.v0.1+json
  string media_type = 1;

  // The root of trust, which MUST be present.
  TrustedRoot trusted_root = 2 [(google.api.field_behavior) = REQUIRED];

  // Configuration for signing clients, which MUST be present.
  SigningConfig signing_config = 3 [(google.api.field_behavior) = REQUIRED];
}

protobuf-specs-0.4.1/protos/sigstore_verification.proto

// Copyright 2022 The Sigstore Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

syntax = "proto3";

package dev.sigstore.verification.v1;

import "sigstore_common.proto";
import "sigstore_trustroot.proto";
import "sigstore_bundle.proto";

option go_package = "github.com/sigstore/protobuf-specs/gen/pb-go/verification/v1";
option java_package = "dev.sigstore.proto.verification.v1";
option java_multiple_files = true;
option java_outer_classname = "VerificationProto";
option ruby_package = "Sigstore::Verification::V1";

// The identity of an X.509 Certificate signer.
message CertificateIdentity {
  // The X.509v3 issuer extension (OID 1.3.6.1.4.1.57264.1.1)
  string issuer = 1;

  dev.sigstore.common.v1.SubjectAlternativeName san = 2;

  // An unordered list of OIDs that must be verified.
  // All OID/values provided in this list MUST exactly match against
  // the values in the certificate for verification to be successful.
  repeated dev.sigstore.common.v1.ObjectIdentifierValuePair oids = 3;
}

message CertificateIdentities {
  repeated CertificateIdentity identities = 1;
}

message PublicKeyIdentities {
  repeated dev.sigstore.common.v1.PublicKey public_keys = 1;
}

// A lightweight set of options/policies for identifying trusted signers,
// used during verification of a single artifact.
message ArtifactVerificationOptions {
  message TlogOptions {
    // Number of transparency logs the entry must appear on.
    int32 threshold = 1;
    // Perform an online inclusion proof.
    bool perform_online_verification = 2;
    // Disable verification for transparency logs.
    bool disable = 3;
  }

  message CtlogOptions {
    // The number of CT transparency logs the certificate must
    // appear on.
    int32 threshold = 1;
    reserved 2; // Deprecated: Support for detached SCTs
    // Disable CT transparency log verification
    bool disable = 3;
  }

  message TimestampAuthorityOptions {
    // The number of signed timestamps that are expected.
    int32 threshold = 1;
    // Disable signed timestamp verification.
    bool disable = 2;
  }

  message TlogIntegratedTimestampOptions {
    // The number of integrated timestamps that are expected.
    int32 threshold = 1;
    // Disable integrated timestamp verification.
    bool disable = 2;
  }

  message ObserverTimestampOptions {
    // The number of external observers of the timestamp.
    // This is a union of RFC 3161 signed timestamps and
    // integrated timestamps from a transparency log, and
    // could include additional timestamp sources in the
    // future.
    int32 threshold = 1;
    // Disable observer timestamp verification.
    bool disable = 2;
  }

  // At least one identity MUST be provided. Providing zero identities
  // is an error. If at least one provided identity is found as a
  // signer, the verification is considered successful.
  oneof signers {
    CertificateIdentities certificate_identities = 1;

    // To simplify verification implementation, the logic for
    // bundle verification should be implemented as a
    // higher-order function, where one of the arguments should be an
    // interface over the set of trusted public keys, like this:
    // `Verify(bytes artifact, bytes signature, string key_id)`.
    // This way the caller is in full control of mapping the
    // identified (or hinted) key in the bundle to one of the
    // trusted keys, as this process is inherently application
    // specific.
    PublicKeyIdentities public_keys = 2;
  }

  // Optional options for artifact transparency log verification.
  // If none is provided, the default verification options are:
  // Threshold: 1
  // Online verification: false
  // Disable: false
  optional TlogOptions tlog_options = 3;

  // Optional options for certificate transparency log verification.
  // If none is provided, the default verification options are:
  // Threshold: 1
  // Disable: false
  optional CtlogOptions ctlog_options = 4;

  // Optional options for certificate signed timestamp verification.
  // If none is provided, the default verification options are:
  // Threshold: 0
  // Disable: true
  optional TimestampAuthorityOptions tsa_options = 5;

  // Optional options for integrated timestamp verification.
  // If none is provided, the default verification options are:
  // Threshold: 0
  // Disable: true
  optional TlogIntegratedTimestampOptions integrated_ts_options = 6;

  // Optional options for observed timestamp verification.
  // If none is provided, the default verification options are:
  // Threshold: 1
  // Disable: false
  optional ObserverTimestampOptions observer_options = 7;
}

message Artifact {
  oneof data {
    // Location of the artifact
    string artifact_uri = 1;
    // The raw bytes of the artifact
    bytes artifact = 2;
    // Digest of the artifact. SHOULD NOT be used when verifying an
    // in-toto attestation as the subject digest cannot be
    // reconstructed. This option will not work with Ed25519
    // signatures, use Ed25519Ph or another algorithm instead.
    dev.sigstore.common.v1.HashOutput artifact_digest = 3;
  }
}

// Input captures all that is needed to call the bundle verification method,
// to verify a single artifact referenced by the bundle.
message Input {
  // The verification materials provided during a bundle verification.
  // The running process is usually preloaded with a "global"
  // dev.sigstore.trustroot.v1.TrustedRoot instance. Prior to
  // verifying an artifact (i.e. a bundle), and/or based on current
  // policy, some selection is expected to happen, to filter out the
  // exact certificate authority to use, which transparency logs are
  // relevant etc. The result should be captured in the
  // `artifact_trust_root`.
  dev.sigstore.trustroot.v1.TrustedRoot artifact_trust_root = 1;

  ArtifactVerificationOptions artifact_verification_options = 2;

  dev.sigstore.bundle.v1.Bundle bundle = 3;

  // If the bundle contains a message signature, the artifact must be
  // provided.
  optional Artifact artifact = 4;
}
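// --- Illustrative note (not part of the schema) ---
// The `signers` oneof above suggests structuring bundle verification as a
// higher-order function that takes an interface over the set of trusted
// public keys. A minimal Go sketch of that shape follows; the interface
// and function names are assumptions for illustration only and are not
// part of any generated binding.
//
//     // TrustedKeys abstracts the application-specific mapping from a
//     // (possibly hinted) key ID in the bundle to a trusted key, and
//     // performs the signature check with that key.
//     type TrustedKeys interface {
//         Verify(artifact, signature []byte, keyID string) error
//     }
//
//     // verifyWithTrustedKeys is the higher-order shape described in the
//     // comment on `public_keys`: the bundle-verification logic supplies
//     // the signed payload, the signature, and any key ID hint, and
//     // delegates key selection entirely to the caller's implementation.
//     func verifyWithTrustedKeys(keys TrustedKeys, payload, signature []byte, keyIDHint string) error {
//         return keys.Verify(payload, signature, keyIDHint)
//     }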